-rw-r--r--  .circleci/config.yml | 92
-rw-r--r--  .dockerignore | 5
-rw-r--r--  .github/CODEOWNERS | 6
-rw-r--r--  .github/ISSUE_TEMPLATE/bug_report.md | 63
-rw-r--r--  .github/ISSUE_TEMPLATE/feature_request.md | 32
-rw-r--r--  .github/ISSUE_TEMPLATE/question-about-using-compose.md | 12
-rw-r--r--  .github/stale.yml | 59
-rw-r--r--  .gitignore | 16
-rw-r--r--  .pre-commit-config.yaml | 15
-rw-r--r--  CHANGELOG.md | 790
-rw-r--r--  Dockerfile | 131
-rw-r--r--  Dockerfile.armhf | 73
-rw-r--r--  Dockerfile.run | 23
-rw-r--r--  Dockerfile.s390x | 15
-rw-r--r--  Jenkinsfile | 174
-rw-r--r--  MAINTAINERS | 37
-rw-r--r--  MANIFEST.in | 4
-rw-r--r--  Makefile | 57
-rw-r--r--  README.md | 103
-rw-r--r--  Release.Jenkinsfile | 312
-rw-r--r--  appveyor.yml | 24
-rwxr-xr-x  bin/docker-compose | 3
-rw-r--r--  compose/__init__.py | 5
-rw-r--r--  compose/__main__.py | 3
-rw-r--r--  compose/bundle.py | 258
-rw-r--r--  compose/cli/colors.py | 31
-rw-r--r--  compose/cli/command.py | 170
-rw-r--r--  compose/cli/docker_client.py | 74
-rw-r--r--  compose/cli/docopt_command.py | 19
-rw-r--r--  compose/cli/errors.py | 13
-rw-r--r--  compose/cli/formatter.py | 37
-rw-r--r--  compose/cli/log_printer.py | 81
-rw-r--r--  compose/cli/main.py | 661
-rw-r--r--  compose/cli/signals.py | 3
-rw-r--r--  compose/cli/utils.py | 27
-rw-r--r--  compose/cli/verbose_proxy.py | 15
-rw-r--r--  compose/config/__init__.py | 4
-rw-r--r--  compose/config/compose_spec.json (renamed from compose/config/config_schema_v3.6.json) | 374
-rw-r--r--  compose/config/config.py | 461
-rw-r--r--  compose/config/config_schema_v2.0.json | 405
-rw-r--r--  compose/config/config_schema_v2.1.json | 461
-rw-r--r--  compose/config/config_schema_v2.2.json | 470
-rw-r--r--  compose/config/config_schema_v2.3.json | 514
-rw-r--r--  compose/config/config_schema_v2.4.json | 513
-rw-r--r--  compose/config/config_schema_v3.0.json | 399
-rw-r--r--  compose/config/config_schema_v3.1.json | 444
-rw-r--r--  compose/config/config_schema_v3.2.json | 492
-rw-r--r--  compose/config/config_schema_v3.3.json | 551
-rw-r--r--  compose/config/config_schema_v3.4.json | 560
-rw-r--r--  compose/config/config_schema_v3.5.json | 588
-rw-r--r--  compose/config/environment.py | 72
-rw-r--r--  compose/config/errors.py | 12
-rw-r--r--  compose/config/interpolation.py | 95
-rw-r--r--  compose/config/serialize.py | 64
-rw-r--r--  compose/config/sort_services.py | 6
-rw-r--r--  compose/config/types.py | 58
-rw-r--r--  compose/config/validation.py | 99
-rw-r--r--  compose/const.py | 52
-rw-r--r--  compose/container.py | 60
-rw-r--r--  compose/errors.py | 13
-rw-r--r--  compose/metrics/__init__.py (renamed from compose/state.py) | 0
-rw-r--r--  compose/metrics/client.py | 64
-rw-r--r--  compose/metrics/decorator.py | 21
-rw-r--r--  compose/network.py | 114
-rw-r--r--  compose/parallel.py | 91
-rw-r--r--  compose/progress_stream.py | 38
-rw-r--r--  compose/project.py | 654
-rw-r--r--  compose/service.py | 853
-rw-r--r--  compose/timeparse.py | 22
-rw-r--r--  compose/utils.py | 72
-rw-r--r--  compose/version.py | 3
-rw-r--r--  compose/volume.py | 77
-rw-r--r--  contrib/completion/bash/docker-compose | 142
-rw-r--r--  contrib/completion/fish/docker-compose.fish | 3
-rwxr-xr-x [-rw-r--r--]  contrib/completion/zsh/_docker-compose | 180
-rwxr-xr-x  contrib/migration/migrate-compose-file-v1-to-v2.py | 11
-rw-r--r--  debian/changelog | 85
-rw-r--r--  debian/compat | 1
-rw-r--r--  debian/control | 52
-rw-r--r--  debian/copyright | 2
-rw-r--r--  debian/docker-compose.install | 3
-rw-r--r--  debian/docs | 1
-rw-r--r--  debian/patches/Relax-dependencies.patch | 26
-rw-r--r--  debian/patches/series | 1
-rw-r--r--  debian/pydist-overrides | 9
-rwxr-xr-x  debian/rules | 13
-rw-r--r--  debian/upstream/metadata | 4
-rw-r--r--  debian/watch | 2
-rwxr-xr-x  docker-compose-entrypoint.sh | 20
-rw-r--r--  docker-compose.spec | 62
-rw-r--r--  docker-compose_darwin.spec | 48
-rw-r--r--  docs/README.md | 6
l--------- [-rw-r--r--]  project/RELEASE-PROCESS.md | 149
-rwxr-xr-x  pyinstaller/ldd | 13
-rw-r--r--  requirements-build.txt | 2
-rw-r--r--  requirements-dev.txt | 14
-rw-r--r--  requirements-indirect.txt | 28
-rw-r--r--  requirements.txt | 39
-rwxr-xr-x  script/build/image | 11
-rwxr-xr-x  script/build/linux | 17
-rwxr-xr-x  script/build/linux-entrypoint | 43
-rwxr-xr-x  script/build/osx | 16
-rwxr-xr-x  script/build/test-image | 15
-rw-r--r--  script/build/windows.ps1 | 19
-rwxr-xr-x  script/build/write-git-sha | 11
-rwxr-xr-x  script/circle/bintray-deploy.sh | 10
-rwxr-xr-x  script/docs/check_help.py | 29
-rw-r--r--  script/release/README.md | 23
-rwxr-xr-x  script/release/build-binaries | 40
-rw-r--r--  script/release/const.py | 4
-rwxr-xr-x  script/release/contributors | 30
-rwxr-xr-x  script/release/download-binaries | 39
-rwxr-xr-x  script/release/generate_changelog.sh | 42
-rwxr-xr-x  script/release/make-branch | 86
-rwxr-xr-x  script/release/push-release | 82
-rwxr-xr-x  script/release/rebase-bump-commit | 38
-rw-r--r--  script/release/release.md.tmpl | 34
-rwxr-xr-x  script/release/release.py | 123
-rw-r--r--  script/release/utils.py | 44
-rwxr-xr-x  script/run/run.sh | 58
-rwxr-xr-x  script/setup/osx | 123
-rw-r--r--  script/setup/osx_helpers.sh | 41
-rwxr-xr-x  script/test/acceptance | 3
-rwxr-xr-x  script/test/all | 20
-rwxr-xr-x  script/test/ci | 3
-rwxr-xr-x  script/test/default | 9
-rwxr-xr-x  script/test/versions.py | 52
-rw-r--r--  setup.py | 60
-rw-r--r--  tests/__init__.py | 16
-rw-r--r--  tests/acceptance/cli_test.py | 866
-rw-r--r--  tests/acceptance/context_test.py | 44
-rw-r--r--  tests/conftest.py | 240
-rw-r--r--  tests/fixtures/UpperCaseDir/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/abort-on-container-exit-0/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/abort-on-container-exit-1/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml | 10
-rw-r--r--  tests/fixtures/build-args/Dockerfile | 2
-rw-r--r--  tests/fixtures/build-ctx/Dockerfile | 2
-rw-r--r--  tests/fixtures/build-memory/Dockerfile | 2
-rw-r--r--  tests/fixtures/build-multiple-composefile/a/Dockerfile | 4
-rw-r--r--  tests/fixtures/build-multiple-composefile/b/Dockerfile | 4
-rw-r--r--  tests/fixtures/build-multiple-composefile/docker-compose.yml | 8
-rw-r--r--  tests/fixtures/bundle-with-digests/docker-compose.yml | 9
-rw-r--r--  tests/fixtures/compatibility-mode/docker-compose.yml | 8
-rw-r--r--  tests/fixtures/config-profiles/docker-compose.yml | 15
-rw-r--r--  tests/fixtures/default-env-file/.env | 24
-rw-r--r--  tests/fixtures/default-env-file/alt/.env | 4
-rw-r--r--  tests/fixtures/default-env-file/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/dockerfile-with-volume/Dockerfile | 2
-rw-r--r--  tests/fixtures/duplicate-override-yaml-files/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/echo-services-dependencies/docker-compose.yml | 10
-rw-r--r--  tests/fixtures/echo-services/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/entrypoint-dockerfile/Dockerfile | 2
-rw-r--r--  tests/fixtures/env-file-override/.env | 1
-rw-r--r--  tests/fixtures/env-file-override/.env.conf | 2
-rw-r--r--  tests/fixtures/env-file-override/.env.override | 1
-rw-r--r--  tests/fixtures/env-file-override/docker-compose.yml | 6
-rw-r--r--  tests/fixtures/env/three.env | 2
-rw-r--r--  tests/fixtures/environment-composefile/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/environment-exec/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/exec-novalue-var/docker-compose.yml | 6
-rw-r--r--  tests/fixtures/exit-code-from/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/expose-composefile/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/flag-as-service-name/Dockerfile | 3
-rw-r--r--  tests/fixtures/flag-as-service-name/docker-compose.yml | 12
-rw-r--r--  tests/fixtures/images-service-tag/Dockerfile | 2
-rw-r--r--  tests/fixtures/images-service-tag/docker-compose.yml | 11
-rw-r--r--  tests/fixtures/ipc-mode/docker-compose.yml | 17
-rw-r--r--  tests/fixtures/links-composefile/docker-compose.yml | 6
-rw-r--r--  tests/fixtures/logging-composefile-legacy/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/logging-composefile/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/logs-composefile/docker-compose.yml | 8
-rw-r--r--  tests/fixtures/logs-restart-composefile/docker-compose.yml | 7
-rw-r--r--  tests/fixtures/logs-tail-composefile/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/longer-filename-composefile/docker-compose.yaml | 2
-rw-r--r--  tests/fixtures/multiple-composefiles/compose2.yml | 2
-rw-r--r--  tests/fixtures/multiple-composefiles/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/networks/default-network-config.yml | 4
-rw-r--r--  tests/fixtures/networks/docker-compose.yml | 6
-rw-r--r--  tests/fixtures/networks/external-default.yml | 4
-rw-r--r--  tests/fixtures/no-build/docker-compose.yml | 8
-rw-r--r--  tests/fixtures/no-links-composefile/docker-compose.yml | 6
-rw-r--r--  tests/fixtures/override-files/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/override-files/extra.yml | 2
-rw-r--r--  tests/fixtures/override-yaml-files/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/ports-composefile-scale/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/ports-composefile/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/ports-composefile/expanded-notation.yml | 2
-rw-r--r--  tests/fixtures/profiles/docker-compose.yml | 20
-rw-r--r--  tests/fixtures/profiles/merge-profiles.yml | 5
-rw-r--r--  tests/fixtures/ps-services-filter/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/run-labels/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/run-workdir/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/scale/docker-compose.yml | 8
-rw-r--r--  tests/fixtures/simple-composefile-volume-ready/docker-compose.merge.yml | 2
-rw-r--r--  tests/fixtures/simple-composefile-volume-ready/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/simple-composefile/can-build-pull-failures.yml | 6
-rw-r--r--  tests/fixtures/simple-composefile/digest.yml | 2
-rw-r--r--  tests/fixtures/simple-composefile/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/simple-composefile/ignore-pull-failures.yml | 2
-rw-r--r--  tests/fixtures/simple-composefile/pull-with-build.yml | 11
-rw-r--r--  tests/fixtures/simple-dockerfile/Dockerfile | 2
-rw-r--r--  tests/fixtures/simple-failing-dockerfile/Dockerfile | 4
-rw-r--r--  tests/fixtures/sleeps-composefile/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/stop-signal-composefile/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/tagless-image/Dockerfile | 2
-rw-r--r--  tests/fixtures/top/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/unicode-environment/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/user-composefile/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/v2-dependencies/docker-compose.yml | 6
-rw-r--r--  tests/fixtures/v2-full/Dockerfile | 2
-rw-r--r--  tests/fixtures/v2-full/docker-compose.yml | 2
-rw-r--r--  tests/fixtures/v2-simple/docker-compose.yml | 4
-rw-r--r--  tests/fixtures/v2-simple/links-invalid.yml | 4
-rw-r--r--  tests/fixtures/v2-simple/one-container.yml | 5
-rw-r--r--  tests/fixtures/v2-unhealthy-dependencies/docker-compose.yml | 19
-rw-r--r--  tests/fixtures/v3-full/docker-compose.yml | 4
-rw-r--r--  tests/helpers.py | 26
-rw-r--r--  tests/integration/environment_test.py | 92
-rw-r--r--  tests/integration/metrics_test.py | 125
-rw-r--r--  tests/integration/network_test.py | 3
-rw-r--r--  tests/integration/project_test.py | 572
-rw-r--r--  tests/integration/resilience_test.py | 5
-rw-r--r--  tests/integration/service_test.py | 293
-rw-r--r--  tests/integration/state_test.py | 225
-rw-r--r--  tests/integration/testcases.py | 51
-rw-r--r--  tests/integration/volume_test.py | 8
-rw-r--r--  tests/unit/bundle_test.py | 222
-rw-r--r--  tests/unit/cli/colors_test.py | 56
-rw-r--r--  tests/unit/cli/command_test.py | 38
-rw-r--r--  tests/unit/cli/docker_client_test.py | 30
-rw-r--r--  tests/unit/cli/errors_test.py | 17
-rw-r--r--  tests/unit/cli/formatter_test.py | 7
-rw-r--r--  tests/unit/cli/log_printer_test.py | 41
-rw-r--r--  tests/unit/cli/main_test.py | 139
-rw-r--r--  tests/unit/cli/utils_test.py | 28
-rw-r--r--  tests/unit/cli/verbose_proxy_test.py | 7
-rw-r--r--  tests/unit/cli_test.py | 93
-rw-r--r--  tests/unit/config/config_test.py | 1294
-rw-r--r--  tests/unit/config/environment_test.py | 39
-rw-r--r--  tests/unit/config/interpolation_test.py | 69
-rw-r--r--  tests/unit/config/sort_services_test.py | 5
-rw-r--r--  tests/unit/config/types_test.py | 23
-rw-r--r--  tests/unit/container_test.py | 51
-rw-r--r--  tests/unit/metrics/__init__.py | 0
-rw-r--r--  tests/unit/metrics/metrics_test.py | 36
-rw-r--r--  tests/unit/network_test.py | 23
-rw-r--r--  tests/unit/parallel_test.py | 21
-rw-r--r--  tests/unit/progress_stream_test.py | 60
-rw-r--r--  tests/unit/project_test.py | 435
-rw-r--r--  tests/unit/service_test.py | 233
-rw-r--r--  tests/unit/split_buffer_test.py | 5
-rw-r--r--  tests/unit/timeparse_test.py | 3
-rw-r--r--  tests/unit/utils_test.py | 22
-rw-r--r--  tests/unit/volume_test.py | 5
-rw-r--r--  tox.ini | 7
256 files changed, 10072 insertions, 10167 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index d422fdcc..00000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,92 +0,0 @@
-version: 2
-jobs:
- test:
- macos:
- xcode: "8.3.3"
- steps:
- - checkout
- - run:
- name: setup script
- command: ./script/setup/osx
- - run:
- name: install tox
- command: sudo pip install --upgrade tox==2.1.1
- - run:
- name: unit tests
- command: tox -e py27,py36 -- tests/unit
-
- build-osx-binary:
- macos:
- xcode: "8.3.3"
- steps:
- - checkout
- - run:
- name: upgrade python tools
- command: sudo pip install --upgrade pip virtualenv
- - run:
- name: setup script
- command: ./script/setup/osx
- - run:
- name: build script
- command: ./script/build/osx
- - store_artifacts:
- path: dist/docker-compose-Darwin-x86_64
- destination: docker-compose-Darwin-x86_64
- # - deploy:
- # name: Deploy binary to bintray
- # command: |
- # OS_NAME=Darwin PKG_NAME=osx ./script/circle/bintray-deploy.sh
-
-
- build-linux-binary:
- machine:
- enabled: true
- steps:
- - checkout
- - run:
- name: build Linux binary
- command: ./script/build/linux
- - store_artifacts:
- path: dist/docker-compose-Linux-x86_64
- destination: docker-compose-Linux-x86_64
- - deploy:
- name: Deploy binary to bintray
- command: |
- OS_NAME=Linux PKG_NAME=linux ./script/circle/bintray-deploy.sh
-
- trigger-osx-binary-deploy:
- # We use a separate repo to build OSX binaries meant for distribution
- # with support for OSSX 10.11 (xcode 7). This job triggers a build on
- # that repo.
- docker:
- - image: alpine:3.6
-
- steps:
- - run:
- name: install curl
- command: apk update && apk add curl
-
- - run:
- name: API trigger
- command: |
- curl -X POST -H "Content-Type: application/json" -d "{\
- \"build_parameters\": {\
- \"COMPOSE_BRANCH\": \"${CIRCLE_BRANCH}\"\
- }\
- }" https://circleci.com/api/v1.1/project/github/docker/compose-osx-release?circle-token=${OSX_RELEASE_TOKEN} \
- > /dev/null
-
-
-workflows:
- version: 2
- all:
- jobs:
- - test
- - build-linux-binary
- - build-osx-binary
- - trigger-osx-binary-deploy:
- filters:
- branches:
- only:
- - master
- - /bump-.*/
diff --git a/.dockerignore b/.dockerignore
index eccd86dd..c1323a91 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,11 +1,14 @@
*.egg-info
.coverage
.git
+.github
.tox
build
+binaries
coverage-html
docs/_site
-venv
+*venv
.tox
**/__pycache__
*.pyc
+Jenkinsfile
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..85ab9015
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,6 @@
+# GitHub code owners
+# See https://help.github.com/articles/about-codeowners/
+#
+# KEEP THIS FILE SORTED. Order is important. Last match takes precedence.
+
+* @aiordache @ndeloof @rumpl @ulyssessouza
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000..2f3012f6
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,63 @@
+---
+name: Bug report
+about: Report a bug encountered while using docker-compose
+title: ''
+labels: kind/bug
+assignees: ''
+
+---
+
+<!--
+Welcome to the docker-compose issue tracker! Before creating an issue, please heed the following:
+
+1. This tracker should only be used to report bugs and request features / enhancements to docker-compose
+ - For questions and general support, use https://forums.docker.com
+ - For documentation issues, use https://github.com/docker/docker.github.io
+  - For issues with the `docker stack` commands and version 3 of the Compose file format, use
+ https://github.com/docker/cli
+2. Use the search function before creating a new issue. Duplicates will be closed and directed to
+ the original discussion.
+3. When making a bug report, make sure you provide all required information. The easier it is for
+ maintainers to reproduce, the faster it'll be fixed.
+-->
+
+## Description of the issue
+
+## Context information (for bug reports)
+
+**Output of `docker-compose version`**
+```
+(paste here)
+```
+
+**Output of `docker version`**
+```
+(paste here)
+```
+
+**Output of `docker-compose config`**
+(Make sure to add the relevant `-f` and other flags)
+```
+(paste here)
+```
+
+
+## Steps to reproduce the issue
+
+1.
+2.
+3.
+
+### Observed result
+
+### Expected result
+
+### Stacktrace / full error message
+
+```
+(paste here)
+```
+
+## Additional information
+
+OS version / distribution, `docker-compose` install method, etc.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000..603d34c3
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,32 @@
+---
+name: Feature request
+about: Suggest an idea to improve Compose
+title: ''
+labels: kind/feature
+assignees: ''
+
+---
+
+<!--
+Welcome to the docker-compose issue tracker! Before creating an issue, please heed the following:
+
+1. This tracker should only be used to report bugs and request features / enhancements to docker-compose
+ - For questions and general support, use https://forums.docker.com
+ - For documentation issues, use https://github.com/docker/docker.github.io
+  - For issues with the `docker stack` commands and version 3 of the Compose file format, use
+ https://github.com/docker/cli
+2. Use the search function before creating a new issue. Duplicates will be closed and directed to
+ the original discussion.
+-->
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/question-about-using-compose.md b/.github/ISSUE_TEMPLATE/question-about-using-compose.md
new file mode 100644
index 00000000..ccb4e9b3
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/question-about-using-compose.md
@@ -0,0 +1,12 @@
+---
+name: Question about using Compose
+about: This is not the appropriate channel
+title: ''
+labels: kind/question
+assignees: ''
+
+---
+
+For questions about using `docker-compose`, please post on our forums: https://forums.docker.com
+
+Posts that are not a bug report or a feature/enhancement request will not be addressed on this issue tracker.
diff --git a/.github/stale.yml b/.github/stale.yml
new file mode 100644
index 00000000..6de76aef
--- /dev/null
+++ b/.github/stale.yml
@@ -0,0 +1,59 @@
+# Configuration for probot-stale - https://github.com/probot/stale
+
+# Number of days of inactivity before an Issue or Pull Request becomes stale
+daysUntilStale: 180
+
+# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
+# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
+daysUntilClose: 7
+
+# Only issues or pull requests with all of these labels are checked for staleness. Defaults to `[]` (disabled)
+onlyLabels: []
+
+# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
+exemptLabels:
+ - kind/feature
+
+# Set to true to ignore issues in a project (defaults to false)
+exemptProjects: false
+
+# Set to true to ignore issues in a milestone (defaults to false)
+exemptMilestones: false
+
+# Set to true to ignore issues with an assignee (defaults to false)
+exemptAssignees: true
+
+# Label to use when marking as stale
+staleLabel: stale
+
+# Comment to post when marking as stale. Set to `false` to disable
+markComment: >
+ This issue has been automatically marked as stale because it has not had
+ recent activity. It will be closed if no further activity occurs. Thank you
+ for your contributions.
+
+# Comment to post when removing the stale label.
+unmarkComment: >
+  This issue has been automatically unmarked as stale due to recent activity.
+
+# Comment to post when closing a stale Issue or Pull Request.
+closeComment: >
+  This issue has been automatically closed because it had no recent activity during the stale period.
+
+# Limit the number of actions per hour, from 1-30. Default is 30
+limitPerRun: 30
+
+# Limit to only `issues` or `pulls`
+only: issues
+
+# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls':
+# pulls:
+# daysUntilStale: 30
+# markComment: >
+# This pull request has been automatically marked as stale because it has not had
+# recent activity. It will be closed if no further activity occurs. Thank you
+# for your contributions.
+
+# issues:
+# exemptLabels:
+# - confirmed
diff --git a/.gitignore b/.gitignore
index ef04ca15..79888274 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,14 +1,18 @@
*.egg-info
*.pyc
+*.swo
+*.swp
+.cache
.coverage*
+.DS_Store
+.idea
+
/.tox
+/binaries
/build
+/compose/GITSHA
/coverage-html
/dist
/docs/_site
-/venv
-README.rst
-compose/GITSHA
-*.swo
-*.swp
-.DS_Store
+/README.rst
+/*venv
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b7bcc846..05cd5202 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -14,12 +14,15 @@
- id: requirements-txt-fixer
- id: trailing-whitespace
- repo: git://github.com/asottile/reorder_python_imports
- sha: v0.3.5
+ sha: v1.3.4
hooks:
- id: reorder-python-imports
- language_version: 'python2.7'
+ language_version: 'python3.7'
args:
- - --add-import
- - from __future__ import absolute_import
- - --add-import
- - from __future__ import unicode_literals
+ - --py3-plus
+- repo: https://github.com/asottile/pyupgrade
+ rev: v2.1.0
+ hooks:
+ - id: pyupgrade
+ args:
+ - --py3-plus
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3709e263..525e5ffc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,792 @@
Change log
==========
+1.29.2 (2021-05-10)
+-------------------
+
+[List of PRs / issues for this release](https://github.com/docker/compose/milestone/59?closed=1)
+
+### Miscellaneous
+
+- Remove advertisement for `docker compose` in the `up` command to avoid annoyance
+
+- Bump `py` to `1.10.0` in `requirements-indirect.txt`
+
+1.29.1 (2021-04-13)
+-------------------
+
+[List of PRs / issues for this release](https://github.com/docker/compose/milestone/58?closed=1)
+
+### Bugs
+
+- Fix for invalid handler warning on Windows builds
+
+- Fix config hash to trigger container recreation on IPC mode updates
+
+- Fix conversion map for `placement.max_replicas_per_node`
+
+- Remove extra scan suggestion on build
+
+1.29.0 (2021-04-06)
+-------------------
+
+[List of PRs / issues for this release](https://github.com/docker/compose/milestone/56?closed=1)
+
+### Features
+
+- Add profile filter to `docker-compose config`
+
+- Add a `depends_on` condition to wait for successful service completion
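
A minimal sketch of the new `depends_on` condition (service names and images are hypothetical; the condition keyword is the one this release adds to the Compose schema):

```yaml
services:
  migrate:
    image: alpine
    command: sh -c "echo 'running migrations'"
  app:
    image: alpine
    depends_on:
      migrate:
        condition: service_completed_successfully  # start app only after migrate exits with code 0
```

With this file, `docker-compose up app` runs `migrate` to completion before starting `app`.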
+
+### Miscellaneous
+
+- Add image scan message on build
+
+- Update warning message for `--no-ansi` to mention `--ansi never` as alternative
+
+- Bump docker-py to 5.0.0
+
+- Bump PyYAML to 5.4.1
+
+- Bump python-dotenv to 0.17.0
+
+1.28.6 (2021-03-23)
+-------------------
+
+[List of PRs / issues for this release](https://github.com/docker/compose/milestone/57?closed=1)
+
+### Bugs
+
+- Make `--env-file` relative to the current working directory, and error out for invalid paths. Environment file paths set with `--env-file` are relative to the current working directory, while the default `.env` file is located in the project directory, which by default is the base directory of the Compose file.
+
+- Fix missing service property `storage_opt` by updating the compose schema
+
+- Fix build `extra_hosts` list format
+
+- Remove extra error message on `exec`
+
+### Miscellaneous
+
+- Add `compose.yml` and `compose.yaml` to default filename list
+
+1.28.5 (2021-02-25)
+-------------------
+
+[List of PRs / issues for this release](https://github.com/docker/compose/milestone/55?closed=1)
+
+### Bugs
+
+- Fix OpenSSL version mismatch error when shelling out to the ssh client (via bump to docker-py 4.4.4 which contains the fix)
+
+- Add missing build flags to the native builder: `platform`, `isolation` and `extra_hosts`
+
+- Remove info message on native build
+
+- Avoid fetching logs when service logging driver is set to 'none'
+
+1.28.4 (2021-02-18)
+-------------------
+
+[List of PRs / issues for this release](https://github.com/docker/compose/milestone/54?closed=1)
+
+### Bugs
+
+- Fix SSH port parsing by bumping docker-py to 4.4.3
+
+### Miscellaneous
+
+- Bump Python to 3.7.10
+
+1.28.3 (2021-02-17)
+-------------------
+
+[List of PRs / issues for this release](https://github.com/docker/compose/milestone/53?closed=1)
+
+### Bugs
+
+- Fix SSH hostname parsing when it contains leading s/h, and remove the quiet option that was hiding the error (via docker-py bump to 4.4.2)
+
+- Fix key error for '--no-log-prefix' option
+
+- Fix incorrect CLI environment variable name for service profiles: `COMPOSE_PROFILES` instead of `COMPOSE_PROFILE`
+
+- Fix fish completion
+
+### Miscellaneous
+
+- Bump cryptography to 3.3.2
+
+- Remove log driver filter
+
+1.28.2 (2021-01-26)
+-------------------
+
+### Miscellaneous
+
+- CI setup update
+
+1.28.1 (2021-01-25)
+-------------------
+
+### Bugs
+
+- Revert to Python 3.7 for Linux static builds
+
+- Add bash completion for `docker-compose logs|up --no-log-prefix`
+
+1.28.0 (2021-01-20)
+-------------------
+
+### Features
+
+- Support for Nvidia GPUs via device requests (see the example after this list)
+
+- Support for service profiles (see the example after this list)
+
+- Change the SSH connection approach to match the Docker CLI's, shelling out to the local SSH client (the old paramiko-based behaviour can be re-enabled by setting the `COMPOSE_PARAMIKO_SSH` environment variable)
+
+- Add flag to disable log prefix
+
+- Add flag for ansi output control
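
A minimal sketch of the GPU and profile features above (service names and the images are hypothetical; the GPU syntax assumes the `deploy.resources.reservations.devices` form used for device requests):

```yaml
services:
  train:
    image: tensorflow/tensorflow:latest-gpu
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia          # request one Nvidia GPU
              count: 1
              capabilities: ["gpu"]
  debugger:
    image: busybox
    profiles: ["debug"]               # only started when the "debug" profile is active
```

Running `docker-compose --profile debug up` (or setting `COMPOSE_PROFILES=debug`) starts `debugger`; services without a `profiles` attribute are always started.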
+
+### Bugs
+
+- Make `parallel_pull=True` by default
+
+- Bring back warning for configs in non-swarm mode
+
+- Take `--file` into account when defining `project_dir`
+
+- On `compose up`, attach only to services we read logs from
+
+### Miscellaneous
+
+- Make COMPOSE_DOCKER_CLI_BUILD=1 the default
+
+- Add usage metrics
+
+- Sync schema with COMPOSE specification
+
+- Improve failure report for missing mandatory environment variables
+
+- Bump attrs to 20.3.0
+
+- Bump more_itertools to 8.6.0
+
+- Bump cryptography to 3.2.1
+
+- Bump cffi to 1.14.4
+
+- Bump virtualenv to 20.2.2
+
+- Bump bcrypt to 3.2.0
+
+- Bump gitpython to 3.1.11
+
+- Bump docker-py to 4.4.1
+
+- Bump Python to 3.9
+
+- Linux: bump Debian base image from stretch to buster (required for Python 3.9)
+
+- macOS: OpenSSL 1.1.1g to 1.1.1h, Python 3.7.7 to 3.9.0
+
+- Bump pyinstaller to 4.1
+
+- Loosen restriction on base images to latest minor
+
+- Updates of READMEs
+
+
+1.27.4 (2020-09-24)
+-------------------
+
+### Bugs
+
+- Remove path checks for bind mounts
+
+- Fix port rendering to output long form syntax for non-v1
+
+- Add protocol to the docker socket address
+
+1.27.3 (2020-09-16)
+-------------------
+
+### Bugs
+
+- Merge `max_replicas_per_node` on `docker-compose config`
+
+- Fix `depends_on` serialization on `docker-compose config`
+
+- Fix scaling when some containers are not running on `docker-compose up`
+
+- Enable relative paths for `driver_opts.device` for `local` driver
+
+- Allow strings for `cpus` fields
+
+1.27.2 (2020-09-10)
+-------------------
+
+### Bugs
+
+- Fix bug on `docker-compose run` container attach
+
+1.27.1 (2020-09-10)
+-------------------
+
+### Bugs
+
+- Fix `docker-compose run` when `service.scale` is specified
+
+- Allow `driver` property for external networks as temporary workaround for swarm network propagation issue
+
+- Pin new internal schema version to `3.9` as the default
+
+- Preserve the version when configured in the compose file
+
+1.27.0 (2020-09-07)
+-------------------
+
+### Features
+
+- Merge 2.x and 3.x compose formats and align with COMPOSE_SPEC schema
+
+- Implement service mode for ipc (see the example after this list)
+
+- Pass `COMPOSE_PROJECT_NAME` environment variable in container mode
+
+- Make run behave in the same way as up
+
+- Use `docker build` on `docker-compose run` when `COMPOSE_DOCKER_CLI_BUILD` environment variable is set
+
+- Use docker-py default API version for engine queries (`auto`)
+
+- Parse `network_mode` on build
+
+### Bugs
+
+- Ignore build context path validation when building is not required
+
+- Fix float to bytes conversion via docker-py bump to 4.3.1
+
+- Fix scale bug when deploy section is set
+
+- Fix `docker-py` bump in `setup.py`
+
+- Fix experimental build failure detection
+
+- Fix context propagation to docker cli
+
+### Miscellaneous
+
+- Drop support for Python 2.7
+
+- Bump `docker-py` to 4.3.1
+
+- Bump `tox` to 3.19.0
+
+- Bump `virtualenv` to 20.0.30
+
+- Add script for docs synchronization
+
+1.26.2 (2020-07-02)
+-------------------
+
+### Bugs
+
+- Enforce `docker-py` 4.2.2 as minimum version when installing with pip
+
+1.26.1 (2020-06-30)
+-------------------
+
+### Features
+
+- Bump `docker-py` from 4.2.1 to 4.2.2
+
+### Bugs
+
+- Enforce `docker-py` 4.2.1 as minimum version when installing with pip
+
+- Fix context load for non-docker endpoints
+
+1.26.0 (2020-06-03)
+-------------------
+
+### Features
+
+- Add `docker context` support
+
+- Add missing test dependency `ddt` to `setup.py`
+
+- Add `--attach-dependencies` to command `up` for attaching to dependencies
+
+- Allow compatibility option with `COMPOSE_COMPATIBILITY` environment variable
+
+- Bump `Pytest` to 5.3.4 and refactor for compatibility with the new version
+
+- Bump `OpenSSL` from 1.1.1f to 1.1.1g
+
+- Bump `docker-py` from 4.2.0 to 4.2.1
+
+### Bugs
+
+- Properly escape values coming from env_files
+
+- Sync compose-schemas with upstream (docker/cli)
+
+- Remove `None` entries on exec command
+
+- Add `python-dotenv` to delegate `.env` file processing
+
+- Don't adjust output on terminal width when piped into another command
+
+- Show an error message when `version` attribute is malformed
+
+- Fix HTTPS connection when DOCKER_HOST is remote
+
+1.25.5 (2020-02-04)
+-------------------
+
+### Features
+
+- Bump OpenSSL from 1.1.1d to 1.1.1f
+
+- Add 3.8 compose version
+
+1.25.4 (2020-01-23)
+-------------------
+
+### Bugfixes
+
+- Fix CI script to enforce macOS 10.11 as the minimal supported version
+
+- Fix docker-compose exec for keys with no value
+
+1.25.3 (2020-01-23)
+-------------------
+
+### Bugfixes
+
+- Fix CI script to enforce compilation with Python 3
+
+- Fix binary's sha256 in the release page
+
+1.25.2 (2020-01-20)
+-------------------
+
+### Features
+
+- Allow compatibility option with `COMPOSE_COMPATIBILITY` environment variable
+
+- Bump PyInstaller from 3.5 to 3.6
+
+- Bump pysocks from 1.6.7 to 1.7.1
+
+- Bump websocket-client from 0.32.0 to 0.57.0
+
+- Bump urllib3 from 1.24.2 to 1.25.7
+
+- Bump jsonschema from 3.0.1 to 3.2.0
+
+- Bump PyYAML from 4.2b1 to 5.3
+
+- Bump certifi from 2017.4.17 to 2019.11.28
+
+- Bump coverage from 4.5.4 to 5.0.3
+
+- Bump paramiko from 2.6.0 to 2.7.1
+
+- Bump cached-property from 1.3.0 to 1.5.1
+
+- Bump minor Linux and MacOSX dependencies
+
+### Bugfixes
+
+- Validate version format on formats 2+
+
+- Assume infinite terminal width when not running in a terminal
+
+1.25.1 (2020-01-06)
+-------------------
+
+### Features
+
+- Bump `pytest-cov` 2.8.1
+
+- Bump `flake8` 3.7.9
+
+- Bump `coverage` 4.5.4
+
+### Bugfixes
+
+- Decode APIError explanation to Unicode before it is used when starting or creating a container
+
+- Report when images cannot be pulled and must be built
+
+- Discard the label `com.docker.compose.filepaths` when its value is None, which typically happens when the configuration comes from stdin
+
+- Added the OSX binary as a directory to solve the slow start-up time caused by the macOS Catalina binary scan
+
+- Passed in HOME env-var in container mode (running with `script/run/run.sh`)
+
+- Reverted the "only pull images that we can't build" behavior and replaced it with a warning naming the images that can't be pulled and must be built
+
+
+1.25.0 (2019-11-18)
+-------------------
+
+### Features
+
+- Set no-colors to true if CLICOLOR env variable is set to 0
+
+- Add working dir, config files and env file in service labels
+
+- Add dependencies for ARM build
+
+- Add BuildKit support, use `DOCKER_BUILDKIT=1` and `COMPOSE_DOCKER_CLI_BUILD=1`
+
+- Bump paramiko to 2.6.0
+
+- Add tag `docker-compose:latest`
+
+- Add `docker-compose:<version>-alpine` image/tag
+
+- Add `docker-compose:<version>-debian` image/tag
+
+- Bumped `docker-py` 4.1.0
+
+- Support `requests` up to version 2.22.0
+
+- Drops empty tag on `build:cache_from`
+
+- `Dockerfile` now generates `libmusl` binaries for alpine
+
+- Only pull images that can't be built
+
+- Attribute `scale` can now accept `0` as a value
+
+- Added `--quiet` build flag
+
+- Added `--no-interpolate` to `docker-compose config`
+
+- Bump OpenSSL for macOS build (`1.1.0j` to `1.1.1c`)
+
+- Added `--no-rm` to `build` command
+
+- Added support for `credential_spec` (see the example after this list)
+
+- Resolve digests without pulling image
+
+- Upgrade `pyyaml` to `4.2b1`
+
+- Lowered severity to `warning` if `down` tries to remove a nonexistent image
+
+- Use improved API fields for project events when possible
+
+- Update `setup.py` for modern `pypi/setuptools` and remove `pandoc` dependencies
+
+- Removed `Dockerfile.armhf` which is no longer needed
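
A minimal sketch of the file-based form of `credential_spec` (service name and file name are hypothetical; this is typically used for Windows gMSA credentials):

```yaml
services:
  web:
    image: myorg/web
    credential_spec:
      file: my-credential-spec.json  # looked up in Docker's credential specs directory
```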
+
+### Bugfixes
+
+- Make container service color deterministic, remove red from chosen colors
+
+- Fix non-ASCII character errors (Python 2 only)
+
+- Format image size as decimal to align with the Docker CLI
+
+- Use Python POSIX support to get tty size
+
+- Fix same file 'extends' optimization
+
+- Fixed stdin_open
+
+- Fixed `--remove-orphans` when used with `up --no-start`
+
+- Fixed `docker-compose ps --all`
+
+- Fixed `depends_on` dependency recreation behavior
+
+- Fixed bash completion for `build --memory`
+
+- Fixed misleading warning concerning env vars when performing an `exec` command
+
+- Fixed failure check in parallel_execute_watch
+
+- Fixed race condition after pulling image
+
+- Fixed error on duplicate mount points
+
+- Fixed merge on networks section
+
+- Always connect Compose container to `stdin`
+
+- Fixed the presentation of failed services on 'docker-compose start' when containers are not available
+
+1.24.1 (2019-06-24)
+-------------------
+
+### Bugfixes
+
+- Fixed acceptance tests
+
+1.24.0 (2019-03-28)
+-------------------
+
+### Features
+
+- Added support for connecting to the Docker Engine using the `ssh` protocol.
+
+- Added a `--all` flag to `docker-compose ps` to include stopped one-off containers
+ in the command's output.
+
+- Add bash completion for `ps --all|-a`
+
+- Support for credential_spec
+
+- Add `--parallel` to `docker build`'s options in `bash` and `zsh` completion
+
+### Bugfixes
+
+- Fixed a bug where some valid credential helpers weren't properly handled by Compose
+ when attempting to pull images from private registries.
+
+- Fixed an issue where the output of `docker-compose start` before containers were created
+ was misleading
+
+- To match the Docker CLI behavior and to avoid confusing issues, Compose will no longer
+ accept whitespace in variable names sourced from environment files.
+
+- Compose will now report a configuration error if a service attempts to declare
+ duplicate mount points in the volumes section.
+
+- Fixed an issue with the containerized version of Compose that prevented users from
+ writing to stdin during interactive sessions started by `run` or `exec`.
+
+- One-off containers started by `run` no longer adopt the restart policy of the service,
+ and are instead set to never restart.
+
+- Fixed an issue that caused some container events to not appear in the output of
+ the `docker-compose events` command.
+
+- Missing images will no longer stop the execution of `docker-compose down` commands
+ (a warning will be displayed instead).
+
+- Force `virtualenv` version for macOS CI
+
+- Fix merging of compose files when network has `None` config
+
+- Fix `CTRL+C` issues by enabling `bootloader_ignore_signals` in `pyinstaller`
+
+- Bump `docker-py` version to `3.7.2` to fix SSH and proxy config issues
+
+- Fix release script and some typos on release documentation
+
+1.23.2 (2018-11-28)
+-------------------
+
+### Bugfixes
+
+- Reverted a 1.23.0 change that appended random strings to container names
+ created by `docker-compose up`, causing addressability issues.
+ Note: Containers created by `docker-compose run` will continue to use
+ randomly generated names to avoid collisions during parallel runs.
+
+- Fixed an issue where some `dockerfile` paths would fail unexpectedly when
+ attempting to build on Windows.
+
+- Fixed a bug where build context URLs would fail to build on Windows.
+
+- Fixed a bug that caused `run` and `exec` commands to fail for some otherwise
+ accepted values of the `--host` parameter.
+
+- Fixed an issue where overrides for the `storage_opt` and `isolation` keys in
+ service definitions weren't properly applied.
+
+- Fixed a bug where some invalid Compose files would raise an uncaught
+ exception during validation.
+
+1.23.1 (2018-11-01)
+-------------------
+
+### Bugfixes
+
+- Fixed a bug where working with containers created with a previous (< 1.23.0)
+ version of Compose would cause unexpected crashes
+
+- Fixed an issue where the behavior of the `--project-directory` flag would
+ vary depending on which subcommand was being used.
+
+1.23.0 (2018-10-30)
+-------------------
+
+### Important note
+
+The default naming scheme for containers created by Compose in this version
+has changed from `<project>_<service>_<index>` to
+`<project>_<service>_<index>_<slug>`, where `<slug>` is a randomly-generated
+hexadecimal string. Please make sure to update scripts relying on the old
+naming scheme accordingly before upgrading.
+
+### Features
+
+- Logs for containers restarting after a crash will now appear in the output
+ of the `up` and `logs` commands.
+
+- Added `--hash` option to the `docker-compose config` command, allowing users
+ to print a hash string for each service's configuration to facilitate rolling
+ updates.
+
+- Added `--parallel` flag to the `docker-compose build` command, allowing
+ Compose to build up to 5 images simultaneously.
+
+- Output for the `pull` command now reports status / progress even when pulling
+ multiple images in parallel.
+
+- For images with multiple names, Compose will now attempt to match the one
+ present in the service configuration in the output of the `images` command.
+
+### Bugfixes
+
+- Parallel `run` commands for the same service will no longer fail due to name
+ collisions.
+
+- Fixed an issue where paths longer than 260 characters on Windows clients would
+ cause `docker-compose build` to fail.
+
+- Fixed a bug where attempting to mount `/var/run/docker.sock` with
+ Docker Desktop for Windows would result in failure.
+
+- The `--project-directory` option is now used by Compose to determine where to
+ look for the `.env` file.
+
+- `docker-compose build` no longer fails when attempting to pull an image with
+ credentials provided by the gcloud credential helper.
+
+- Fixed the `--exit-code-from` option in `docker-compose up` to always report
+ the actual exit code even when the watched container isn't the cause of the
+ exit.
+
+- Fixed an issue that would prevent recreating a service in some cases where
+ a volume would be mapped to the same mountpoint as a volume declared inside
+ the image's Dockerfile.
+
+- Fixed a bug that caused hash configuration with multiple networks to be
+ inconsistent, causing some services to be unnecessarily restarted.
+
+- Fixed a bug that would cause failures with variable substitution for services
+ with a name containing one or more dot characters
+
+- Fixed a pipe handling issue when using the containerized version of Compose.
+
+- Fixed a bug causing `external: false` entries in the Compose file to be
+ printed as `external: true` in the output of `docker-compose config`
+
+- Fixed a bug where issuing a `docker-compose pull` command on services
+ without a defined image key would cause Compose to crash
+
+- Volumes and binds are now mounted in the order they're declared in the
+ service definition
+
+### Miscellaneous
+
+- The `zsh` completion script has been updated with new options, and no
+ longer suggests container names where service names are expected.
+
+1.22.0 (2018-07-17)
+-------------------
+
+### Features
+
+#### Compose format version 3.7
+
+- Introduced version 3.7 of the `docker-compose.yml` specification.
+ This version requires Docker Engine 18.06.0 or above.
+
+- Added support for `rollback_config` in the deploy configuration
+
+- Added support for the `init` parameter in service configurations
+
+- Added support for extension fields in service, network, volume, secret,
+ and config configurations
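
A minimal sketch combining the version 3.7 additions above (service name and extension-field content are hypothetical):

```yaml
version: "3.7"
services:
  web:
    image: busybox
    init: true                       # run an init process as PID 1 inside the container
    x-release-notes: "canary first"  # extension field; ignored by schema validation
    deploy:
      rollback_config:
        parallelism: 1
        order: stop-first
```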
+
+#### Compose format version 2.4
+
+- Added support for extension fields in service, network,
+ and volume configurations
+
+### Bugfixes
+
+- Fixed a bug that prevented deployment with some Compose files when
+ `DOCKER_DEFAULT_PLATFORM` was set
+
+- Compose will no longer try to create containers or volumes with
+ invalid starting characters
+
+- Fixed several bugs that prevented Compose commands from working properly
+ with containers created with an older version of Compose
+
+- Fixed an issue with the output of `docker-compose config` with the
+ `--compatibility-mode` flag enabled when the source file contains
+ attachable networks
+
+- Fixed a bug that prevented the `gcloud` credential store from working
+ properly when used with the Compose binary on UNIX
+
+- Fixed a bug that caused connection errors when trying to operate
+ over a non-HTTPS TCP connection on Windows
+
+- Fixed a bug that caused builds to fail on Windows if the Dockerfile
+ was located in a subdirectory of the build context
+
+- Fixed an issue that prevented proper parsing of UTF-8 BOM encoded
+ Compose files on Windows
+
+- Fixed an issue with handling of the double-wildcard (`**`) pattern in `.dockerignore` files when using `docker-compose build`
+
+- Fixed a bug that caused auth values in legacy `.dockercfg` files to be ignored
+- `docker-compose build` will no longer attempt to create image names starting with an invalid character
+
+1.21.2 (2018-05-03)
+-------------------
+
+### Bugfixes
+
+- Fixed a bug where the ip_range attribute in IPAM configs was prevented
+ from passing validation
+
+1.21.1 (2018-04-27)
+-------------------
+
+### Bugfixes
+
+- In 1.21.0, we introduced a change to how project names are sanitized for
+ internal use in resource names. This caused issues when manipulating an
+ existing, deployed application whose name had changed as a result.
+ This release properly detects resources using "legacy" naming conventions.
+
+- Fixed an issue where specifying an in-context Dockerfile using an absolute
+ path would fail despite being valid.
+
+- Fixed a bug where IPAM option changes were incorrectly detected, preventing
+ redeployments.
+
+- Validation of v2 files now properly checks the structure of IPAM configs.
+
+- Improved support for credentials stores on Windows to include binaries using
+ extensions other than `.exe`. The list of valid extensions is determined by
+ the contents of the `PATHEXT` environment variable.
+
+- Fixed a bug where Compose would generate invalid binds containing duplicate
+ elements with some v3.2 files, triggering errors at the Engine level during
+ deployment.
+
1.21.0 (2018-04-10)
-------------------
@@ -197,7 +983,7 @@ Change log
preventing Compose from recovering volume data from previous containers for
anonymous volumes
-- Added limit for number of simulatenous parallel operations, which should
+- Added limit for number of simultaneous parallel operations, which should
prevent accidental resource exhaustion of the server. Default is 64 and
can be configured using the `COMPOSE_PARALLEL_LIMIT` environment variable
@@ -495,7 +1281,7 @@ Change log
### Bugfixes
- Volumes specified through the `--volume` flag of `docker-compose run` now
- complement volumes declared in the service's defintion instead of replacing
+ complement volumes declared in the service's definition instead of replacing
them
- Fixed a bug where using multiple Compose files would unset the scale value
diff --git a/Dockerfile b/Dockerfile
index 9df78a82..d934a887 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,39 +1,100 @@
-FROM python:3.6
-
-RUN set -ex; \
- apt-get update -qq; \
- apt-get install -y \
- locales \
- curl \
- python-dev \
- git
-
-RUN curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/x86_64/docker-17.12.0-ce.tgz" && \
- SHA256=692e1c72937f6214b1038def84463018d8e320c8eaf8530546c84c2f8f9c767d; \
- echo "${SHA256} dockerbins.tgz" | sha256sum -c - && \
- tar xvf dockerbins.tgz docker/docker --strip-components 1 && \
- mv docker /usr/local/bin/docker && \
- chmod +x /usr/local/bin/docker && \
- rm dockerbins.tgz
-
-# Python3 requires a valid locale
-RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen
-ENV LANG en_US.UTF-8
-
-RUN useradd -d /home/user -m -s /bin/bash user
-WORKDIR /code/
+ARG DOCKER_VERSION=19.03
+ARG PYTHON_VERSION=3.7.10
+
+ARG BUILD_ALPINE_VERSION=3.12
+ARG BUILD_CENTOS_VERSION=7
+ARG BUILD_DEBIAN_VERSION=slim-stretch
+
+ARG RUNTIME_ALPINE_VERSION=3.12
+ARG RUNTIME_CENTOS_VERSION=7
+ARG RUNTIME_DEBIAN_VERSION=stretch-slim
+
+ARG DISTRO=alpine
-RUN pip install tox==2.1.1
+FROM docker:${DOCKER_VERSION} AS docker-cli
-ADD requirements.txt /code/
-ADD requirements-dev.txt /code/
-ADD .pre-commit-config.yaml /code/
-ADD setup.py /code/
-ADD tox.ini /code/
-ADD compose /code/compose/
-RUN tox --notest
+FROM python:${PYTHON_VERSION}-alpine${BUILD_ALPINE_VERSION} AS build-alpine
+RUN apk add --no-cache \
+ bash \
+ build-base \
+ ca-certificates \
+ curl \
+ gcc \
+ git \
+ libc-dev \
+ libffi-dev \
+ libgcc \
+ make \
+ musl-dev \
+ openssl \
+ openssl-dev \
+ zlib-dev
+ENV BUILD_BOOTLOADER=1
+
+FROM python:${PYTHON_VERSION}-${BUILD_DEBIAN_VERSION} AS build-debian
+RUN apt-get update && apt-get install --no-install-recommends -y \
+ curl \
+ gcc \
+ git \
+ libc-dev \
+ libffi-dev \
+ libgcc-6-dev \
+ libssl-dev \
+ make \
+ openssl \
+ zlib1g-dev
+
+FROM centos:${BUILD_CENTOS_VERSION} AS build-centos
+RUN yum install -y \
+ gcc \
+ git \
+ libffi-devel \
+ make \
+ openssl \
+ openssl-devel
+WORKDIR /tmp/python3/
+ARG PYTHON_VERSION
+RUN curl -L https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz | tar xzf - \
+ && cd Python-${PYTHON_VERSION} \
+ && ./configure --enable-optimizations --enable-shared --prefix=/usr LDFLAGS="-Wl,-rpath /usr/lib" \
+ && make altinstall
+RUN alternatives --install /usr/bin/python python /usr/bin/python2.7 50
+RUN alternatives --install /usr/bin/python python /usr/bin/python$(echo "${PYTHON_VERSION%.*}") 60
+RUN curl https://bootstrap.pypa.io/get-pip.py | python -
+
+FROM build-${DISTRO} AS build
+ENTRYPOINT ["sh", "/usr/local/bin/docker-compose-entrypoint.sh"]
+WORKDIR /code/
+COPY docker-compose-entrypoint.sh /usr/local/bin/
+COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
+RUN pip install \
+ virtualenv==20.4.0 \
+ tox==3.21.2
+COPY requirements-dev.txt .
+COPY requirements-indirect.txt .
+COPY requirements.txt .
+RUN pip install -r requirements.txt -r requirements-indirect.txt -r requirements-dev.txt
+COPY .pre-commit-config.yaml .
+COPY tox.ini .
+COPY setup.py .
+COPY README.md .
+COPY compose compose/
+RUN tox -e py37 --notest
+COPY . .
+ARG GIT_COMMIT=unknown
+ENV DOCKER_COMPOSE_GITSHA=$GIT_COMMIT
+RUN script/build/linux-entrypoint
-ADD . /code/
-RUN chown -R user /code/
+FROM scratch AS bin
+ARG TARGETARCH
+ARG TARGETOS
+COPY --from=build /usr/local/bin/docker-compose /docker-compose-${TARGETOS}-${TARGETARCH}
-ENTRYPOINT ["/code/.tox/py36/bin/docker-compose"]
+FROM alpine:${RUNTIME_ALPINE_VERSION} AS runtime-alpine
+FROM debian:${RUNTIME_DEBIAN_VERSION} AS runtime-debian
+FROM centos:${RUNTIME_CENTOS_VERSION} AS runtime-centos
+FROM runtime-${DISTRO} AS runtime
+COPY docker-compose-entrypoint.sh /usr/local/bin/
+ENTRYPOINT ["sh", "/usr/local/bin/docker-compose-entrypoint.sh"]
+COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
+COPY --from=build /usr/local/bin/docker-compose /usr/local/bin/docker-compose
diff --git a/Dockerfile.armhf b/Dockerfile.armhf
deleted file mode 100644
index ce4ab7c1..00000000
--- a/Dockerfile.armhf
+++ /dev/null
@@ -1,73 +0,0 @@
-FROM armhf/debian:wheezy
-
-RUN set -ex; \
- apt-get update -qq; \
- apt-get install -y \
- locales \
- gcc \
- make \
- zlib1g \
- zlib1g-dev \
- libssl-dev \
- git \
- ca-certificates \
- curl \
- libsqlite3-dev \
- libbz2-dev \
- ; \
- rm -rf /var/lib/apt/lists/*
-
-RUN curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/armhf/docker-17.12.0-ce.tgz" && \
- tar xvf dockerbins.tgz docker/docker --strip-components 1 && \
- mv docker /usr/local/bin/docker && \
- chmod +x /usr/local/bin/docker && \
- rm dockerbins.tgz
-
-# Build Python 2.7.13 from source
-RUN set -ex; \
- curl -L https://www.python.org/ftp/python/2.7.13/Python-2.7.13.tgz | tar -xz; \
- cd Python-2.7.13; \
- ./configure --enable-shared; \
- make; \
- make install; \
- cd ..; \
- rm -rf /Python-2.7.13
-
-# Build python 3.6 from source
-RUN set -ex; \
- curl -L https://www.python.org/ftp/python/3.6.4/Python-3.6.4.tgz | tar -xz; \
- cd Python-3.6.4; \
- ./configure --enable-shared; \
- make; \
- make install; \
- cd ..; \
- rm -rf /Python-3.6.4
-
-# Make libpython findable
-ENV LD_LIBRARY_PATH /usr/local/lib
-
-# Install pip
-RUN set -ex; \
- curl -L https://bootstrap.pypa.io/get-pip.py | python
-
-# Python3 requires a valid locale
-RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen
-ENV LANG en_US.UTF-8
-
-RUN useradd -d /home/user -m -s /bin/bash user
-WORKDIR /code/
-
-RUN pip install tox==2.1.1
-
-ADD requirements.txt /code/
-ADD requirements-dev.txt /code/
-ADD .pre-commit-config.yaml /code/
-ADD setup.py /code/
-ADD tox.ini /code/
-ADD compose /code/compose/
-RUN tox --notest
-
-ADD . /code/
-RUN chown -R user /code/
-
-ENTRYPOINT ["/code/.tox/py27/bin/docker-compose"]
diff --git a/Dockerfile.run b/Dockerfile.run
deleted file mode 100644
index c403ac23..00000000
--- a/Dockerfile.run
+++ /dev/null
@@ -1,23 +0,0 @@
-FROM alpine:3.6
-
-ENV GLIBC 2.27-r0
-ENV DOCKERBINS_SHA 1270dce1bd7e1838d62ae21d2505d87f16efc1d9074645571daaefdfd0c14054
-
-RUN apk update && apk add --no-cache openssl ca-certificates curl libgcc && \
- curl -fsSL -o /etc/apk/keys/sgerrand.rsa.pub https://raw.githubusercontent.com/sgerrand/alpine-pkg-glibc/master/sgerrand.rsa.pub && \
- curl -fsSL -o glibc-$GLIBC.apk https://github.com/sgerrand/alpine-pkg-glibc/releases/download/$GLIBC/glibc-$GLIBC.apk && \
- apk add --no-cache glibc-$GLIBC.apk && \
- ln -s /lib/libz.so.1 /usr/glibc-compat/lib/ && \
- ln -s /lib/libc.musl-x86_64.so.1 /usr/glibc-compat/lib && \
- ln -s /usr/lib/libgcc_s.so.1 /usr/glibc-compat/lib && \
- curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/x86_64/docker-17.12.1-ce.tgz" && \
- echo "${DOCKERBINS_SHA} dockerbins.tgz" | sha256sum -c - && \
- tar xvf dockerbins.tgz docker/docker --strip-components 1 && \
- mv docker /usr/local/bin/docker && \
- chmod +x /usr/local/bin/docker && \
- rm dockerbins.tgz /etc/apk/keys/sgerrand.rsa.pub glibc-$GLIBC.apk && \
- apk del curl
-
-COPY dist/docker-compose-Linux-x86_64 /usr/local/bin/docker-compose
-
-ENTRYPOINT ["docker-compose"]
diff --git a/Dockerfile.s390x b/Dockerfile.s390x
deleted file mode 100644
index 3b19bb39..00000000
--- a/Dockerfile.s390x
+++ /dev/null
@@ -1,15 +0,0 @@
-FROM s390x/alpine:3.6
-
-ARG COMPOSE_VERSION=1.16.1
-
-RUN apk add --update --no-cache \
- python \
- py-pip \
- && pip install --no-cache-dir docker-compose==$COMPOSE_VERSION \
- && rm -rf /var/cache/apk/*
-
-WORKDIR /data
-VOLUME /data
-
-
-ENTRYPOINT ["docker-compose"]
diff --git a/Jenkinsfile b/Jenkinsfile
index 44cd7c3c..454f9bb8 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,83 +1,117 @@
#!groovy
-def image
+def dockerVersions = ['19.03.13']
+def baseImages = ['alpine', 'debian']
+def pythonVersions = ['py37']
-def buildImage = { ->
- wrappedNode(label: "ubuntu && !zfs", cleanWorkspace: true) {
- stage("build image") {
- checkout(scm)
- def imageName = "dockerbuildbot/compose:${gitCommit()}"
- image = docker.image(imageName)
- try {
- image.pull()
- } catch (Exception exc) {
- image = docker.build(imageName, ".")
- image.push()
- }
- }
- }
-}
+pipeline {
+ agent none
-def get_versions = { int number ->
- def docker_versions
- wrappedNode(label: "ubuntu && !zfs") {
- def result = sh(script: """docker run --rm \\
- --entrypoint=/code/.tox/py27/bin/python \\
- ${image.id} \\
- /code/script/test/versions.py -n ${number} docker/docker-ce recent
- """, returnStdout: true
- )
- docker_versions = result.split()
- }
- return docker_versions
-}
-
-def runTests = { Map settings ->
- def dockerVersions = settings.get("dockerVersions", null)
- def pythonVersions = settings.get("pythonVersions", null)
+ options {
+ skipDefaultCheckout(true)
+ buildDiscarder(logRotator(daysToKeepStr: '30'))
+ timeout(time: 2, unit: 'HOURS')
+ timestamps()
+ }
+ environment {
+ DOCKER_BUILDKIT="1"
+ }
- if (!pythonVersions) {
- throw new Exception("Need Python versions to test. e.g.: `runTests(pythonVersions: 'py27,py36')`")
- }
- if (!dockerVersions) {
- throw new Exception("Need Docker versions to test. e.g.: `runTests(dockerVersions: 'all')`")
- }
+ stages {
+ stage('Build test images') {
+ // TODO use declarative 1.5.0 `matrix` once available on CI
+ parallel {
+ stage('alpine') {
+ agent {
+ label 'ubuntu-2004 && amd64 && !zfs && cgroup1'
+ }
+ steps {
+ buildImage('alpine')
+ }
+ }
+ stage('debian') {
+ agent {
+ label 'ubuntu-2004 && amd64 && !zfs && cgroup1'
+ }
+ steps {
+ buildImage('debian')
+ }
+ }
+ }
+ }
+ stage('Test') {
+ steps {
+ // TODO use declarative 1.5.0 `matrix` once available on CI
+ script {
+ def testMatrix = [:]
+ baseImages.each { baseImage ->
+ dockerVersions.each { dockerVersion ->
+ pythonVersions.each { pythonVersion ->
+ testMatrix["${baseImage}_${dockerVersion}_${pythonVersion}"] = runTests(dockerVersion, pythonVersion, baseImage)
+ }
+ }
+ }
- { ->
- wrappedNode(label: "ubuntu && !zfs", cleanWorkspace: true) {
- stage("test python=${pythonVersions} / docker=${dockerVersions}") {
- checkout(scm)
- def storageDriver = sh(script: 'docker info | awk -F \': \' \'$1 == "Storage Driver" { print $2; exit }\'', returnStdout: true).trim()
- echo "Using local system's storage driver: ${storageDriver}"
- sh """docker run \\
- -t \\
- --rm \\
- --privileged \\
- --volume="\$(pwd)/.git:/code/.git" \\
- --volume="/var/run/docker.sock:/var/run/docker.sock" \\
- -e "TAG=${image.id}" \\
- -e "STORAGE_DRIVER=${storageDriver}" \\
- -e "DOCKER_VERSIONS=${dockerVersions}" \\
- -e "BUILD_NUMBER=\$BUILD_TAG" \\
- -e "PY_TEST_VERSIONS=${pythonVersions}" \\
- --entrypoint="script/test/ci" \\
- ${image.id} \\
- --verbose
- """
- }
+ parallel testMatrix
+ }
+ }
+ }
}
- }
}
-buildImage()
-def testMatrix = [failFast: true]
-def docker_versions = get_versions(2)
+def buildImage(baseImage) {
+ def scmvar = checkout(scm)
+ def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+ image = docker.image(imageName)
-for (int i = 0 ;i < docker_versions.length ; i++) {
- def dockerVersion = docker_versions[i]
- testMatrix["${dockerVersion}_py27"] = runTests([dockerVersions: dockerVersion, pythonVersions: "py27"])
- testMatrix["${dockerVersion}_py36"] = runTests([dockerVersions: dockerVersion, pythonVersions: "py36"])
+ withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+ try {
+ image.pull()
+ } catch (Exception exc) {
+ ansiColor('xterm') {
+ sh """docker build -t ${imageName} \\
+ --target build \\
+ --build-arg DISTRO="${baseImage}" \\
+ --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\
+ .\\
+ """
+ sh "docker push ${imageName}"
+ }
+ echo "${imageName}"
+ return imageName
+ }
+ }
}
-parallel(testMatrix)
+def runTests(dockerVersion, pythonVersion, baseImage) {
+ return {
+ stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
+ node("ubuntu-2004 && amd64 && !zfs && cgroup1") {
+ def scmvar = checkout(scm)
+ def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+ def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
+ echo "Using local system's storage driver: ${storageDriver}"
+ withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+ sh """docker run \\
+ -t \\
+ --rm \\
+ --privileged \\
+ --volume="\$(pwd)/.git:/code/.git" \\
+ --volume="/var/run/docker.sock:/var/run/docker.sock" \\
+ --volume="\${DOCKER_CONFIG}/config.json:/root/.docker/config.json" \\
+ -e "DOCKER_TLS_CERTDIR=" \\
+ -e "TAG=${imageName}" \\
+ -e "STORAGE_DRIVER=${storageDriver}" \\
+ -e "DOCKER_VERSIONS=${dockerVersion}" \\
+ -e "BUILD_NUMBER=${env.BUILD_NUMBER}" \\
+ -e "PY_TEST_VERSIONS=${pythonVersion}" \\
+ --entrypoint="script/test/ci" \\
+ ${imageName} \\
+ --verbose
+ """
+ }
+ }
+ }
+ }
+}
diff --git a/MAINTAINERS b/MAINTAINERS
index 7aedd46e..7e178147 100644
--- a/MAINTAINERS
+++ b/MAINTAINERS
@@ -11,9 +11,10 @@
[Org]
[Org."Core maintainers"]
people = [
- "mefyl",
- "mnottale",
- "shin-",
+ "aiordache",
+ "ndeloof",
+ "rumpl",
+ "ulyssessouza",
]
[Org.Alumni]
people = [
@@ -29,11 +30,15 @@
# in 2015 with solid bugfixes and improved error handling
# among them
"mnowster",
- # Daniel Nephin is one of the longest-running maitainers on
+ # Daniel Nephin is one of the longest-running maintainers on
# the Compose project, and has contributed several major features
# including multi-file support, variable interpolation, secrets
# emulation and many more
"dnephin",
+
+ "shin-",
+ "mefyl",
+ "mnottale",
]
[people]
@@ -49,6 +54,11 @@
Email = "aanand.prasad@gmail.com"
GitHub = "aanand"
+ [people.aiordache]
+ Name = "Anca Iordache"
+ Email = "anca.iordache@docker.com"
+ GitHub = "aiordache"
+
[people.bfirsh]
Name = "Ben Firshman"
Email = "ben@firshman.co.uk"
@@ -74,7 +84,22 @@
Email = "mazz@houseofmnowster.com"
GitHub = "mnowster"
- [People.shin-]
+ [people.ndeloof]
+ Name = "Nicolas De Loof"
+ Email = "nicolas.deloof@gmail.com"
+ GitHub = "ndeloof"
+
+ [people.rumpl]
+ Name = "Djordje Lukic"
+ Email = "djordje.lukic@docker.com"
+ GitHub = "rumpl"
+
+ [people.shin-]
Name = "Joffrey F"
- Email = "joffrey@docker.com"
+ Email = "f.joffrey@gmail.com"
GitHub = "shin-"
+
+ [people.ulyssessouza]
+ Name = "Ulysses Domiciano Souza"
+ Email = "ulysses.souza@docker.com"
+ GitHub = "ulyssessouza"
diff --git a/MANIFEST.in b/MANIFEST.in
index 8c6f932b..313b4e00 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,11 +1,11 @@
include Dockerfile
include LICENSE
+include requirements-indirect.txt
include requirements.txt
include requirements-dev.txt
include tox.ini
include *.md
-exclude README.md
-include README.rst
+include README.md
include compose/config/*.json
include compose/GITSHA
recursive-include contrib/completion *
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..0a7a5c36
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,57 @@
+TAG = "docker-compose:alpine-$(shell git rev-parse --short HEAD)"
+GIT_VOLUME = "--volume=$(shell pwd)/.git:/code/.git"
+
+DOCKERFILE ?= "Dockerfile"
+DOCKER_BUILD_TARGET ?= "build"
+
+UNAME_S := $(shell uname -s)
+ifeq ($(UNAME_S),Linux)
+ BUILD_SCRIPT = linux
+endif
+ifeq ($(UNAME_S),Darwin)
+ BUILD_SCRIPT = osx
+endif
+
+COMPOSE_SPEC_SCHEMA_PATH = "compose/config/compose_spec.json"
+COMPOSE_SPEC_RAW_URL = "https://raw.githubusercontent.com/compose-spec/compose-spec/master/schema/compose-spec.json"
+
+all: cli
+
+cli: download-compose-spec ## Compile the cli
+ ./script/build/$(BUILD_SCRIPT)
+
+download-compose-spec: ## Download the compose-spec schema from its repo
+ curl -so $(COMPOSE_SPEC_SCHEMA_PATH) $(COMPOSE_SPEC_RAW_URL)
+
+cache-clear: ## Clear the builder cache
+ @docker builder prune --force --filter type=exec.cachemount --filter=unused-for=24h
+
+base-image: ## Build the base image
+ docker build -f $(DOCKERFILE) -t $(TAG) --target $(DOCKER_BUILD_TARGET) .
+
+lint: base-image ## Run linter
+ docker run --rm \
+ --tty \
+ $(GIT_VOLUME) \
+ $(TAG) \
+ tox -e pre-commit
+
+test-unit: base-image ## Run unit tests
+ docker run --rm \
+ --tty \
+ $(GIT_VOLUME) \
+ $(TAG) \
+ pytest -v tests/unit/
+
+test: ## Run all tests
+ ./script/test/default
+
+pre-commit: lint test-unit cli
+
+help: ## Show help
+ @echo Please specify a build target. The choices are:
+ @grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
+
+FORCE:
+
+.PHONY: all cli download-compose-spec cache-clear base-image lint test-unit test pre-commit help
diff --git a/README.md b/README.md
index ea07f6a7..d0d23d8a 100644
--- a/README.md
+++ b/README.md
@@ -1,63 +1,86 @@
Docker Compose
==============
+[![Build Status](https://ci-next.docker.com/public/buildStatus/icon?job=compose/master)](https://ci-next.docker.com/public/job/compose/job/master/)
+
![Docker Compose](logo.png?raw=true "Docker Compose Logo")
-Compose is a tool for defining and running multi-container Docker applications.
-With Compose, you use a Compose file to configure your application's services.
-Then, using a single command, you create and start all the services
-from your configuration. To learn more about all the features of Compose
-see [the list of features](https://github.com/docker/docker.github.io/blob/master/compose/overview.md#features).
+Docker Compose is a tool for running multi-container applications on Docker
+defined using the [Compose file format](https://compose-spec.io).
+A Compose file is used to define how one or more containers that make up
+your application are configured.
+Once you have a Compose file, you can create and start your application with a
+single command: `docker-compose up`.
-Compose is great for development, testing, and staging environments, as well as
-CI workflows. You can learn more about each case in
-[Common Use Cases](https://github.com/docker/docker.github.io/blob/master/compose/overview.md#common-use-cases).
+Compose files can be used to deploy applications locally, or to the cloud on
+[Amazon ECS](https://aws.amazon.com/ecs) or
+[Microsoft ACI](https://azure.microsoft.com/services/container-instances/) using
+the Docker CLI. You can read more about how to do this:
+- [Compose for Amazon ECS](https://docs.docker.com/engine/context/ecs-integration/)
+- [Compose for Microsoft ACI](https://docs.docker.com/engine/context/aci-integration/)
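+
+As a rough sketch of that workflow (`myecscontext` is a placeholder name, and
+the exact commands depend on your Docker CLI version):
+
+```console
+docker context create ecs myecscontext
+docker context use myecscontext
+docker compose up
+```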
-Using Compose is basically a three-step process.
+Where to get Docker Compose
+----------------------------
-1. Define your app's environment with a `Dockerfile` so it can be
-reproduced anywhere.
-2. Define the services that make up your app in `docker-compose.yml` so
-they can be run together in an isolated environment.
-3. Lastly, run `docker-compose up` and Compose will start and run your entire app.
+### Windows and macOS
-A `docker-compose.yml` looks like this:
+Docker Compose is included in
+[Docker Desktop](https://www.docker.com/products/docker-desktop)
+for Windows and macOS.
- version: '2'
+### Linux
- services:
- web:
- build: .
- ports:
- - "5000:5000"
- volumes:
- - .:/code
- redis:
- image: redis
+You can download Docker Compose binaries from the
+[release page](https://github.com/docker/compose/releases) on this repository.
-For more information about the Compose file, see the
-[Compose file reference](https://github.com/docker/docker.github.io/blob/master/compose/compose-file/compose-versioning.md)
+### Using pip
-Compose has commands for managing the whole lifecycle of your application:
+If your platform is not supported, you can install Docker Compose using `pip`:
- * Start, stop and rebuild services
- * View the status of running services
- * Stream the log output of running services
- * Run a one-off command on a service
+```console
+pip install docker-compose
+```
-Installation and documentation
-------------------------------
+> **Note:** Docker Compose requires Python 3.6 or later.
-- Full documentation is available on [Docker's website](https://docs.docker.com/compose/).
-- If you have any questions, you can talk in real-time with other developers in the #docker-compose IRC channel on Freenode. [Click here to join using IRCCloud.](https://www.irccloud.com/invite?hostname=irc.freenode.net&channel=%23docker-compose)
-- Code repository for Compose is on [GitHub](https://github.com/docker/compose)
-- If you find any problems please fill out an [issue](https://github.com/docker/compose/issues/new)
+Quick Start
+-----------
+
+Using Docker Compose is basically a three-step process:
+1. Define your app's environment with a `Dockerfile` so it can be
+ reproduced anywhere.
+2. Define the services that make up your app in `docker-compose.yml` so
+ they can be run together in an isolated environment.
+3. Lastly, run `docker-compose up` and Compose will start and run your entire
+ app.
+
+A Compose file looks like this:
+
+```yaml
+services:
+ web:
+ build: .
+ ports:
+ - "5000:5000"
+ volumes:
+ - .:/code
+ redis:
+ image: redis
+```
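+
+Step 1 above asks for a `Dockerfile`. A minimal one for the `web` service in
+this example might look like the following sketch (the application code and
+its dependencies are assumptions, not part of this repository):
+
+```dockerfile
+FROM python:3.9-alpine
+WORKDIR /code
+COPY requirements.txt .
+RUN pip install -r requirements.txt
+COPY . .
+CMD ["python", "app.py"]
+```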
+
+You can find examples of Compose applications in our
+[Awesome Compose repository](https://github.com/docker/awesome-compose).
+
+For more information about the Compose format, see the
+[Compose file reference](https://docs.docker.com/compose/compose-file/).
Contributing
------------
-[![Build Status](https://jenkins.dockerproject.org/buildStatus/icon?job=docker/compose/master)](https://jenkins.dockerproject.org/job/docker/job/compose/job/master/)
+Want to help develop Docker Compose? Check out our
+[contributing documentation](https://github.com/docker/compose/blob/master/CONTRIBUTING.md).
-Want to help build Compose? Check out our [contributing documentation](https://github.com/docker/compose/blob/master/CONTRIBUTING.md).
+If you find an issue, please report it on the
+[issue tracker](https://github.com/docker/compose/issues/new/choose).
Releasing
---------
diff --git a/Release.Jenkinsfile b/Release.Jenkinsfile
new file mode 100644
index 00000000..4779c25a
--- /dev/null
+++ b/Release.Jenkinsfile
@@ -0,0 +1,312 @@
+#!groovy
+
+def dockerVersions = ['19.03.13', '18.09.9']
+def baseImages = ['alpine', 'debian']
+def pythonVersions = ['py37']
+
+pipeline {
+ agent none
+
+ options {
+ skipDefaultCheckout(true)
+ buildDiscarder(logRotator(daysToKeepStr: '30'))
+ timeout(time: 2, unit: 'HOURS')
+ timestamps()
+ }
+ environment {
+ DOCKER_BUILDKIT="1"
+ }
+
+ stages {
+ stage('Build test images') {
+ // TODO use declarative 1.5.0 `matrix` once available on CI
+ parallel {
+ stage('alpine') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ steps {
+ buildImage('alpine')
+ }
+ }
+ stage('debian') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ steps {
+ buildImage('debian')
+ }
+ }
+ }
+ }
+ stage('Test') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ steps {
+ // TODO use declarative 1.5.0 `matrix` once available on CI
+ script {
+ def testMatrix = [:]
+ baseImages.each { baseImage ->
+ dockerVersions.each { dockerVersion ->
+ pythonVersions.each { pythonVersion ->
+ testMatrix["${baseImage}_${dockerVersion}_${pythonVersion}"] = runTests(dockerVersion, pythonVersion, baseImage)
+ }
+ }
+ }
+
+ parallel testMatrix
+ }
+ }
+ }
+ stage('Generate Changelog') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ steps {
+ checkout scm
+ withCredentials([string(credentialsId: 'github-compose-release-test-token', variable: 'GITHUB_TOKEN')]) {
+ sh "./script/release/generate_changelog.sh"
+ }
+ archiveArtifacts artifacts: 'CHANGELOG.md'
+ stash( name: "changelog", includes: 'CHANGELOG.md' )
+ }
+ }
+ stage('Package') {
+ parallel {
+ stage('macosx binary') {
+ agent {
+ label 'mac-python'
+ }
+ environment {
+ DEPLOYMENT_TARGET="10.11"
+ }
+ steps {
+ checkout scm
+ sh './script/setup/osx'
+ sh 'tox -e py39 -- tests/unit'
+ sh './script/build/osx'
+ dir ('dist') {
+ checksum('docker-compose-Darwin-x86_64')
+ checksum('docker-compose-Darwin-x86_64.tgz')
+ }
+ archiveArtifacts artifacts: 'dist/*', fingerprint: true
+ dir("dist") {
+ stash name: "bin-darwin"
+ }
+ }
+ }
+ stage('linux binary') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ steps {
+ checkout scm
+ sh './script/build/linux'
+ dir ('dist') {
+ checksum('docker-compose-Linux-x86_64')
+ }
+ archiveArtifacts artifacts: 'dist/*', fingerprint: true
+ dir("dist") {
+ stash name: "bin-linux"
+ }
+ }
+ }
+ stage('windows binary') {
+ agent {
+ label 'windows-python'
+ }
+ environment {
+ PATH = "C:\\Python39;C:\\Python39\\Scripts;$PATH"
+ }
+ steps {
+ checkout scm
+ bat 'tox.exe -e py39 -- tests/unit'
+ powershell '.\\script\\build\\windows.ps1'
+ dir ('dist') {
+ checksum('docker-compose-Windows-x86_64.exe')
+ }
+ archiveArtifacts artifacts: 'dist/*', fingerprint: true
+ dir("dist") {
+ stash name: "bin-win"
+ }
+ }
+ }
+ stage('alpine image') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ steps {
+ buildRuntimeImage('alpine')
+ }
+ }
+ stage('debian image') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ steps {
+ buildRuntimeImage('debian')
+ }
+ }
+ }
+ }
+ stage('Release') {
+ when {
+ buildingTag()
+ }
+ parallel {
+ stage('Pushing images') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ steps {
+ pushRuntimeImage('alpine')
+ pushRuntimeImage('debian')
+ }
+ }
+ stage('Creating Github Release') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ environment {
+ GITHUB_TOKEN = credentials('github-release-token')
+ }
+ steps {
+ checkout scm
+ sh 'mkdir -p dist'
+ dir("dist") {
+ unstash "bin-darwin"
+ unstash "bin-linux"
+ unstash "bin-win"
+ unstash "changelog"
+ sh("""
+ curl -SfL https://github.com/github/hub/releases/download/v2.13.0/hub-linux-amd64-2.13.0.tgz | tar xzv --wildcards 'hub-*/bin/hub' --strip=2
+ ./hub release create --draft --prerelease=${env.TAG_NAME !=~ /v[0-9\.]+/} \\
+ -a docker-compose-Darwin-x86_64 \\
+ -a docker-compose-Darwin-x86_64.sha256 \\
+ -a docker-compose-Darwin-x86_64.tgz \\
+ -a docker-compose-Darwin-x86_64.tgz.sha256 \\
+ -a docker-compose-Linux-x86_64 \\
+ -a docker-compose-Linux-x86_64.sha256 \\
+ -a docker-compose-Windows-x86_64.exe \\
+ -a docker-compose-Windows-x86_64.exe.sha256 \\
+ -a ../script/run/run.sh \\
+ -F CHANGELOG.md \${TAG_NAME}
+ """)
+ }
+ }
+ }
+ stage('Publishing Python packages') {
+ agent {
+ label 'linux && docker && ubuntu-2004 && amd64 && cgroup1'
+ }
+ environment {
+ PYPIRC = credentials('pypirc-docker-dsg-cibot')
+ }
+ steps {
+ checkout scm
+ sh """
+ rm -rf build/ dist/
+ pip3 install wheel
+ python3 setup.py sdist bdist_wheel
+ pip3 install twine
+ ~/.local/bin/twine upload --config-file ${PYPIRC} ./dist/docker-compose-*.tar.gz ./dist/docker_compose-*-py2.py3-none-any.whl
+ """
+ }
+ }
+ }
+ }
+ }
+}
+
+
+def buildImage(baseImage) {
+ def scmvar = checkout(scm)
+ def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+ image = docker.image(imageName)
+
+ withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+ try {
+ image.pull()
+ } catch (Exception exc) {
+ ansiColor('xterm') {
+ sh """docker build -t ${imageName} \\
+ --target build \\
+ --build-arg DISTRO="${baseImage}" \\
+ --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\
+ .\\
+ """
+ sh "docker push ${imageName}"
+ }
+ echo "${imageName}"
+ return imageName
+ }
+ }
+}
+
+def runTests(dockerVersion, pythonVersion, baseImage) {
+ return {
+ stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
+ node("linux && docker && ubuntu-2004 && amd64 && cgroup1") {
+ def scmvar = checkout(scm)
+ def imageName = "dockerpinata/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+ def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
+ echo "Using local system's storage driver: ${storageDriver}"
+ withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+ sh """docker run \\
+ -t \\
+ --rm \\
+ --privileged \\
+ --volume="\$(pwd)/.git:/code/.git" \\
+ --volume="/var/run/docker.sock:/var/run/docker.sock" \\
+ --volume="\${DOCKER_CONFIG}/config.json:/root/.docker/config.json" \\
+ -e "DOCKER_TLS_CERTDIR=" \\
+ -e "TAG=${imageName}" \\
+ -e "STORAGE_DRIVER=${storageDriver}" \\
+ -e "DOCKER_VERSIONS=${dockerVersion}" \\
+ -e "BUILD_NUMBER=${env.BUILD_NUMBER}" \\
+ -e "PY_TEST_VERSIONS=${pythonVersion}" \\
+ --entrypoint="script/test/ci" \\
+ ${imageName} \\
+ --verbose
+ """
+ }
+ }
+ }
+ }
+}
+
+def buildRuntimeImage(baseImage) {
+ scmvar = checkout scm
+ def imageName = "docker/compose:${baseImage}-${env.BRANCH_NAME}"
+ ansiColor('xterm') {
+ sh """docker build -t ${imageName} \\
+ --build-arg DISTRO="${baseImage}" \\
+ --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT.take(7)}" \\
+ .
+ """
+ }
+ sh "mkdir -p dist"
+ sh "docker save ${imageName} -o dist/docker-compose-${baseImage}.tar"
+ stash name: "compose-${baseImage}", includes: "dist/docker-compose-${baseImage}.tar"
+}
+
+def pushRuntimeImage(baseImage) {
+ unstash "compose-${baseImage}"
+ sh "docker load -i dist/docker-compose-${baseImage}.tar"
+ withDockerRegistry(credentialsId: 'dockerhub-dockerdsgcibot') {
+ sh "docker push docker/compose:${baseImage}-${env.TAG_NAME}"
+ if (baseImage == "alpine" && env.TAG_NAME != null) {
+ sh "docker tag docker/compose:alpine-${env.TAG_NAME} docker/compose:${env.TAG_NAME}"
+ sh "docker push docker/compose:${env.TAG_NAME}"
+ }
+ }
+}
+
+def checksum(filepath) {
+ if (isUnix()) {
+ sh "openssl sha256 -r -out ${filepath}.sha256 ${filepath}"
+ } else {
+ powershell "(Get-FileHash -Path ${filepath} -Algorithm SHA256 | % hash).ToLower() + ' *${filepath}' | Out-File -encoding ascii ${filepath}.sha256"
+ }
+}
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index f027a118..00000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-
-version: '{branch}-{build}'
-
-install:
- - "SET PATH=C:\\Python36-x64;C:\\Python36-x64\\Scripts;%PATH%"
- - "python --version"
- - "pip install tox==2.9.1 virtualenv==15.1.0"
-
-# Build the binary after tests
-build: false
-
-test_script:
- - "tox -e py27,py36 -- tests/unit"
- - ps: ".\\script\\build\\windows.ps1"
-
-artifacts:
- - path: .\dist\docker-compose-Windows-x86_64.exe
- name: "Compose Windows binary"
-
-deploy:
- - provider: Environment
- name: master-builds
- on:
- branch: master
diff --git a/bin/docker-compose b/bin/docker-compose
index aeb53870..5976e1d4 100755
--- a/bin/docker-compose
+++ b/bin/docker-compose
@@ -1,6 +1,3 @@
#!/usr/bin/env python
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from compose.cli.main import main
main()
diff --git a/compose/__init__.py b/compose/__init__.py
index 693a1ab1..b27c34d5 100644
--- a/compose/__init__.py
+++ b/compose/__init__.py
@@ -1,4 +1 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-__version__ = '1.21.0'
+__version__ = '1.29.2'
diff --git a/compose/__main__.py b/compose/__main__.py
index 27a7acbb..199ba2ae 100644
--- a/compose/__main__.py
+++ b/compose/__main__.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from compose.cli.main import main
main()
diff --git a/compose/bundle.py b/compose/bundle.py
deleted file mode 100644
index 937a3708..00000000
--- a/compose/bundle.py
+++ /dev/null
@@ -1,258 +0,0 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import json
-import logging
-
-import six
-from docker.utils import split_command
-from docker.utils.ports import split_port
-
-from .cli.errors import UserError
-from .config.serialize import denormalize_config
-from .network import get_network_defs_for_service
-from .service import format_environment
-from .service import NoSuchImageError
-from .service import parse_repository_tag
-
-
-log = logging.getLogger(__name__)
-
-
-SERVICE_KEYS = {
- 'working_dir': 'WorkingDir',
- 'user': 'User',
- 'labels': 'Labels',
-}
-
-IGNORED_KEYS = {'build'}
-
-SUPPORTED_KEYS = {
- 'image',
- 'ports',
- 'expose',
- 'networks',
- 'command',
- 'environment',
- 'entrypoint',
-} | set(SERVICE_KEYS)
-
-VERSION = '0.1'
-
-
-class NeedsPush(Exception):
- def __init__(self, image_name):
- self.image_name = image_name
-
-
-class NeedsPull(Exception):
- def __init__(self, image_name, service_name):
- self.image_name = image_name
- self.service_name = service_name
-
-
-class MissingDigests(Exception):
- def __init__(self, needs_push, needs_pull):
- self.needs_push = needs_push
- self.needs_pull = needs_pull
-
-
-def serialize_bundle(config, image_digests):
- return json.dumps(to_bundle(config, image_digests), indent=2, sort_keys=True)
-
-
-def get_image_digests(project, allow_push=False):
- digests = {}
- needs_push = set()
- needs_pull = set()
-
- for service in project.services:
- try:
- digests[service.name] = get_image_digest(
- service,
- allow_push=allow_push,
- )
- except NeedsPush as e:
- needs_push.add(e.image_name)
- except NeedsPull as e:
- needs_pull.add(e.service_name)
-
- if needs_push or needs_pull:
- raise MissingDigests(needs_push, needs_pull)
-
- return digests
-
-
-def get_image_digest(service, allow_push=False):
- if 'image' not in service.options:
- raise UserError(
- "Service '{s.name}' doesn't define an image tag. An image name is "
- "required to generate a proper image digest for the bundle. Specify "
- "an image repo and tag with the 'image' option.".format(s=service))
-
- _, _, separator = parse_repository_tag(service.options['image'])
- # Compose file already uses a digest, no lookup required
- if separator == '@':
- return service.options['image']
-
- try:
- image = service.image()
- except NoSuchImageError:
- action = 'build' if 'build' in service.options else 'pull'
- raise UserError(
- "Image not found for service '{service}'. "
- "You might need to run `docker-compose {action} {service}`."
- .format(service=service.name, action=action))
-
- if image['RepoDigests']:
- # TODO: pick a digest based on the image tag if there are multiple
- # digests
- return image['RepoDigests'][0]
-
- if 'build' not in service.options:
- raise NeedsPull(service.image_name, service.name)
-
- if not allow_push:
- raise NeedsPush(service.image_name)
-
- return push_image(service)
-
-
-def push_image(service):
- try:
- digest = service.push()
- except Exception:
- log.error(
- "Failed to push image for service '{s.name}'. Please use an "
- "image tag that can be pushed to a Docker "
- "registry.".format(s=service))
- raise
-
- if not digest:
- raise ValueError("Failed to get digest for %s" % service.name)
-
- repo, _, _ = parse_repository_tag(service.options['image'])
- identifier = '{repo}@{digest}'.format(repo=repo, digest=digest)
-
- # only do this if RepoDigests isn't already populated
- image = service.image()
- if not image['RepoDigests']:
- # Pull by digest so that image['RepoDigests'] is populated for next time
- # and we don't have to pull/push again
- service.client.pull(identifier)
- log.info("Stored digest for {}".format(service.image_name))
-
- return identifier
-
-
-def to_bundle(config, image_digests):
- if config.networks:
- log.warn("Unsupported top level key 'networks' - ignoring")
-
- if config.volumes:
- log.warn("Unsupported top level key 'volumes' - ignoring")
-
- config = denormalize_config(config)
-
- return {
- 'Version': VERSION,
- 'Services': {
- name: convert_service_to_bundle(
- name,
- service_dict,
- image_digests[name],
- )
- for name, service_dict in config['services'].items()
- },
- }
-
-
-def convert_service_to_bundle(name, service_dict, image_digest):
- container_config = {'Image': image_digest}
-
- for key, value in service_dict.items():
- if key in IGNORED_KEYS:
- continue
-
- if key not in SUPPORTED_KEYS:
- log.warn("Unsupported key '{}' in services.{} - ignoring".format(key, name))
- continue
-
- if key == 'environment':
- container_config['Env'] = format_environment({
- envkey: envvalue for envkey, envvalue in value.items()
- if envvalue
- })
- continue
-
- if key in SERVICE_KEYS:
- container_config[SERVICE_KEYS[key]] = value
- continue
-
- set_command_and_args(
- container_config,
- service_dict.get('entrypoint', []),
- service_dict.get('command', []))
- container_config['Networks'] = make_service_networks(name, service_dict)
-
- ports = make_port_specs(service_dict)
- if ports:
- container_config['Ports'] = ports
-
- return container_config
-
-
-# See https://github.com/docker/swarmkit/blob/agent/exec/container/container.go#L95
-def set_command_and_args(config, entrypoint, command):
- if isinstance(entrypoint, six.string_types):
- entrypoint = split_command(entrypoint)
- if isinstance(command, six.string_types):
- command = split_command(command)
-
- if entrypoint:
- config['Command'] = entrypoint + command
- return
-
- if command:
- config['Args'] = command
-
-
-def make_service_networks(name, service_dict):
- networks = []
-
- for network_name, network_def in get_network_defs_for_service(service_dict).items():
- for key in network_def.keys():
- log.warn(
- "Unsupported key '{}' in services.{}.networks.{} - ignoring"
- .format(key, name, network_name))
-
- networks.append(network_name)
-
- return networks
-
-
-def make_port_specs(service_dict):
- ports = []
-
- internal_ports = [
- internal_port
- for port_def in service_dict.get('ports', [])
- for internal_port in split_port(port_def)[0]
- ]
-
- internal_ports += service_dict.get('expose', [])
-
- for internal_port in internal_ports:
- spec = make_port_spec(internal_port)
- if spec not in ports:
- ports.append(spec)
-
- return ports
-
-
-def make_port_spec(value):
- components = six.text_type(value).partition('/')
- return {
- 'Protocol': components[2] or 'tcp',
- 'Port': int(components[0]),
- }
diff --git a/compose/cli/colors.py b/compose/cli/colors.py
index cb30e361..042403a9 100644
--- a/compose/cli/colors.py
+++ b/compose/cli/colors.py
@@ -1,5 +1,5 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
+import enum
+import os
from ..const import IS_WINDOWS_PLATFORM
@@ -15,18 +15,33 @@ NAMES = [
]
+@enum.unique
+class AnsiMode(enum.Enum):
+ """Enumeration for when to output ANSI colors."""
+ NEVER = "never"
+ ALWAYS = "always"
+ AUTO = "auto"
+
+ def use_ansi_codes(self, stream):
+ if self is AnsiMode.ALWAYS:
+ return True
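+ # CLICOLOR=0 follows the CLICOLOR convention and disables ANSI
+ # output unless it was explicitly forced with `always` above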
+ if self is AnsiMode.NEVER or os.environ.get('CLICOLOR') == '0':
+ return False
+ return stream.isatty()
+
+
def get_pairs():
for i, name in enumerate(NAMES):
- yield(name, str(30 + i))
- yield('intense_' + name, str(30 + i) + ';1')
+ yield (name, str(30 + i))
+ yield ('intense_' + name, str(30 + i) + ';1')
def ansi(code):
- return '\033[{0}m'.format(code)
+ return '\033[{}m'.format(code)
def ansi_color(code, s):
- return '{0}{1}{2}'.format(ansi(code), s, ansi(0))
+ return '{}{}{}'.format(ansi(code), s, ansi(0))
def make_color_fn(code):
@@ -41,9 +56,9 @@ for (name, code) in get_pairs():
def rainbow():
- cs = ['cyan', 'yellow', 'green', 'magenta', 'red', 'blue',
+ cs = ['cyan', 'yellow', 'green', 'magenta', 'blue',
'intense_cyan', 'intense_yellow', 'intense_green',
- 'intense_magenta', 'intense_red', 'intense_blue']
+ 'intense_magenta', 'intense_blue']
for c in cs:
yield globals()[c]
diff --git a/compose/cli/command.py b/compose/cli/command.py
index 8a32a93a..e8d399aa 100644
--- a/compose/cli/command.py
+++ b/compose/cli/command.py
@@ -1,44 +1,73 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import logging
import os
import re
-import six
-
from . import errors
-from . import verbose_proxy
from .. import config
from .. import parallel
from ..config.environment import Environment
-from ..const import API_VERSIONS
+from ..const import LABEL_CONFIG_FILES
+from ..const import LABEL_ENVIRONMENT_FILE
+from ..const import LABEL_WORKING_DIR
from ..project import Project
-from .docker_client import docker_client
-from .docker_client import get_tls_version
-from .docker_client import tls_config_from_options
-from .utils import get_version_info
+from .docker_client import get_client
+from .docker_client import load_context
+from .docker_client import make_context
+from .errors import UserError
log = logging.getLogger(__name__)
-
-def project_from_options(project_dir, options):
- environment = Environment.from_env_file(project_dir)
+SILENT_COMMANDS = {
+ 'events',
+ 'exec',
+ 'kill',
+ 'logs',
+ 'pause',
+ 'ps',
+ 'restart',
+ 'rm',
+ 'start',
+ 'stop',
+ 'top',
+ 'unpause',
+}
+
+
+def project_from_options(project_dir, options, additional_options=None):
+ additional_options = additional_options or {}
+ override_dir = get_project_dir(options)
+ environment_file = options.get('--env-file')
+ environment = Environment.from_env_file(override_dir or project_dir, environment_file)
+ environment.silent = options.get('COMMAND', None) in SILENT_COMMANDS
set_parallel_limit(environment)
- host = options.get('--host')
+ # get the context for the run
+ context = None
+ context_name = options.get('--context', None)
+ if context_name:
+ context = load_context(context_name)
+ if not context:
+ raise UserError("Context '{}' not found".format(context_name))
+
+ host = options.get('--host', None)
if host is not None:
+ if context:
+ raise UserError(
+ "-H, --host and -c, --context are mutually exclusive. Only one should be set.")
host = host.lstrip('=')
+ context = make_context(host, options, environment)
+
return get_project(
project_dir,
- get_config_path_from_options(project_dir, options, environment),
+ get_config_path_from_options(options, environment),
project_name=options.get('--project-name'),
verbose=options.get('--verbose'),
- host=host,
- tls_config=tls_config_from_options(options, environment),
+ context=context,
environment=environment,
- override_dir=options.get('--project-directory'),
- compatibility=options.get('--compatibility'),
+ override_dir=override_dir,
+ interpolate=(not additional_options.get('--no-interpolate')),
+ environment_file=environment_file,
+ enabled_profiles=get_profiles_from_options(options, environment)
)
@@ -58,20 +87,31 @@ def set_parallel_limit(environment):
parallel.GlobalLimit.set_global_limit(parallel_limit)
-def get_config_from_options(base_dir, options):
- environment = Environment.from_env_file(base_dir)
- config_path = get_config_path_from_options(
- base_dir, options, environment
- )
+def get_project_dir(options):
+ override_dir = None
+ files = get_config_path_from_options(options, os.environ)
+ if files:
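+ # a value of '-' means the Compose file is read from stdin, so
+ # there is no file location to derive the project directory from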
+ if files[0] == '-':
+ return '.'
+ override_dir = os.path.dirname(files[0])
+ return options.get('--project-directory') or override_dir
+
+
+def get_config_from_options(base_dir, options, additional_options=None):
+ additional_options = additional_options or {}
+ override_dir = get_project_dir(options)
+ environment_file = options.get('--env-file')
+ environment = Environment.from_env_file(override_dir or base_dir, environment_file)
+ config_path = get_config_path_from_options(options, environment)
return config.load(
- config.find(base_dir, config_path, environment),
- options.get('--compatibility')
+ config.find(base_dir, config_path, environment, override_dir),
+ not additional_options.get('--no-interpolate')
)
-def get_config_path_from_options(base_dir, options, environment):
+def get_config_path_from_options(options, environment):
def unicode_paths(paths):
- return [p.decode('utf-8') if isinstance(p, six.binary_type) else p for p in paths]
+ return [p.decode('utf-8') if isinstance(p, bytes) else p for p in paths]
file_option = options.get('--file')
if file_option:
@@ -84,46 +124,72 @@ def get_config_path_from_options(base_dir, options, environment):
return None
-def get_client(environment, verbose=False, version=None, tls_config=None, host=None,
- tls_version=None):
+def get_profiles_from_options(options, environment):
+ profile_option = options.get('--profile')
+ if profile_option:
+ return profile_option
- client = docker_client(
- version=version, tls_config=tls_config, host=host,
- environment=environment, tls_version=get_tls_version(environment)
- )
- if verbose:
- version_info = six.iteritems(client.version())
- log.info(get_version_info('full'))
- log.info("Docker base_url: %s", client.base_url)
- log.info("Docker version: %s",
- ", ".join("%s=%s" % item for item in version_info))
- return verbose_proxy.VerboseProxy('docker', client)
- return client
+ profiles = environment.get('COMPOSE_PROFILES')
+ if profiles:
+ return profiles.split(',')
+
+ return []
def get_project(project_dir, config_path=None, project_name=None, verbose=False,
- host=None, tls_config=None, environment=None, override_dir=None,
- compatibility=False):
+ context=None, environment=None, override_dir=None,
+ interpolate=True, environment_file=None, enabled_profiles=None):
if not environment:
environment = Environment.from_env_file(project_dir)
config_details = config.find(project_dir, config_path, environment, override_dir)
project_name = get_project_name(
config_details.working_dir, project_name, environment
)
- config_data = config.load(config_details, compatibility)
+ config_data = config.load(config_details, interpolate)
- api_version = environment.get(
- 'COMPOSE_API_VERSION',
- API_VERSIONS[config_data.version])
+ api_version = environment.get('COMPOSE_API_VERSION')
client = get_client(
- verbose=verbose, version=api_version, tls_config=tls_config,
- host=host, environment=environment
+ verbose=verbose, version=api_version, context=context, environment=environment
)
with errors.handle_connection_errors(client):
return Project.from_config(
- project_name, config_data, client, environment.get('DOCKER_DEFAULT_PLATFORM')
+ project_name,
+ config_data,
+ client,
+ environment.get('DOCKER_DEFAULT_PLATFORM'),
+ execution_context_labels(config_details, environment_file),
+ enabled_profiles,
+ )
+
+
+def execution_context_labels(config_details, environment_file):
+ extra_labels = [
+ '{}={}'.format(LABEL_WORKING_DIR, os.path.abspath(config_details.working_dir))
+ ]
+
+ if not use_config_from_stdin(config_details):
+ extra_labels.append('{}={}'.format(LABEL_CONFIG_FILES, config_files_label(config_details)))
+
+ if environment_file is not None:
+ extra_labels.append('{}={}'.format(
+ LABEL_ENVIRONMENT_FILE,
+ os.path.normpath(environment_file))
+ )
+ return extra_labels
+
+
+def use_config_from_stdin(config_details):
+ for c in config_details.config_files:
+ if not c.filename:
+ return True
+ return False
+
+
+def config_files_label(config_details):
+ return ",".join(
+ os.path.normpath(c.filename) for c in config_details.config_files
)
diff --git a/compose/cli/docker_client.py b/compose/cli/docker_client.py
index 939e95bf..e4a0fea6 100644
--- a/compose/cli/docker_client.py
+++ b/compose/cli/docker_client.py
@@ -1,21 +1,22 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import logging
import os.path
import ssl
from docker import APIClient
+from docker import Context
+from docker import ContextAPI
+from docker import TLSConfig
from docker.errors import TLSParameterError
-from docker.tls import TLSConfig
from docker.utils import kwargs_from_env
from docker.utils.config import home_dir
+from . import verbose_proxy
from ..config.environment import Environment
from ..const import HTTP_TIMEOUT
from ..utils import unquote_path
from .errors import UserError
from .utils import generate_user_agent
+from .utils import get_version_info
log = logging.getLogger(__name__)
@@ -24,6 +25,33 @@ def default_cert_path():
return os.path.join(home_dir(), '.docker')
+def make_context(host, options, environment):
+ tls = tls_config_from_options(options, environment)
+ ctx = Context("compose", host=host, tls=tls.verify if tls else False)
+ if tls:
+ ctx.set_endpoint("docker", host, tls, skip_tls_verify=not tls.verify)
+ return ctx
+
+
+def load_context(name=None):
+ return ContextAPI.get_context(name)
+
+
+def get_client(environment, verbose=False, version=None, context=None):
+ client = docker_client(
+ version=version, context=context,
+ environment=environment, tls_version=get_tls_version(environment)
+ )
+ if verbose:
+ version_info = client.version().items()
+ log.info(get_version_info('full'))
+ log.info("Docker base_url: %s", client.base_url)
+ log.info("Docker version: %s",
+ ", ".join("%s=%s" % item for item in version_info))
+ return verbose_proxy.VerboseProxy('docker', client)
+ return client
+
+
def get_tls_version(environment):
compose_tls_version = environment.get('COMPOSE_TLS_VERSION', None)
if not compose_tls_version:
@@ -31,7 +59,7 @@ def get_tls_version(environment):
tls_attr_name = "PROTOCOL_{}".format(compose_tls_version)
if not hasattr(ssl, tls_attr_name):
- log.warn(
+ log.warning(
'The "{}" protocol is unavailable. You may need to update your '
'version of Python or OpenSSL. Falling back to TLSv1 (default).'
.format(compose_tls_version)
@@ -87,8 +115,7 @@ def tls_config_from_options(options, environment=None):
return None
-def docker_client(environment, version=None, tls_config=None, host=None,
- tls_version=None):
+def docker_client(environment, version=None, context=None, tls_version=None):
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
@@ -101,10 +128,26 @@ def docker_client(environment, version=None, tls_config=None, host=None,
"and DOCKER_CERT_PATH are set correctly.\n"
"You might need to run `eval \"$(docker-machine env default)\"`")
- if host:
- kwargs['base_url'] = host
- if tls_config:
- kwargs['tls'] = tls_config
+ if not context:
+ # check env for DOCKER_HOST and certs path
+ host = kwargs.get("base_url", None)
+ tls = kwargs.get("tls", None)
+ verify = False if not tls else tls.verify
+ if host:
+ context = Context("compose", host=host, tls=verify)
+ else:
+ context = ContextAPI.get_current_context()
+ if tls:
+ context.set_endpoint("docker", host=host, tls_cfg=tls, skip_tls_verify=not verify)
+
+ if not context.is_docker_host():
+ raise UserError(
+ "The platform targeted with the current context is not supported.\n"
+ "Make sure the context in use targets a Docker Engine.\n")
+
+ kwargs['base_url'] = context.Host
+ if context.TLSConfig:
+ kwargs['tls'] = context.TLSConfig
if version:
kwargs['version'] = version
@@ -117,7 +160,14 @@ def docker_client(environment, version=None, tls_config=None, host=None,
kwargs['user_agent'] = generate_user_agent()
- client = APIClient(**kwargs)
+ # Workaround for
+ # https://pyinstaller.readthedocs.io/en/v3.3.1/runtime-information.html#ld-library-path-libpath-considerations
+ if 'LD_LIBRARY_PATH_ORIG' in environment:
+ kwargs['credstore_env'] = {
+ 'LD_LIBRARY_PATH': environment.get('LD_LIBRARY_PATH_ORIG'),
+ }
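+ # COMPOSE_PARAMIKO_SSH=1 opts back into the paramiko-based SSH
+ # transport; by default the system ssh client is used instead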
+ use_paramiko_ssh = int(environment.get('COMPOSE_PARAMIKO_SSH', 0))
+ client = APIClient(use_ssh_client=not use_paramiko_ssh, **kwargs)
client._original_base_url = kwargs.get('base_url')
return client
diff --git a/compose/cli/docopt_command.py b/compose/cli/docopt_command.py
index 809a4b74..e56b3783 100644
--- a/compose/cli/docopt_command.py
+++ b/compose/cli/docopt_command.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from inspect import getdoc
from docopt import docopt
@@ -14,16 +11,22 @@ def docopt_full_help(docstring, *args, **kwargs):
raise SystemExit(docstring)
-class DocoptDispatcher(object):
+class DocoptDispatcher:
def __init__(self, command_class, options):
self.command_class = command_class
self.options = options
+ @classmethod
+ def get_command_and_options(cls, doc_entity, argv, options):
+ command_help = getdoc(doc_entity)
+ opt = docopt_full_help(command_help, argv, **options)
+ command = opt['COMMAND']
+ return command_help, opt, command
+
def parse(self, argv):
- command_help = getdoc(self.command_class)
- options = docopt_full_help(command_help, argv, **self.options)
- command = options['COMMAND']
+ command_help, options, command = DocoptDispatcher.get_command_and_options(
+ self.command_class, argv, self.options)
if command is None:
raise SystemExit(command_help)
@@ -53,7 +56,7 @@ def get_handler(command_class, command):
class NoSuchCommand(Exception):
def __init__(self, command, supercommand):
- super(NoSuchCommand, self).__init__("No such command: %s" % command)
+ super().__init__("No such command: %s" % command)
self.command = command
self.supercommand = supercommand
diff --git a/compose/cli/errors.py b/compose/cli/errors.py
index 82768970..a807c7d1 100644
--- a/compose/cli/errors.py
+++ b/compose/cli/errors.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import contextlib
import logging
import socket
@@ -29,11 +26,9 @@ class UserError(Exception):
def __init__(self, msg):
self.msg = dedent(msg).strip()
- def __unicode__(self):
+ def __str__(self):
return self.msg
- __str__ = __unicode__
-
class ConnectionError(Exception):
pass
@@ -54,7 +49,7 @@ def handle_connection_errors(client):
except APIError as e:
log_api_error(e, client.api_version)
raise ConnectionError()
- except (ReadTimeout, socket.timeout) as e:
+ except (ReadTimeout, socket.timeout):
log_timeout_error(client.timeout)
raise ConnectionError()
except Exception as e:
@@ -67,7 +62,9 @@ def handle_connection_errors(client):
def log_windows_pipe_error(exc):
- if exc.winerror == 232: # https://github.com/docker/compose/issues/5005
+ if exc.winerror == 2:
+ log.error("Couldn't connect to Docker daemon. You might need to start Docker for Windows.")
+ elif exc.winerror == 232: # https://github.com/docker/compose/issues/5005
log.error(
"The current Compose file version is not compatible with your engine version. "
"Please upgrade your Compose file to a more recent version, or set "
diff --git a/compose/cli/formatter.py b/compose/cli/formatter.py
index 6c0a3695..ff81ee65 100644
--- a/compose/cli/formatter.py
+++ b/compose/cli/formatter.py
@@ -1,26 +1,29 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import logging
-import os
+from shutil import get_terminal_size
-import six
import texttable
from compose.cli import colors
def get_tty_width():
- tty_size = os.popen('stty size 2> /dev/null', 'r').read().split()
- if len(tty_size) != 2:
+ try:
+ # get_terminal_size can't determine the size if compose is piped
+ # to another command. But in such a case it doesn't make sense to
+ # try to format the output by terminal size, as this output is
+ # consumed by another command. So let's pretend we have a huge
+ # terminal so the output is single-lined
+ width, _ = get_terminal_size(fallback=(999, 0))
+ return int(width)
+ except OSError:
return 0
- _, width = tty_size
- return int(width)
-class Formatter(object):
+class Formatter:
"""Format tabular data for printing."""
- def table(self, headers, rows):
+
+ @staticmethod
+ def table(headers, rows):
table = texttable.Texttable(max_width=get_tty_width())
table.set_cols_dtype(['t' for h in headers])
table.add_rows([headers] + rows)
@@ -37,15 +40,15 @@ class ConsoleWarningFormatter(logging.Formatter):
def get_level_message(self, record):
separator = ': '
- if record.levelno == logging.WARNING:
- return colors.yellow(record.levelname) + separator
- if record.levelno == logging.ERROR:
+ if record.levelno >= logging.ERROR:
return colors.red(record.levelname) + separator
+ if record.levelno >= logging.WARNING:
+ return colors.yellow(record.levelname) + separator
return ''
def format(self, record):
- if isinstance(record.msg, six.binary_type):
+ if isinstance(record.msg, bytes):
record.msg = record.msg.decode('utf-8')
- message = super(ConsoleWarningFormatter, self).format(record)
- return '{0}{1}'.format(self.get_level_message(record), message)
+ message = super().format(record)
+ return '{}{}'.format(self.get_level_message(record), message)
diff --git a/compose/cli/log_printer.py b/compose/cli/log_printer.py
index 60bba8da..e56dc2fa 100644
--- a/compose/cli/log_printer.py
+++ b/compose/cli/log_printer.py
@@ -1,36 +1,37 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
+import _thread as thread
import sys
from collections import namedtuple
from itertools import cycle
+from operator import attrgetter
+from queue import Empty
+from queue import Queue
from threading import Thread
from docker.errors import APIError
-from six.moves import _thread as thread
-from six.moves.queue import Empty
-from six.moves.queue import Queue
from . import colors
-from compose import utils
from compose.cli.signals import ShutdownException
from compose.utils import split_buffer
-class LogPresenter(object):
+class LogPresenter:
- def __init__(self, prefix_width, color_func):
+ def __init__(self, prefix_width, color_func, keep_prefix=True):
self.prefix_width = prefix_width
self.color_func = color_func
+ self.keep_prefix = keep_prefix
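+ # keep_prefix=False suppresses the '<container name> |' prefix
+ # in front of each log line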
def present(self, container, line):
- prefix = container.name_without_project.ljust(self.prefix_width)
- return '{prefix} {line}'.format(
- prefix=self.color_func(prefix + ' |'),
- line=line)
+ to_log = '{line}'.format(line=line)
+
+ if self.keep_prefix:
+ prefix = container.name_without_project.ljust(self.prefix_width)
+ to_log = '{prefix} '.format(prefix=self.color_func(prefix + ' |')) + to_log
+ return to_log
-def build_log_presenters(service_names, monochrome):
+
+def build_log_presenters(service_names, monochrome, keep_prefix=True):
"""Return an iterable of functions.
Each function can be used to format the logs output of a container.
@@ -41,7 +42,7 @@ def build_log_presenters(service_names, monochrome):
return text
for color_func in cycle([no_color] if monochrome else colors.rainbow()):
- yield LogPresenter(prefix_width, color_func)
+ yield LogPresenter(prefix_width, color_func, keep_prefix)
def max_name_width(service_names, max_index_width=3):
@@ -54,7 +55,7 @@ def max_name_width(service_names, max_index_width=3):
return max(len(name) for name in service_names) + max_index_width
-class LogPrinter(object):
+class LogPrinter:
"""Print logs from many containers to a single output stream."""
def __init__(self,
@@ -67,7 +68,7 @@ class LogPrinter(object):
self.containers = containers
self.presenters = presenters
self.event_stream = event_stream
- self.output = utils.get_output_stream(output)
+ self.output = output
self.cascade_stop = cascade_stop
self.log_args = log_args or {}
@@ -134,7 +135,10 @@ def build_thread(container, presenter, queue, log_args):
def build_thread_map(initial_containers, presenters, thread_args):
return {
container.id: build_thread(container, next(presenters), *thread_args)
- for container in initial_containers
+ # Container order is unspecified, so they are sorted by name in order to make
+ # container:presenter (log color) assignment deterministic when given a list of containers
+ # with the same names.
+ for container in sorted(initial_containers, key=attrgetter('name'))
}
@@ -154,10 +158,8 @@ class QueueItem(namedtuple('_QueueItem', 'item is_stop exc')):
def tail_container_logs(container, presenter, queue, log_args):
- generator = get_log_generator(container)
-
try:
- for item in generator(container, log_args):
+ for item in build_log_generator(container, log_args):
queue.put(QueueItem.new(presenter.present(container, item)))
except Exception as e:
queue.put(QueueItem.exception(e))
@@ -167,20 +169,6 @@ def tail_container_logs(container, presenter, queue, log_args):
queue.put(QueueItem.stop(container.name))
-def get_log_generator(container):
- if container.has_api_logs:
- return build_log_generator
- return build_no_log_generator
-
-
-def build_no_log_generator(container, log_args):
- """Return a generator that prints a warning about logs and waits for
- container to exit.
- """
- yield "WARNING: no logs are available with the '{}' log driver\n".format(
- container.log_driver)
-
-
def build_log_generator(container, log_args):
# if the container doesn't have a log_stream we need to attach to container
# before log printer starts running
@@ -195,9 +183,9 @@ def build_log_generator(container, log_args):
def wait_on_exit(container):
try:
exit_code = container.wait()
- return "%s exited with code %s\n" % (container.name, exit_code)
+ return "{} exited with code {}\n".format(container.name, exit_code)
except APIError as e:
- return "Unexpected API error for %s (HTTP code %s)\nResponse body:\n%s\n" % (
+ return "Unexpected API error for {} (HTTP code {})\nResponse body:\n{}\n".format(
container.name, e.response.status_code,
e.response.text or '[empty]'
)
@@ -210,10 +198,15 @@ def start_producer_thread(thread_args):
def watch_events(thread_map, event_stream, presenters, thread_args):
+ crashed_containers = set()
for event in event_stream:
if event['action'] == 'stop':
thread_map.pop(event['id'], None)
+ if event['action'] == 'die':
+ thread_map.pop(event['id'], None)
+ crashed_containers.add(event['id'])
+
if event['action'] != 'start':
continue
@@ -223,10 +216,22 @@ def watch_events(thread_map, event_stream, presenters, thread_args):
# Container was stopped and started, we need a new thread
thread_map.pop(event['id'], None)
+ # Container crashed so we should reattach to it
+ if event['id'] in crashed_containers:
+ container = event['container']
+ if not container.is_restarting:
+ try:
+ container.attach_log_stream()
+ except APIError:
+ # Just ignore errors when reattaching to already crashed containers
+ pass
+ crashed_containers.remove(event['id'])
+
thread_map[event['id']] = build_thread(
event['container'],
next(presenters),
- *thread_args)
+ *thread_args
+ )
def consume_queue(queue, cascade_stop):
diff --git a/compose/cli/main.py b/compose/cli/main.py
index a9720583..fabd6087 100644
--- a/compose/cli/main.py
+++ b/compose/cli/main.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
import contextlib
import functools
import json
@@ -14,14 +10,12 @@ from distutils.spawn import find_executable
from inspect import getdoc
from operator import attrgetter
-import docker
+import docker.errors
+import docker.utils
from . import errors
from . import signals
from .. import __version__
-from ..bundle import get_image_digests
-from ..bundle import MissingDigests
-from ..bundle import serialize_bundle
from ..config import ConfigurationError
from ..config import parse_environment
from ..config import parse_labels
@@ -29,10 +23,14 @@ from ..config import resolve_build_args
from ..config.environment import Environment
from ..config.serialize import serialize_config
from ..config.types import VolumeSpec
-from ..const import COMPOSEFILE_V2_2 as V2_2
+from ..const import IS_LINUX_PLATFORM
from ..const import IS_WINDOWS_PLATFORM
from ..errors import StreamParseError
+from ..metrics.decorator import metrics
+from ..parallel import ParallelStreamWriter
from ..progress_stream import StreamOutputError
+from ..project import get_image_digests
+from ..project import MissingDigests
from ..project import NoSuchService
from ..project import OneOffFilter
from ..project import ProjectError
@@ -42,7 +40,10 @@ from ..service import ConvergenceStrategy
from ..service import ImageType
from ..service import NeedsBuildError
from ..service import OperationFailedError
+from ..utils import filter_attached_for_up
+from .colors import AnsiMode
from .command import get_config_from_options
+from .command import get_project_dir
from .command import project_from_options
from .docopt_command import DocoptDispatcher
from .docopt_command import get_handler
@@ -55,57 +56,132 @@ from .log_printer import LogPrinter
from .utils import get_version_info
from .utils import human_readable_file_size
from .utils import yesno
+from compose.metrics.client import MetricsCommand
+from compose.metrics.client import Status
if not IS_WINDOWS_PLATFORM:
from dockerpty.pty import PseudoTerminal, RunOperation, ExecOperation
log = logging.getLogger(__name__)
-console_handler = logging.StreamHandler(sys.stderr)
-def main():
+def main(): # noqa: C901
signals.ignore_sigpipe()
+ command = None
+ try:
+ _, opts, command = DocoptDispatcher.get_command_and_options(
+ TopLevelCommand,
+ get_filtered_args(sys.argv[1:]),
+ {'options_first': True, 'version': get_version_info('compose')})
+ except Exception:
+ pass
try:
- command = dispatch()
- command()
+ command_func = dispatch()
+ command_func()
+ if not IS_LINUX_PLATFORM and command == 'help':
+ print("\nDocker Compose is now in the Docker CLI, try `docker compose` help")
except (KeyboardInterrupt, signals.ShutdownException):
- log.error("Aborting.")
- sys.exit(1)
+ exit_with_metrics(command, "Aborting.", status=Status.CANCELED)
except (UserError, NoSuchService, ConfigurationError,
ProjectError, OperationFailedError) as e:
- log.error(e.msg)
- sys.exit(1)
+ exit_with_metrics(command, e.msg, status=Status.FAILURE)
except BuildError as e:
- log.error("Service '%s' failed to build: %s" % (e.service.name, e.reason))
- sys.exit(1)
+ reason = ""
+ if e.reason:
+ reason = " : " + e.reason
+ exit_with_metrics(command,
+ "Service '{}' failed to build{}".format(e.service.name, reason),
+ status=Status.FAILURE)
except StreamOutputError as e:
- log.error(e)
- sys.exit(1)
+ exit_with_metrics(command, e, status=Status.FAILURE)
except NeedsBuildError as e:
- log.error("Service '%s' needs to be built, but --no-build was passed." % e.service.name)
- sys.exit(1)
+ exit_with_metrics(command,
+ "Service '{}' needs to be built, but --no-build was passed.".format(
+ e.service.name), status=Status.FAILURE)
except NoSuchCommand as e:
commands = "\n".join(parse_doc_section("commands:", getdoc(e.supercommand)))
- log.error("No such command: %s\n\n%s", e.command, commands)
- sys.exit(1)
+ if not IS_LINUX_PLATFORM:
+ commands += "\n\nDocker Compose is now in the Docker CLI, try `docker compose`"
+ exit_with_metrics("", log_msg="No such command: {}\n\n{}".format(
+ e.command, commands), status=Status.FAILURE)
except (errors.ConnectionError, StreamParseError):
- sys.exit(1)
+ exit_with_metrics(command, status=Status.FAILURE)
+ except SystemExit as e:
+ status = Status.SUCCESS
+ if len(sys.argv) > 1 and '--help' not in sys.argv:
+ status = Status.FAILURE
+
+ if command and len(sys.argv) >= 3 and sys.argv[2] == '--help':
+ command = '--help ' + command
+
+ if not command and len(sys.argv) >= 2 and sys.argv[1] == '--help':
+ command = '--help'
+
+ msg = e.args[0] if len(e.args) else ""
+ code = 0
+ if isinstance(e.code, int):
+ code = e.code
+
+ if not IS_LINUX_PLATFORM and not command:
+ msg += "\n\nDocker Compose is now in the Docker CLI, try `docker compose`"
+
+ exit_with_metrics(command, log_msg=msg, status=status,
+ exit_code=code)
+
+
+def get_filtered_args(args):
+ if args[0] in ('-h', '--help'):
+ return []
+ if args[0] == '--version':
+ return ['version']
+
+
+def exit_with_metrics(command, log_msg=None, status=Status.SUCCESS, exit_code=1):
+ if log_msg and command != 'exec':
+ if not exit_code:
+ log.info(log_msg)
+ else:
+ log.error(log_msg)
+
+ MetricsCommand(command, status=status).send_metrics()
+ sys.exit(exit_code)
def dispatch():
- setup_logging()
+ console_stream = sys.stderr
+ console_handler = logging.StreamHandler(console_stream)
+ setup_logging(console_handler)
dispatcher = DocoptDispatcher(
TopLevelCommand,
{'options_first': True, 'version': get_version_info('compose')})
options, handler, command_options = dispatcher.parse(sys.argv[1:])
+
+ ansi_mode = AnsiMode.AUTO
+ try:
+ if options.get("--ansi"):
+ ansi_mode = AnsiMode(options.get("--ansi"))
+ except ValueError:
+ raise UserError(
+ 'Invalid value for --ansi: {}. Expected one of {}.'.format(
+ options.get("--ansi"),
+ ', '.join(m.value for m in AnsiMode)
+ )
+ )
+ if options.get("--no-ansi"):
+ if options.get("--ansi"):
+ raise UserError("--no-ansi and --ansi cannot be combined.")
+ log.warning('--no-ansi option is deprecated and will be removed in future versions. '
+ 'Use `--ansi never` instead.')
+ ansi_mode = AnsiMode.NEVER
+
setup_console_handler(console_handler,
options.get('--verbose'),
- options.get('--no-ansi'),
+ ansi_mode.use_ansi_codes(console_handler.stream),
options.get("--log-level"))
- setup_parallel_logger(options.get('--no-ansi'))
- if options.get('--no-ansi'):
+ setup_parallel_logger(ansi_mode)
+ if ansi_mode is AnsiMode.NEVER:
command_options['--no-color'] = True
return functools.partial(perform_command, options, handler, command_options)
@@ -127,23 +203,23 @@ def perform_command(options, handler, command_options):
handler(command, command_options)
-def setup_logging():
+def setup_logging(console_handler):
root_logger = logging.getLogger()
root_logger.addHandler(console_handler)
root_logger.setLevel(logging.DEBUG)
- # Disable requests logging
+ # Disable requests and docker-py logging
+ logging.getLogger("urllib3").propagate = False
logging.getLogger("requests").propagate = False
+ logging.getLogger("docker").propagate = False
-def setup_parallel_logger(noansi):
- if noansi:
- import compose.parallel
- compose.parallel.ParallelStreamWriter.set_noansi()
+def setup_parallel_logger(ansi_mode):
+ ParallelStreamWriter.set_default_ansi_mode(ansi_mode)
-def setup_console_handler(handler, verbose, noansi=False, level=None):
- if handler.stream.isatty() and noansi is False:
+def setup_console_handler(handler, verbose, use_console_formatter=True, level=None):
+ if use_console_formatter:
format_class = ConsoleWarningFormatter
else:
format_class = logging.Formatter
@@ -179,11 +255,11 @@ def parse_doc_section(name, source):
return [s.strip() for s in pattern.findall(source)]
-class TopLevelCommand(object):
+class TopLevelCommand:
"""Define and run multi-container applications with Docker.
Usage:
- docker-compose [-f <arg>...] [options] [COMMAND] [ARGS...]
+ docker-compose [-f <arg>...] [--profile <name>...] [options] [--] [COMMAND] [ARGS...]
docker-compose -h|--help
Options:
@@ -191,9 +267,12 @@ class TopLevelCommand(object):
(default: docker-compose.yml)
-p, --project-name NAME Specify an alternate project name
(default: directory name)
+ --profile NAME Specify a profile to enable
+ -c, --context NAME Specify a context name
--verbose Show more output
--log-level LEVEL Set log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
- --no-ansi Do not print ANSI control characters
+ --ansi (never|always|auto) Control when to print ANSI control characters
+ --no-ansi Do not print ANSI control characters (DEPRECATED)
-v, --version Print version and exit
-H, --host HOST Daemon socket to connect to
@@ -206,15 +285,15 @@ class TopLevelCommand(object):
name specified in the client certificate
--project-directory PATH Specify an alternate working directory
(default: the path of the Compose file)
- --compatibility If set, Compose will attempt to convert deploy
- keys in v3 files to their non-Swarm equivalent
+ --compatibility If set, Compose will attempt to convert keys
+ in v3 files to their non-Swarm equivalent (DEPRECATED)
+ --env-file PATH Specify an alternate environment file
Commands:
build Build or rebuild services
- bundle Generate a Docker bundle from the Compose file
config Validate and view the Compose file
create Create services
- down Stop and remove containers, networks, images, and volumes
+ down Stop and remove resources
events Receive real time events from containers
exec Execute a command in a running container
help Get help on a command
@@ -235,14 +314,23 @@ class TopLevelCommand(object):
top Display the running processes
unpause Unpause services
up Create and start containers
- version Show the Docker-Compose version information
+ version Show version information and quit
"""
- def __init__(self, project, project_dir='.', options=None):
+ def __init__(self, project, options=None):
self.project = project
- self.project_dir = '.'
self.toplevel_options = options or {}
+ @property
+ def project_dir(self):
+ return get_project_dir(self.toplevel_options)
+
+ @property
+ def toplevel_environment(self):
+ environment_file = self.toplevel_options.get('--env-file')
+ return Environment.from_env_file(self.project_dir, environment_file)
+
+ @metrics()
def build(self, options):
"""
Build or rebuild services.
@@ -251,15 +339,19 @@ class TopLevelCommand(object):
e.g. `composetest_db`. If you change a service's `Dockerfile` or the
contents of its build directory, you can run `docker-compose build` to rebuild it.
- Usage: build [options] [--build-arg key=val...] [SERVICE...]
+ Usage: build [options] [--build-arg key=val...] [--] [SERVICE...]
Options:
+ --build-arg key=val Set build-time variables for services.
--compress Compress the build context using gzip.
--force-rm Always remove intermediate containers.
+ -m, --memory MEM Set memory limit for the build container.
--no-cache Do not use cache when building the image.
+ --no-rm Do not remove intermediate containers after a successful build.
+ --parallel Build images in parallel.
+ --progress string Set type of progress output (auto, plain, tty).
--pull Always attempt to pull a newer version of the image.
- -m, --memory MEM Sets memory limit for the build container.
- --build-arg key=val Set build-time variables for services.
+ -q, --quiet Don't print anything to STDOUT
"""
service_names = options['SERVICE']
build_args = options.get('--build-arg', None)
@@ -269,8 +361,9 @@ class TopLevelCommand(object):
'--build-arg is only supported when services are specified for API version < 1.25.'
' Please use a Compose file version > 2.2 or specify which services to build.'
)
- environment = Environment.from_env_file(self.project_dir)
- build_args = resolve_build_args(build_args, environment)
+ build_args = resolve_build_args(build_args, self.toplevel_environment)
+
+ native_builder = self.toplevel_environment.get_boolean('COMPOSE_DOCKER_CLI_BUILD', True)
self.project.build(
service_names=options['SERVICE'],
@@ -278,42 +371,16 @@ class TopLevelCommand(object):
pull=bool(options.get('--pull', False)),
force_rm=bool(options.get('--force-rm', False)),
memory=options.get('--memory'),
+ rm=not bool(options.get('--no-rm', False)),
build_args=build_args,
gzip=options.get('--compress', False),
+ parallel_build=options.get('--parallel', False),
+ silent=options.get('--quiet', False),
+ cli=native_builder,
+ progress=options.get('--progress'),
)
- def bundle(self, options):
- """
- Generate a Distributed Application Bundle (DAB) from the Compose file.
-
- Images must have digests stored, which requires interaction with a
- Docker registry. If digests aren't stored for all images, you can fetch
- them with `docker-compose pull` or `docker-compose push`. To push images
- automatically when bundling, pass `--push-images`. Only services with
- a `build` option specified will have their images pushed.
-
- Usage: bundle [options]
-
- Options:
- --push-images Automatically push images for any services
- which have a `build` option specified.
-
- -o, --output PATH Path to write the bundle file to.
- Defaults to "<project name>.dab".
- """
- compose_config = get_config_from_options(self.project_dir, self.toplevel_options)
-
- output = options["--output"]
- if not output:
- output = "{}.dab".format(self.project.name)
-
- image_digests = image_digests_for_project(self.project, options['--push-images'])
-
- with open(output, 'w') as f:
- f.write(serialize_bundle(compose_config, image_digests))
-
- log.info("Wrote bundle to {}".format(output))
-
+ @metrics()
def config(self, options):
"""
Validate and view the Compose file.
@@ -322,24 +389,38 @@ class TopLevelCommand(object):
Options:
--resolve-image-digests Pin image tags to digests.
+ --no-interpolate Don't interpolate environment variables.
-q, --quiet Only validate the configuration, don't print
anything.
+ --profiles Print the profile names, one per line.
--services Print the service names, one per line.
--volumes Print the volume names, one per line.
-
+ --hash="*" Print the service config hash, one per line.
+ Set "service1,service2" for a list of specified services
+ or use the wildcard symbol to display all services.
"""
- compose_config = get_config_from_options(self.project_dir, self.toplevel_options)
+ additional_options = {'--no-interpolate': options.get('--no-interpolate')}
+ compose_config = get_config_from_options('.', self.toplevel_options, additional_options)
image_digests = None
if options['--resolve-image-digests']:
- self.project = project_from_options('.', self.toplevel_options)
+ self.project = project_from_options('.', self.toplevel_options, additional_options)
with errors.handle_connection_errors(self.project.client):
image_digests = image_digests_for_project(self.project)
if options['--quiet']:
return
+ if options['--profiles']:
+ profiles = set()
+ for service in compose_config.services:
+ if 'profiles' in service:
+ for profile in service['profiles']:
+ profiles.add(profile)
+ print('\n'.join(sorted(profiles)))
+ return
+
if options['--services']:
print('\n'.join(service['name'] for service in compose_config.services))
return
@@ -348,8 +429,18 @@ class TopLevelCommand(object):
print('\n'.join(volume for volume in compose_config.volumes))
return
- print(serialize_config(compose_config, image_digests))
+ if options['--hash'] is not None:
+ h = options['--hash']
+ self.project = project_from_options('.', self.toplevel_options, additional_options)
+        services = h.split(',') if h != '*' else None
+ with errors.handle_connection_errors(self.project.client):
+ for service in self.project.get_services(services):
+ print('{} {}'.format(service.name, service.config_hash))
+ return
+
+ print(serialize_config(compose_config, image_digests, not options['--no-interpolate']))
+ @metrics()
def create(self, options):
"""
Creates containers for a service.
@@ -367,7 +458,7 @@ class TopLevelCommand(object):
"""
service_names = options['SERVICE']
- log.warn(
+ log.warning(
'The create command is deprecated. '
'Use the up command with the --no-start flag instead.'
)
@@ -378,6 +469,7 @@ class TopLevelCommand(object):
do_build=build_action_from_opts(options),
)
+ @metrics()
def down(self, options):
"""
Stops containers and removes containers, networks, volumes, and images
@@ -406,8 +498,7 @@ class TopLevelCommand(object):
-t, --timeout TIMEOUT Specify a shutdown timeout in seconds.
(default: 10)
"""
- environment = Environment.from_env_file(self.project_dir)
- ignore_orphans = environment.get_boolean('COMPOSE_IGNORE_ORPHANS')
+ ignore_orphans = self.toplevel_environment.get_boolean('COMPOSE_IGNORE_ORPHANS')
if ignore_orphans and options['--remove-orphans']:
raise UserError("COMPOSE_IGNORE_ORPHANS and --remove-orphans cannot be combined.")
@@ -425,11 +516,12 @@ class TopLevelCommand(object):
"""
Receive real time events from containers.
- Usage: events [options] [SERVICE...]
+ Usage: events [options] [--] [SERVICE...]
Options:
--json Output events as a stream of json objects
"""
+
def format_event(event):
attributes = ["%s=%s" % item for item in event['attributes'].items()]
return ("{time} {type} {action} {id} ({attrs})").format(
@@ -446,11 +538,12 @@ class TopLevelCommand(object):
print(formatter(event))
sys.stdout.flush()
+ @metrics("exec")
def exec_command(self, options):
"""
Execute a command in a running container
- Usage: exec [options] [-e KEY=VAL...] SERVICE COMMAND [ARGS...]
+ Usage: exec [options] [-e KEY=VAL...] [--] SERVICE COMMAND [ARGS...]
Options:
-d, --detach Detached mode: Run command in the background.
@@ -464,8 +557,7 @@ class TopLevelCommand(object):
not supported in API < 1.25)
-w, --workdir DIR Path to workdir directory for this command.
"""
- environment = Environment.from_env_file(self.project_dir)
- use_cli = not environment.get_boolean('COMPOSE_INTERACTIVE_NO_CLI')
+ use_cli = not self.toplevel_environment.get_boolean('COMPOSE_INTERACTIVE_NO_CLI')
index = int(options.get('--index'))
service = self.project.get_service(options['SERVICE'])
detach = options.get('--detach')
@@ -488,7 +580,7 @@ class TopLevelCommand(object):
if IS_WINDOWS_PLATFORM or use_cli and not detach:
sys.exit(call_docker(
build_exec_command(options, container.id, command),
- self.toplevel_options)
+ self.toplevel_options, self.toplevel_environment)
)
create_exec_options = {
@@ -523,6 +615,7 @@ class TopLevelCommand(object):
sys.exit(exit_code)
@classmethod
+ @metrics()
def help(cls, options):
"""
Get help on a command.
@@ -536,10 +629,11 @@ class TopLevelCommand(object):
print(getdoc(subject))
+ @metrics()
def images(self, options):
"""
List images used by the created containers.
- Usage: images [options] [SERVICE...]
+ Usage: images [options] [--] [SERVICE...]
Options:
-q, --quiet Only display IDs
@@ -550,39 +644,52 @@ class TopLevelCommand(object):
key=attrgetter('name'))
if options['--quiet']:
- for image in set(c.image for c in containers):
+ for image in {c.image for c in containers}:
print(image.split(':')[1])
- else:
- headers = [
- 'Container',
- 'Repository',
- 'Tag',
- 'Image Id',
- 'Size'
- ]
- rows = []
- for container in containers:
- image_config = container.image_config
- repo_tags = (
- image_config['RepoTags'][0].rsplit(':', 1) if image_config['RepoTags']
- else ('<none>', '<none>')
- )
- image_id = image_config['Id'].split(':')[1][:12]
- size = human_readable_file_size(image_config['Size'])
- rows.append([
- container.name,
- repo_tags[0],
- repo_tags[1],
- image_id,
- size
- ])
- print(Formatter().table(headers, rows))
+ return
+
+ def add_default_tag(img_name):
+ if ':' not in img_name.split('/')[-1]:
+ return '{}:latest'.format(img_name)
+ return img_name
+
+ headers = [
+ 'Container',
+ 'Repository',
+ 'Tag',
+ 'Image Id',
+ 'Size'
+ ]
+ rows = []
+ for container in containers:
+ image_config = container.image_config
+ service = self.project.get_service(container.service)
+ index = 0
+ img_name = add_default_tag(service.image_name)
+            # RepoTags can be None for untagged images; guard before the lookup.
+            if image_config['RepoTags'] and img_name in image_config['RepoTags']:
+ index = image_config['RepoTags'].index(img_name)
+ repo_tags = (
+ image_config['RepoTags'][index].rsplit(':', 1) if image_config['RepoTags']
+ else ('<none>', '<none>')
+ )
+ image_id = image_config['Id'].split(':')[1][:12]
+ size = human_readable_file_size(image_config['Size'])
+ rows.append([
+ container.name,
+ repo_tags[0],
+ repo_tags[1],
+ image_id,
+ size
+ ])
+ print(Formatter.table(headers, rows))
+
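
A quick check of add_default_tag(), derived directly from the code above: the
:latest suffix is appended only when the last path segment carries no tag:

    assert add_default_tag('redis') == 'redis:latest'
    assert add_default_tag('redis:6') == 'redis:6'
    assert add_default_tag('registry:5000/redis') == 'registry:5000/redis:latest'
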
+ @metrics()
def kill(self, options):
"""
Force stop service containers.
- Usage: kill [options] [SERVICE...]
+ Usage: kill [options] [--] [SERVICE...]
Options:
-s SIGNAL SIGNAL to send to the container.
@@ -592,18 +699,20 @@ class TopLevelCommand(object):
self.project.kill(service_names=options['SERVICE'], signal=signal)
+ @metrics()
def logs(self, options):
"""
View output from containers.
- Usage: logs [options] [SERVICE...]
+ Usage: logs [options] [--] [SERVICE...]
Options:
- --no-color Produce monochrome output.
- -f, --follow Follow log output.
- -t, --timestamps Show timestamps.
- --tail="all" Number of lines to show from the end of the logs
- for each container.
+ --no-color Produce monochrome output.
+ -f, --follow Follow log output.
+ -t, --timestamps Show timestamps.
+ --tail="all" Number of lines to show from the end of the logs
+ for each container.
+ --no-log-prefix Don't print prefix in logs.
"""
containers = self.project.containers(service_names=options['SERVICE'], stopped=True)
@@ -624,8 +733,10 @@ class TopLevelCommand(object):
containers,
options['--no-color'],
log_args,
- event_stream=self.project.events(service_names=options['SERVICE'])).run()
+ event_stream=self.project.events(service_names=options['SERVICE']),
+ keep_prefix=not options['--no-log-prefix']).run()
+ @metrics()
def pause(self, options):
"""
Pause services.
@@ -635,11 +746,12 @@ class TopLevelCommand(object):
containers = self.project.pause(service_names=options['SERVICE'])
exit_if(not containers, 'No containers to pause', 1)
+ @metrics()
def port(self, options):
"""
Print the public port for a port binding.
- Usage: port [options] SERVICE PRIVATE_PORT
+ Usage: port [options] [--] SERVICE PRIVATE_PORT
Options:
--protocol=proto tcp or udp [default: tcp]
@@ -656,16 +768,18 @@ class TopLevelCommand(object):
options['PRIVATE_PORT'],
protocol=options.get('--protocol') or 'tcp') or '')
+ @metrics()
def ps(self, options):
"""
List containers.
- Usage: ps [options] [SERVICE...]
+ Usage: ps [options] [--] [SERVICE...]
Options:
-q, --quiet Only display IDs
--services Display services
--filter KEY=VAL Filter services by a property
+ -a, --all Show all stopped containers (including those created by the run command)
"""
if options['--quiet'] and options['--services']:
raise UserError('--quiet and --services cannot be combined')
@@ -678,10 +792,15 @@ class TopLevelCommand(object):
print('\n'.join(service.name for service in services))
return
- containers = sorted(
- self.project.containers(service_names=options['SERVICE'], stopped=True) +
- self.project.containers(service_names=options['SERVICE'], one_off=OneOffFilter.only),
- key=attrgetter('name'))
+ if options['--all']:
+ containers = sorted(self.project.containers(service_names=options['SERVICE'],
+ one_off=OneOffFilter.include, stopped=True),
+ key=attrgetter('name'))
+ else:
+ containers = sorted(
+ self.project.containers(service_names=options['SERVICE'], stopped=True) +
+ self.project.containers(service_names=options['SERVICE'], one_off=OneOffFilter.only),
+ key=attrgetter('name'))
if options['--quiet']:
for container in containers:
@@ -704,13 +823,14 @@ class TopLevelCommand(object):
container.human_readable_state,
container.human_readable_ports,
])
- print(Formatter().table(headers, rows))
+ print(Formatter.table(headers, rows))
+ @metrics()
def pull(self, options):
"""
Pulls images for services defined in a Compose file, but does not start the containers.
- Usage: pull [options] [SERVICE...]
+ Usage: pull [options] [--] [SERVICE...]
Options:
--ignore-pull-failures  Pull what it can and ignore images with pull failures.
@@ -720,7 +840,7 @@ class TopLevelCommand(object):
--include-deps Also pull services declared as dependencies
"""
if options.get('--parallel'):
- log.warn('--parallel option is deprecated and will be removed in future versions.')
+ log.warning('--parallel option is deprecated and will be removed in future versions.')
self.project.pull(
service_names=options['SERVICE'],
ignore_pull_failures=options.get('--ignore-pull-failures'),
@@ -729,11 +849,12 @@ class TopLevelCommand(object):
include_deps=options.get('--include-deps'),
)
+ @metrics()
def push(self, options):
"""
Pushes images for services.
- Usage: push [options] [SERVICE...]
+ Usage: push [options] [--] [SERVICE...]
Options:
--ignore-push-failures  Push what it can and ignore images with push failures.
@@ -743,6 +864,7 @@ class TopLevelCommand(object):
ignore_push_failures=options.get('--ignore-push-failures')
)
+ @metrics()
def rm(self, options):
"""
Removes stopped service containers.
@@ -752,7 +874,7 @@ class TopLevelCommand(object):
Any data which is not in a volume will be lost.
- Usage: rm [options] [SERVICE...]
+ Usage: rm [options] [--] [SERVICE...]
Options:
-f, --force Don't ask to confirm removal
@@ -761,7 +883,7 @@ class TopLevelCommand(object):
-a, --all Deprecated - no effect.
"""
if options.get('--all'):
- log.warn(
+ log.warning(
'--all flag is obsolete. This is now the default behavior '
'of `docker-compose rm`'
)
@@ -787,6 +909,7 @@ class TopLevelCommand(object):
else:
print("No stopped containers")
+ @metrics()
def run(self, options):
"""
Run a one-off command on a service.
@@ -800,7 +923,7 @@ class TopLevelCommand(object):
`docker-compose run --no-deps SERVICE COMMAND [ARGS...]`.
Usage:
- run [options] [-v VOLUME...] [-p PORT...] [-e KEY=VAL...] [-l KEY=VALUE...]
+ run [options] [-v VOLUME...] [-p PORT...] [-e KEY=VAL...] [-l KEY=VALUE...] [--]
SERVICE [COMMAND] [ARGS...]
Options:
@@ -839,12 +962,15 @@ class TopLevelCommand(object):
else:
command = service.options.get('command')
- container_options = build_container_options(options, detach, command)
+ options['stdin_open'] = service.options.get('stdin_open', True)
+
+ container_options = build_one_off_container_options(options, detach, command)
run_one_off_container(
container_options, self.project, service, options,
- self.toplevel_options, self.project_dir
+ self.toplevel_options, self.toplevel_environment
)
+ @metrics()
def scale(self, options):
"""
Set number of containers to run for a service.
@@ -865,20 +991,15 @@ class TopLevelCommand(object):
"""
timeout = timeout_from_opts(options)
- if self.project.config_version == V2_2:
- raise UserError(
- 'The scale command is incompatible with the v2.2 format. '
- 'Use the up command with the --scale flag instead.'
- )
- else:
- log.warn(
- 'The scale command is deprecated. '
- 'Use the up command with the --scale flag instead.'
- )
+ log.warning(
+ 'The scale command is deprecated. '
+ 'Use the up command with the --scale flag instead.'
+ )
for service_name, num in parse_scale_args(options['SERVICE=NUM']).items():
self.project.get_service(service_name).scale(num, timeout=timeout)
+ @metrics()
def start(self, options):
"""
Start existing containers.
@@ -888,13 +1009,14 @@ class TopLevelCommand(object):
containers = self.project.start(service_names=options['SERVICE'])
exit_if(not containers, 'No containers to start', 1)
+ @metrics()
def stop(self, options):
"""
Stop running containers without removing them.
They can be started again with `docker-compose start`.
- Usage: stop [options] [SERVICE...]
+ Usage: stop [options] [--] [SERVICE...]
Options:
-t, --timeout TIMEOUT Specify a shutdown timeout in seconds.
@@ -903,11 +1025,12 @@ class TopLevelCommand(object):
timeout = timeout_from_opts(options)
self.project.stop(service_names=options['SERVICE'], timeout=timeout)
+ @metrics()
def restart(self, options):
"""
Restart running containers.
- Usage: restart [options] [SERVICE...]
+ Usage: restart [options] [--] [SERVICE...]
Options:
-t, --timeout TIMEOUT Specify a shutdown timeout in seconds.
@@ -917,6 +1040,7 @@ class TopLevelCommand(object):
containers = self.project.restart(service_names=options['SERVICE'], timeout=timeout)
exit_if(not containers, 'No containers to restart', 1)
+ @metrics()
def top(self, options):
"""
Display the running processes
@@ -942,8 +1066,9 @@ class TopLevelCommand(object):
rows.append(process)
print(container.name)
- print(Formatter().table(headers, rows))
+ print(Formatter.table(headers, rows))
+ @metrics()
def unpause(self, options):
"""
Unpause services.
@@ -953,6 +1078,7 @@ class TopLevelCommand(object):
containers = self.project.unpause(service_names=options['SERVICE'])
exit_if(not containers, 'No containers to unpause', 1)
+ @metrics()
def up(self, options):
"""
Builds, (re)creates, starts, and attaches to containers for a service.
@@ -972,7 +1098,7 @@ class TopLevelCommand(object):
If you want to force Compose to stop and recreate all containers, use the
`--force-recreate` flag.
- Usage: up [options] [--scale SERVICE=NUM...] [SERVICE...]
+ Usage: up [options] [--scale SERVICE=NUM...] [--] [SERVICE...]
Options:
-d, --detach Detached mode: Run containers in the background,
@@ -992,6 +1118,7 @@ class TopLevelCommand(object):
--build Build images before starting containers.
--abort-on-container-exit Stops all containers if any container was
stopped. Incompatible with -d.
+ --attach-dependencies Attach to dependent containers.
-t, --timeout TIMEOUT Use this timeout in seconds for container
shutdown when attached or when containers are
already running. (default: 10)
@@ -1003,6 +1130,7 @@ class TopLevelCommand(object):
container. Implies --abort-on-container-exit.
--scale SERVICE=NUM Scale SERVICE to NUM instances. Overrides the
`scale` setting in the Compose file if present.
+ --no-log-prefix Don't print prefix in logs.
"""
start_deps = not options['--no-deps']
always_recreate_deps = options['--always-recreate-deps']
@@ -1013,20 +1141,24 @@ class TopLevelCommand(object):
remove_orphans = options['--remove-orphans']
detached = options.get('--detach')
no_start = options.get('--no-start')
+ attach_dependencies = options.get('--attach-dependencies')
+ keep_prefix = not options.get('--no-log-prefix')
- if detached and (cascade_stop or exit_value_from):
- raise UserError("--abort-on-container-exit and -d cannot be combined.")
+ if detached and (cascade_stop or exit_value_from or attach_dependencies):
+ raise UserError(
+ "-d cannot be combined with --abort-on-container-exit or --attach-dependencies.")
- environment = Environment.from_env_file(self.project_dir)
- ignore_orphans = environment.get_boolean('COMPOSE_IGNORE_ORPHANS')
+ ignore_orphans = self.toplevel_environment.get_boolean('COMPOSE_IGNORE_ORPHANS')
if ignore_orphans and remove_orphans:
raise UserError("COMPOSE_IGNORE_ORPHANS and --remove-orphans cannot be combined.")
- opts = ['--detach', '--abort-on-container-exit', '--exit-code-from']
+ opts = ['--detach', '--abort-on-container-exit', '--exit-code-from', '--attach-dependencies']
for excluded in [x for x in opts if options.get(x) and no_start]:
raise UserError('--no-start and {} cannot be combined.'.format(excluded))
+ native_builder = self.toplevel_environment.get_boolean('COMPOSE_DOCKER_CLI_BUILD', True)
+
with up_shutdown_context(self.project, service_names, timeout, detached):
warn_for_swarm_mode(self.project.client)
@@ -1046,6 +1178,8 @@ class TopLevelCommand(object):
reset_container_image=rebuild,
renew_anonymous_volumes=options.get('--renew-anon-volumes'),
silent=options.get('--quiet-pull'),
+ cli=native_builder,
+ attach_dependencies=attach_dependencies,
)
try:
@@ -1054,7 +1188,7 @@ class TopLevelCommand(object):
log.error(
"The image for the service you're trying to recreate has been removed. "
"If you continue, volume data could be lost. Consider backing up your data "
- "before continuing.\n".format(e.explanation)
+ "before continuing.\n"
)
res = yesno("Continue with the new image? [yN]", False)
if res is None or not res:
@@ -1065,7 +1199,10 @@ class TopLevelCommand(object):
if detached or no_start:
return
- attached_containers = filter_containers_to_service_names(to_attach, service_names)
+ attached_containers = filter_attached_containers(
+ to_attach,
+ service_names,
+ attach_dependencies)
log_printer = log_printer_from_project(
self.project,
@@ -1073,7 +1210,8 @@ class TopLevelCommand(object):
options['--no-color'],
{'follow': True},
cascade_stop,
- event_stream=self.project.events(service_names=service_names))
+ event_stream=self.project.events(service_names=service_names),
+ keep_prefix=keep_prefix)
print("Attaching to", list_containers(log_printer.containers))
cascade_starter = log_printer.run()
@@ -1085,12 +1223,16 @@ class TopLevelCommand(object):
)
self.project.stop(service_names=service_names, timeout=timeout)
+ if exit_value_from:
+ exit_code = compute_service_exit_code(exit_value_from, attached_containers)
+
sys.exit(exit_code)
@classmethod
+ @metrics()
def version(cls, options):
"""
- Show version informations
+ Show version information and quit.
Usage: version [--short]
@@ -1103,33 +1245,33 @@ class TopLevelCommand(object):
print(get_version_info('full'))
+def compute_service_exit_code(exit_value_from, attached_containers):
+ candidates = list(filter(
+ lambda c: c.service == exit_value_from,
+ attached_containers))
+ if not candidates:
+ log.error(
+ 'No containers matching the spec "{}" '
+ 'were run.'.format(exit_value_from)
+ )
+ return 2
+ if len(candidates) > 1:
+        # filter() returns a lazy iterator on Python 3 and cannot be indexed;
+        # materialize the exit codes and take the first non-zero one, falling
+        # back to 0 when every matching container exited cleanly.
+        exit_values = [c.inspect()['State']['ExitCode'] for c in candidates]
+
+        return next((code for code in exit_values if code != 0), 0)
+ return candidates[0].inspect()['State']['ExitCode']
+
+
def compute_exit_code(exit_value_from, attached_containers, cascade_starter, all_containers):
exit_code = 0
- if exit_value_from:
- candidates = list(filter(
- lambda c: c.service == exit_value_from,
- attached_containers))
- if not candidates:
- log.error(
- 'No containers matching the spec "{0}" '
- 'were run.'.format(exit_value_from)
- )
- exit_code = 2
- elif len(candidates) > 1:
- exit_values = filter(
- lambda e: e != 0,
- [c.inspect()['State']['ExitCode'] for c in candidates]
- )
-
- exit_code = exit_values[0]
- else:
- exit_code = candidates[0].inspect()['State']['ExitCode']
- else:
- for e in all_containers:
- if (not e.is_running and cascade_starter == e.name):
- if not e.exit_code == 0:
- exit_code = e.exit_code
- break
+    for e in all_containers:
+        if not e.is_running and cascade_starter == e.name:
+            if e.exit_code != 0:
+                exit_code = e.exit_code
+                break
return exit_code
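
To illustrate the --exit-code-from rules above with stub containers (names
and codes are hypothetical): the first non-zero exit code among matching
containers wins, an all-zero match yields 0, and no match at all yields 2:

    class FakeContainer:
        def __init__(self, service, exit_code):
            self.service = service
            self._exit_code = exit_code

        def inspect(self):
            return {'State': {'ExitCode': self._exit_code}}

    containers = [FakeContainer('web', 0), FakeContainer('web', 137)]
    assert compute_service_exit_code('web', containers) == 137
    assert compute_service_exit_code('db', containers) == 2  # nothing matched
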
@@ -1158,12 +1300,10 @@ def timeout_from_opts(options):
return None if timeout is None else int(timeout)
-def image_digests_for_project(project, allow_push=False):
+def image_digests_for_project(project):
try:
- return get_image_digests(
- project,
- allow_push=allow_push
- )
+ return get_image_digests(project)
+
except MissingDigests as e:
def list_images(images):
return "\n".join(" {}".format(name) for name in sorted(images))
@@ -1172,7 +1312,7 @@ def image_digests_for_project(project, allow_push=False):
if e.needs_push:
command_hint = (
- "Use `docker-compose push {}` to push them. "
+ "Use `docker push {}` to push them. "
.format(" ".join(sorted(e.needs_push)))
)
paras += [
@@ -1183,7 +1323,7 @@ def image_digests_for_project(project, allow_push=False):
if e.needs_pull:
command_hint = (
- "Use `docker-compose pull {}` to pull them. "
+ "Use `docker pull {}` to pull them. "
.format(" ".join(sorted(e.needs_pull)))
)
@@ -1200,7 +1340,7 @@ def exitval_from_opts(options, project):
exit_value_from = options.get('--exit-code-from')
if exit_value_from:
if not options.get('--abort-on-container-exit'):
- log.warn('using --exit-code-from implies --abort-on-container-exit')
+ log.warning('using --exit-code-from implies --abort-on-container-exit')
options['--abort-on-container-exit'] = True
if exit_value_from not in [s.name for s in project.get_services()]:
log.error('No service named "%s" was found in your compose file.',
@@ -1231,11 +1371,11 @@ def build_action_from_opts(options):
return BuildAction.none
-def build_container_options(options, detach, command):
+def build_one_off_container_options(options, detach, command):
container_options = {
'command': command,
'tty': not (detach or options['-T'] or not sys.stdin.isatty()),
- 'stdin_open': not detach,
+ 'stdin_open': options.get('stdin_open'),
'detach': detach,
}
@@ -1252,8 +1392,8 @@ def build_container_options(options, detach, command):
[""] if options['--entrypoint'] == '' else options['--entrypoint']
)
- if options['--rm']:
- container_options['restart'] = None
+ # Ensure that run command remains one-off (issue #6302)
+ container_options['restart'] = None
if options['--user']:
container_options['user'] = options.get('--user')
@@ -1278,37 +1418,36 @@ def build_container_options(options, detach, command):
def run_one_off_container(container_options, project, service, options, toplevel_options,
- project_dir='.'):
- if not options['--no-deps']:
- deps = service.get_dependency_names()
- if deps:
- project.up(
- service_names=deps,
- start_deps=True,
- strategy=ConvergenceStrategy.never,
- rescale=False
- )
-
- project.initialize()
-
- container = service.create_container(
- quiet=True,
+ toplevel_environment):
+ native_builder = toplevel_environment.get_boolean('COMPOSE_DOCKER_CLI_BUILD')
+ detach = options.get('--detach')
+ use_network_aliases = options.get('--use-aliases')
+ service.scale_num = 1
+ containers = project.up(
+ service_names=[service.name],
+ start_deps=not options['--no-deps'],
+ strategy=ConvergenceStrategy.never,
+ detached=True,
+ rescale=False,
+ cli=native_builder,
one_off=True,
- **container_options)
-
- use_network_aliases = options['--use-aliases']
+ override_options=container_options,
+ )
+ try:
+ container = next(c for c in containers if c.service == service.name)
+ except StopIteration:
+ raise OperationFailedError('Could not bring up the requested service')
- if options.get('--detach'):
+ if detach:
service.start_container(container, use_network_aliases)
print(container.name)
return
- def remove_container(force=False):
+ def remove_container():
if options['--rm']:
project.client.remove_container(container.id, force=True, v=True)
- environment = Environment.from_env_file(project_dir)
- use_cli = not environment.get_boolean('COMPOSE_INTERACTIVE_NO_CLI')
+ use_cli = not toplevel_environment.get_boolean('COMPOSE_INTERACTIVE_NO_CLI')
signals.set_signal_handler_to_shutdown()
signals.set_signal_handler_to_hang_up()
@@ -1317,8 +1456,8 @@ def run_one_off_container(container_options, project, service, options, toplevel
if IS_WINDOWS_PLATFORM or use_cli:
service.connect_container_to_networks(container, use_network_aliases)
exit_code = call_docker(
- ["start", "--attach", "--interactive", container.id],
- toplevel_options
+ get_docker_start_call(container_options, container.id),
+ toplevel_options, toplevel_environment
)
else:
operation = RunOperation(
@@ -1337,37 +1476,46 @@ def run_one_off_container(container_options, project, service, options, toplevel
exit_code = 1
except (signals.ShutdownException, signals.HangUpException):
project.client.kill(container.id)
- remove_container(force=True)
+ remove_container()
sys.exit(2)
remove_container()
sys.exit(exit_code)
+def get_docker_start_call(container_options, container_id):
+ docker_call = ["start"]
+ if not container_options.get('detach'):
+ docker_call.append("--attach")
+ if container_options.get('stdin_open'):
+ docker_call.append("--interactive")
+ docker_call.append(container_id)
+ return docker_call
+
+
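
get_docker_start_call() mirrors the one-off container options onto
`docker start` flags; the container ID below is a placeholder:

    opts = {'detach': False, 'stdin_open': True}
    assert get_docker_start_call(opts, 'abc123') == [
        'start', '--attach', '--interactive', 'abc123'
    ]
    assert get_docker_start_call({'detach': True}, 'abc123') == ['start', 'abc123']
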
def log_printer_from_project(
- project,
- containers,
- monochrome,
- log_args,
- cascade_stop=False,
- event_stream=None,
+ project,
+ containers,
+ monochrome,
+ log_args,
+ cascade_stop=False,
+ event_stream=None,
+ keep_prefix=True,
):
return LogPrinter(
- containers,
- build_log_presenters(project.service_names, monochrome),
+ [c for c in containers if c.log_driver not in (None, 'none')],
+ build_log_presenters(project.service_names, monochrome, keep_prefix),
event_stream or project.events(),
cascade_stop=cascade_stop,
log_args=log_args)
-def filter_containers_to_service_names(containers, service_names):
- if not service_names:
- return containers
-
- return [
- container
- for container in containers if container.service in service_names
- ]
+def filter_attached_containers(containers, service_names, attach_dependencies=False):
+ return filter_attached_for_up(
+ containers,
+ service_names,
+ attach_dependencies,
+ lambda container: container.service)
@contextlib.contextmanager
@@ -1398,7 +1546,7 @@ def exit_if(condition, message, exit_code):
raise SystemExit(exit_code)
-def call_docker(args, dockeropts):
+def call_docker(args, dockeropts, environment):
executable_path = find_executable('docker')
if not executable_path:
raise UserError(errors.docker_not_found_msg("Couldn't find `docker` binary."))
@@ -1409,6 +1557,7 @@ def call_docker(args, dockeropts):
key = dockeropts.get('--tlskey')
verify = dockeropts.get('--tlsverify')
host = dockeropts.get('--host')
+ context = dockeropts.get('--context')
tls_options = []
if tls:
tls_options.append('--tls')
@@ -1421,12 +1570,20 @@ def call_docker(args, dockeropts):
if verify:
tls_options.append('--tlsverify')
if host:
- tls_options.extend(['--host', host.lstrip('=')])
+ tls_options.extend(
+ ['--host', re.sub(r'^https?://', 'tcp://', host.lstrip('='))]
+ )
+ if context:
+ tls_options.extend(
+ ['--context', context]
+ )
args = [executable_path] + tls_options + args
log.debug(" ".join(map(pipes.quote, args)))
- return subprocess.call(args)
+ filtered_env = {k: v for k, v in environment.items() if v is not None}
+
+ return subprocess.call(args, env=filtered_env)
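
The host rewrite above normalizes DOCKER_HOST-style URLs, since the docker
CLI rejects http/https schemes for --host; the hostname is a placeholder and
the leading '=' matches the existing lstrip('=') call:

    import re

    host = '=https://myhost:2376'
    assert re.sub(r'^https?://', 'tcp://', host.lstrip('=')) == 'tcp://myhost:2376'
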
def parse_scale_args(options):
@@ -1527,7 +1684,7 @@ def warn_for_swarm_mode(client):
# UCP does multi-node scheduling with traditional Compose files.
return
- log.warn(
+ log.warning(
"The Docker Engine you're using is running in swarm mode.\n\n"
"Compose does not use swarm mode to deploy services to multiple nodes in a swarm. "
"All containers will be scheduled on the current node.\n\n"
diff --git a/compose/cli/signals.py b/compose/cli/signals.py
index 44def2ec..0244e701 100644
--- a/compose/cli/signals.py
+++ b/compose/cli/signals.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import signal
from ..const import IS_WINDOWS_PLATFORM
diff --git a/compose/cli/utils.py b/compose/cli/utils.py
index 4cc055cc..6a4615a9 100644
--- a/compose/cli/utils.py
+++ b/compose/cli/utils.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import unicode_literals
-
import math
import os
import platform
@@ -9,19 +5,12 @@ import ssl
import subprocess
import sys
+import distro
import docker
-import six
import compose
from ..const import IS_WINDOWS_PLATFORM
-# WindowsError is not defined on non-win32 platforms. Avoid runtime errors by
-# defining it as OSError (its parent class) if missing.
-try:
- WindowsError
-except NameError:
- WindowsError = OSError
-
def yesno(prompt, default=None):
"""
@@ -62,7 +51,7 @@ def call_silently(*args, **kwargs):
with open(os.devnull, 'w') as shutup:
try:
return subprocess.call(*args, stdout=shutup, stderr=shutup, **kwargs)
- except WindowsError:
+ except OSError:
# On Windows, subprocess.call() can still raise exceptions. Normalize
# to POSIXy behaviour by returning a nonzero exit code.
return 1
@@ -73,7 +62,7 @@ def is_mac():
def is_ubuntu():
- return platform.system() == 'Linux' and platform.linux_distribution()[0] == 'Ubuntu'
+ return platform.system() == 'Linux' and distro.linux_distribution()[0] == 'Ubuntu'
def is_windows():
@@ -124,7 +113,7 @@ def generate_user_agent():
try:
p_system = platform.system()
p_release = platform.release()
- except IOError:
+ except OSError:
pass
else:
parts.append("{}/{}".format(p_system, p_release))
@@ -133,18 +122,18 @@ def generate_user_agent():
def human_readable_file_size(size):
suffixes = ['B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', ]
- order = int(math.log(size, 2) / 10) if size else 0
+ order = int(math.log(size, 1000)) if size else 0
if order >= len(suffixes):
order = len(suffixes) - 1
- return '{0:.3g} {1}'.format(
- size / float(1 << (order * 10)),
+ return '{:.4g} {}'.format(
+ size / pow(10, order * 3),
suffixes[order]
)
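
The rewritten helper reports decimal (SI) units rather than the previous
power-of-two ones; spot checks of the conversion above:

    assert human_readable_file_size(0) == '0 B'
    assert human_readable_file_size(999) == '999 B'
    assert human_readable_file_size(1500) == '1.5 kB'
    assert human_readable_file_size(2000000) == '2 MB'
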
def binarystr_to_unicode(s):
- if not isinstance(s, six.binary_type):
+ if not isinstance(s, bytes):
return s
if IS_WINDOWS_PLATFORM:
diff --git a/compose/cli/verbose_proxy.py b/compose/cli/verbose_proxy.py
index b1592eab..c9340c4e 100644
--- a/compose/cli/verbose_proxy.py
+++ b/compose/cli/verbose_proxy.py
@@ -1,23 +1,18 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import functools
import logging
import pprint
from itertools import chain
-import six
-
def format_call(args, kwargs):
args = (repr(a) for a in args)
- kwargs = ("{0!s}={1!r}".format(*item) for item in six.iteritems(kwargs))
- return "({0})".format(", ".join(chain(args, kwargs)))
+ kwargs = ("{!s}={!r}".format(*item) for item in kwargs.items())
+ return "({})".format(", ".join(chain(args, kwargs)))
def format_return(result, max_lines):
if isinstance(result, (list, tuple, set)):
- return "({0} with {1} items)".format(type(result).__name__, len(result))
+ return "({} with {} items)".format(type(result).__name__, len(result))
if result:
lines = pprint.pformat(result).split('\n')
@@ -27,7 +22,7 @@ def format_return(result, max_lines):
return result
-class VerboseProxy(object):
+class VerboseProxy:
"""Proxy all function calls to another class and log method name, arguments
and return values for each call.
"""
@@ -41,7 +36,7 @@ class VerboseProxy(object):
def __getattr__(self, name):
attr = getattr(self.obj, name)
- if not six.callable(attr):
+ if not callable(attr):
return attr
return functools.partial(self.proxy_callable, name)
diff --git a/compose/config/__init__.py b/compose/config/__init__.py
index e1032f3d..855b2401 100644
--- a/compose/config/__init__.py
+++ b/compose/config/__init__.py
@@ -1,11 +1,9 @@
# flake8: noqa
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from . import environment
from .config import ConfigurationError
from .config import DOCKER_CONFIG_KEYS
from .config import find
+from .config import is_url
from .config import load
from .config import merge_environment
from .config import merge_labels
diff --git a/compose/config/config_schema_v3.6.json b/compose/config/compose_spec.json
index 95a552b3..86e3de1e 100644
--- a/compose/config/config_schema_v3.6.json
+++ b/compose/config/compose_spec.json
@@ -1,12 +1,14 @@
{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v3.6.json",
+ "$schema": "http://json-schema.org/draft/2019-09/schema#",
+ "id": "compose_spec.json",
"type": "object",
- "required": ["version"],
+ "title": "Compose Specification",
+ "description": "The Compose file is a YAML file defining a multi-containers based application.",
"properties": {
"version": {
- "type": "string"
+ "type": "string",
+ "description": "Version of the Compose specification used. Tools not implementing required version MUST reject the configuration file."
},
"services": {
@@ -85,15 +87,45 @@
"dockerfile": {"type": "string"},
"args": {"$ref": "#/definitions/list_or_dict"},
"labels": {"$ref": "#/definitions/list_or_dict"},
- "cache_from": {"$ref": "#/definitions/list_of_strings"},
+ "cache_from": {"type": "array", "items": {"type": "string"}},
"network": {"type": "string"},
"target": {"type": "string"},
- "shm_size": {"type": ["integer", "string"]}
+ "shm_size": {"type": ["integer", "string"]},
+ "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
+ "isolation": {"type": "string"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
]
},
+ "blkio_config": {
+ "type": "object",
+ "properties": {
+ "device_read_bps": {
+ "type": "array",
+ "items": {"$ref": "#/definitions/blkio_limit"}
+ },
+ "device_read_iops": {
+ "type": "array",
+ "items": {"$ref": "#/definitions/blkio_limit"}
+ },
+ "device_write_bps": {
+ "type": "array",
+ "items": {"$ref": "#/definitions/blkio_limit"}
+ },
+ "device_write_iops": {
+ "type": "array",
+ "items": {"$ref": "#/definitions/blkio_limit"}
+ },
+ "weight": {"type": "integer"},
+ "weight_device": {
+ "type": "array",
+ "items": {"$ref": "#/definitions/blkio_weight"}
+ }
+ },
+ "additionalProperties": false
+ },
"cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"cgroup_parent": {"type": "string"},
@@ -116,19 +148,59 @@
"uid": {"type": "string"},
"gid": {"type": "string"},
"mode": {"type": "number"}
- }
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
]
}
},
"container_name": {"type": "string"},
- "credential_spec": {"type": "object", "properties": {
- "file": {"type": "string"},
- "registry": {"type": "string"}
- }},
- "depends_on": {"$ref": "#/definitions/list_of_strings"},
+ "cpu_count": {"type": "integer", "minimum": 0},
+ "cpu_percent": {"type": "integer", "minimum": 0, "maximum": 100},
+ "cpu_shares": {"type": ["number", "string"]},
+ "cpu_quota": {"type": ["number", "string"]},
+ "cpu_period": {"type": ["number", "string"]},
+ "cpu_rt_period": {"type": ["number", "string"]},
+ "cpu_rt_runtime": {"type": ["number", "string"]},
+ "cpus": {"type": ["number", "string"]},
+ "cpuset": {"type": "string"},
+ "credential_spec": {
+ "type": "object",
+ "properties": {
+ "config": {"type": "string"},
+ "file": {"type": "string"},
+ "registry": {"type": "string"}
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
+ },
+ "depends_on": {
+ "oneOf": [
+ {"$ref": "#/definitions/list_of_strings"},
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "patternProperties": {
+ "^[a-zA-Z0-9._-]+$": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "condition": {
+ "type": "string",
+ "enum": ["service_started", "service_healthy", "service_completed_successfully"]
+ }
+ },
+ "required": ["condition"]
+ }
+ }
+ }
+ ]
+ },
+ "device_cgroup_rules": {"$ref": "#/definitions/list_of_strings"},
"devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"dns": {"$ref": "#/definitions/string_or_list"},
+ "dns_opt": {"type": "array","items": {"type": "string"}, "uniqueItems": true},
"dns_search": {"$ref": "#/definitions/string_or_list"},
"domainname": {"type": "string"},
"entrypoint": {
@@ -148,35 +220,59 @@
},
"uniqueItems": true
},
+ "extends": {
+ "oneOf": [
+ {"type": "string"},
+ {
+ "type": "object",
+ "properties": {
+ "service": {"type": "string"},
+ "file": {"type": "string"}
+ },
+ "required": ["service"],
+ "additionalProperties": false
+ }
+ ]
+ },
"external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"extra_hosts": {"$ref": "#/definitions/list_or_dict"},
+ "group_add": {
+ "type": "array",
+ "items": {
+ "type": ["string", "number"]
+ },
+ "uniqueItems": true
+ },
"healthcheck": {"$ref": "#/definitions/healthcheck"},
"hostname": {"type": "string"},
"image": {"type": "string"},
+ "init": {"type": "boolean"},
"ipc": {"type": "string"},
"isolation": {"type": "string"},
"labels": {"$ref": "#/definitions/list_or_dict"},
"links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
"logging": {
- "type": "object",
+ "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number", "null"]}
- }
- }
- },
- "additionalProperties": false
+ "properties": {
+ "driver": {"type": "string"},
+ "options": {
+ "type": "object",
+ "patternProperties": {
+ "^.+$": {"type": ["string", "number", "null"]}
+ }
+ }
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
-
"mac_address": {"type": "string"},
+ "mem_limit": {"type": ["number", "string"]},
+ "mem_reservation": {"type": ["string", "integer"]},
+ "mem_swappiness": {"type": "integer"},
+ "memswap_limit": {"type": ["number", "string"]},
"network_mode": {"type": "string"},
-
"networks": {
"oneOf": [
{"$ref": "#/definitions/list_of_strings"},
@@ -190,9 +286,12 @@
"properties": {
"aliases": {"$ref": "#/definitions/list_of_strings"},
"ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"}
+ "ipv6_address": {"type": "string"},
+ "link_local_ips": {"$ref": "#/definitions/list_of_strings"},
+ "priority": {"type": "number"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
{"type": "null"}
]
@@ -202,8 +301,11 @@
}
]
},
+ "oom_kill_disable": {"type": "boolean"},
+ "oom_score_adj": {"type": "integer", "minimum": -1000, "maximum": 1000},
"pid": {"type": ["string", "null"]},
-
+ "pids_limit": {"type": ["number", "string"]},
+ "platform": {"type": "string"},
"ports": {
"type": "array",
"items": {
@@ -218,16 +320,26 @@
"published": {"type": "integer"},
"protocol": {"type": "string"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
]
},
"uniqueItems": true
},
-
"privileged": {"type": "boolean"},
+ "profiles": {"$ref": "#/definitions/list_of_strings"},
+ "pull_policy": {"type": "string", "enum": [
+ "always", "never", "if_not_present", "build"
+ ]},
"read_only": {"type": "boolean"},
"restart": {"type": "string"},
+ "runtime": {
+ "type": "string"
+ },
+ "scale": {
+ "type": "integer"
+ },
"security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
"shm_size": {"type": ["number", "string"]},
"secrets": {
@@ -243,7 +355,9 @@
"uid": {"type": "string"},
"gid": {"type": "string"},
"mode": {"type": "number"}
- }
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
]
}
@@ -252,6 +366,7 @@
"stdin_open": {"type": "boolean"},
"stop_grace_period": {"type": "string", "format": "duration"},
"stop_signal": {"type": "string"},
+ "storage_opt": {"type": "object"},
"tmpfs": {"$ref": "#/definitions/string_or_list"},
"tty": {"type": "boolean"},
"ulimits": {
@@ -261,13 +376,14 @@
"oneOf": [
{"type": "integer"},
{
- "type":"object",
+ "type": "object",
"properties": {
"hard": {"type": "integer"},
"soft": {"type": "integer"}
},
"required": ["soft", "hard"],
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
]
}
@@ -293,13 +409,17 @@
"type": "object",
"properties": {
"propagation": {"type": "string"}
- }
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"volume": {
"type": "object",
"properties": {
"nocopy": {"type": "boolean"}
- }
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"tmpfs": {
"type": "object",
@@ -308,24 +428,32 @@
"type": "integer",
"minimum": 0
}
- }
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
- ],
- "uniqueItems": true
- }
+ ]
+ },
+ "uniqueItems": true
+ },
+ "volumes_from": {
+ "type": "array",
+ "items": {"type": "string"},
+ "uniqueItems": true
},
"working_dir": {"type": "string"}
},
+ "patternProperties": {"^x-": {}},
"additionalProperties": false
},
"healthcheck": {
"id": "#/definitions/healthcheck",
"type": "object",
- "additionalProperties": false,
"properties": {
"disable": {"type": "boolean"},
"interval": {"type": "string", "format": "duration"},
@@ -338,7 +466,9 @@
},
"timeout": {"type": "string", "format": "duration"},
"start_period": {"type": "string", "format": "duration"}
- }
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"deployment": {
"id": "#/definitions/deployment",
@@ -348,6 +478,21 @@
"endpoint_mode": {"type": "string"},
"replicas": {"type": "integer"},
"labels": {"$ref": "#/definitions/list_or_dict"},
+ "rollback_config": {
+ "type": "object",
+ "properties": {
+ "parallelism": {"type": "integer"},
+ "delay": {"type": "string", "format": "duration"},
+ "failure_action": {"type": "string"},
+ "monitor": {"type": "string", "format": "duration"},
+ "max_failure_ratio": {"type": "number"},
+ "order": {"type": "string", "enum": [
+ "start-first", "stop-first"
+ ]}
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
+ },
"update_config": {
"type": "object",
"properties": {
@@ -360,7 +505,8 @@
"start-first", "stop-first"
]}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"resources": {
"type": "object",
@@ -368,22 +514,26 @@
"limits": {
"type": "object",
"properties": {
- "cpus": {"type": "string"},
+ "cpus": {"type": ["number", "string"]},
"memory": {"type": "string"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"reservations": {
"type": "object",
"properties": {
- "cpus": {"type": "string"},
+ "cpus": {"type": ["number", "string"]},
"memory": {"type": "string"},
- "generic_resources": {"$ref": "#/definitions/generic_resources"}
+ "generic_resources": {"$ref": "#/definitions/generic_resources"},
+ "devices": {"$ref": "#/definitions/devices"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"restart_policy": {
"type": "object",
@@ -393,7 +543,8 @@
"max_attempts": {"type": "integer"},
"window": {"type": "string", "format": "duration"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"placement": {
"type": "object",
@@ -406,14 +557,18 @@
"properties": {
"spread": {"type": "string"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
- }
+ },
+ "max_replicas_per_node": {"type": "integer"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"generic_resources": {
@@ -428,10 +583,29 @@
"kind": {"type": "string"},
"value": {"type": "number"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
+ }
+ },
+
+ "devices": {
+ "id": "#/definitions/devices",
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "capabilities": {"$ref": "#/definitions/list_of_strings"},
+ "count": {"type": ["string", "integer"]},
+ "device_ids": {"$ref": "#/definitions/list_of_strings"},
+ "driver":{"type": "string"},
+ "options":{"$ref": "#/definitions/list_or_dict"}
+ },
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
},
@@ -456,26 +630,46 @@
"items": {
"type": "object",
"properties": {
- "subnet": {"type": "string"}
+ "subnet": {"type": "string", "format": "subnet_ip_address"},
+ "ip_range": {"type": "string"},
+ "gateway": {"type": "string"},
+ "aux_addresses": {
+ "type": "object",
+ "additionalProperties": false,
+ "patternProperties": {"^.+$": {"type": "string"}}
+ }
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
}
+ },
+ "options": {
+ "type": "object",
+ "additionalProperties": false,
+ "patternProperties": {"^.+$": {"type": "string"}}
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"external": {
"type": ["boolean", "object"],
"properties": {
- "name": {"type": "string"}
+ "name": {
+ "deprecated": true,
+ "type": "string"
+ }
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"internal": {"type": "boolean"},
+ "enable_ipv6": {"type": "boolean"},
"attachable": {"type": "boolean"},
"labels": {"$ref": "#/definitions/list_or_dict"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"volume": {
@@ -493,13 +687,18 @@
"external": {
"type": ["boolean", "object"],
"properties": {
- "name": {"type": "string"}
+ "name": {
+ "deprecated": true,
+ "type": "string"
+ }
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"labels": {"$ref": "#/definitions/list_or_dict"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"secret": {
@@ -514,9 +713,18 @@
"name": {"type": "string"}
}
},
- "labels": {"$ref": "#/definitions/list_or_dict"}
+ "labels": {"$ref": "#/definitions/list_or_dict"},
+ "driver": {"type": "string"},
+ "driver_opts": {
+ "type": "object",
+ "patternProperties": {
+ "^.+$": {"type": ["string", "number"]}
+ }
+ },
+ "template_driver": {"type": "string"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"config": {
@@ -528,12 +736,17 @@
"external": {
"type": ["boolean", "object"],
"properties": {
- "name": {"type": "string"}
+ "name": {
+ "deprecated": true,
+ "type": "string"
+ }
}
},
- "labels": {"$ref": "#/definitions/list_or_dict"}
+ "labels": {"$ref": "#/definitions/list_or_dict"},
+ "template_driver": {"type": "string"}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "patternProperties": {"^x-": {}}
},
"string_or_list": {
@@ -564,6 +777,23 @@
]
},
+ "blkio_limit": {
+ "type": "object",
+ "properties": {
+ "path": {"type": "string"},
+ "rate": {"type": ["integer", "string"]}
+ },
+ "additionalProperties": false
+ },
+ "blkio_weight": {
+ "type": "object",
+ "properties": {
+ "path": {"type": "string"},
+ "weight": {"type": "integer"}
+ },
+ "additionalProperties": false
+ },
+
"constraints": {
"service": {
"id": "#/definitions/constraints/service",
diff --git a/compose/config/config.py b/compose/config/config.py
index 9f8a50c6..c2f5e95a 100644
--- a/compose/config/config.py
+++ b/compose/config/config.py
@@ -1,25 +1,24 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import functools
-import io
import logging
import os
+import re
import string
import sys
from collections import namedtuple
+from itertools import chain
+from operator import attrgetter
+from operator import itemgetter
-import six
import yaml
-from cached_property import cached_property
+
+try:
+ from functools import cached_property
+except ImportError:
+ from cached_property import cached_property
from . import types
-from .. import const
+from ..const import COMPOSE_SPEC as VERSION
from ..const import COMPOSEFILE_V1 as V1
-from ..const import COMPOSEFILE_V2_1 as V2_1
-from ..const import COMPOSEFILE_V2_3 as V2_3
-from ..const import COMPOSEFILE_V3_0 as V3_0
-from ..const import COMPOSEFILE_V3_4 as V3_4
from ..utils import build_string_dict
from ..utils import json_hash
from ..utils import parse_bytes
@@ -50,9 +49,11 @@ from .validation import match_named_volumes
from .validation import validate_against_config_schema
from .validation import validate_config_section
from .validation import validate_cpu
+from .validation import validate_credential_spec
from .validation import validate_depends_on
from .validation import validate_extends_file_path
from .validation import validate_healthcheck
+from .validation import validate_ipc_mode
from .validation import validate_links
from .validation import validate_network_mode
from .validation import validate_pid_mode
@@ -91,6 +92,7 @@ DOCKER_CONFIG_KEYS = [
'healthcheck',
'image',
'ipc',
+ 'isolation',
'labels',
'links',
'mac_address',
@@ -135,6 +137,7 @@ ALLOWED_KEYS = DOCKER_CONFIG_KEYS + [
'logging',
'network_mode',
'platform',
+ 'profiles',
'scale',
'stop_grace_period',
]
@@ -150,9 +153,14 @@ DOCKER_VALID_URL_PREFIXES = (
SUPPORTED_FILENAMES = [
'docker-compose.yml',
'docker-compose.yaml',
+ 'compose.yml',
+ 'compose.yaml',
]
-DEFAULT_OVERRIDE_FILENAMES = ('docker-compose.override.yml', 'docker-compose.override.yaml')
+DEFAULT_OVERRIDE_FILENAMES = ('docker-compose.override.yml',
+ 'docker-compose.override.yaml',
+ 'compose.override.yml',
+ 'compose.override.yaml')
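
With these additions, file discovery also accepts the Compose Specification's
short names, so a project can pair, for example:

    # compose.yaml           - base file, matched via SUPPORTED_FILENAMES
    # compose.override.yaml  - merged on top via DEFAULT_OVERRIDE_FILENAMES
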
log = logging.getLogger(__name__)
@@ -170,7 +178,7 @@ class ConfigDetails(namedtuple('_ConfigDetails', 'working_dir config_files envir
def __new__(cls, working_dir, config_files, environment=None):
if environment is None:
environment = Environment.from_env_file(working_dir)
- return super(ConfigDetails, cls).__new__(
+ return super().__new__(
cls, working_dir, config_files, environment
)
@@ -188,42 +196,65 @@ class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
return cls(filename, load_yaml(filename))
@cached_property
- def version(self):
- if 'version' not in self.config:
+ def config_version(self):
+ version = self.config.get('version', None)
+ if isinstance(version, dict):
return V1
+ return ComposeVersion(version) if version else self.version
- version = self.config['version']
+ @cached_property
+ def version(self):
+ version = self.config.get('version', None)
+ if not version:
+ # no version is specified in the config file
+ services = self.config.get('services', None)
+ networks = self.config.get('networks', None)
+ volumes = self.config.get('volumes', None)
+ if services or networks or volumes:
+ # validate V2/V3 structure
+ for section in ['services', 'networks', 'volumes']:
+ validate_config_section(
+ self.filename, self.config.get(section, {}), section)
+ return VERSION
+
+ # validate V1 structure
+ validate_config_section(
+ self.filename, self.config, 'services')
+ return V1
if isinstance(version, dict):
- log.warn('Unexpected type for "version" key in "{}". Assuming '
- '"version" is the name of a service, and defaulting to '
- 'Compose file version 1.'.format(self.filename))
+ log.warning('Unexpected type for "version" key in "{}". Assuming '
+ '"version" is the name of a service, and defaulting to '
+ 'Compose file version {}.'.format(self.filename, V1))
return V1
- if not isinstance(version, six.string_types):
+ if not isinstance(version, str):
raise ConfigurationError(
'Version in "{}" is invalid - it should be a string.'
.format(self.filename))
- if version == '1':
+ if isinstance(version, str):
+ version_pattern = re.compile(r"^[1-3]+(\.\d+)?$")
+ if not version_pattern.match(version):
+ raise ConfigurationError(
+ 'Version "{}" in "{}" is invalid.'
+ .format(version, self.filename))
+
+ if version.startswith("1"):
raise ConfigurationError(
'Version in "{}" is invalid. {}'
.format(self.filename, VERSION_EXPLANATION)
)
- if version == '2':
- return const.COMPOSEFILE_V2_0
-
- if version == '3':
- return const.COMPOSEFILE_V3_0
-
- return ComposeVersion(version)
+ return VERSION
def get_service(self, name):
return self.get_service_dicts()[name]
def get_service_dicts(self):
- return self.config if self.version == V1 else self.config.get('services', {})
+ if self.version == V1:
+ return self.config
+ return self.config.get('services', {})
def get_volumes(self):
return {} if self.version == V1 else self.config.get('volumes', {})
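A quick illustration of the new detection rules (a sketch; ConfigFile is the namedtuple defined above, and VERSION is the COMPOSE_SPEC alias imported at the top of the file):

    from compose.config.config import ConfigFile, V1

    # top-level sections present but no `version` key -> unified Compose spec
    ConfigFile('compose.yml', {'services': {'web': {'image': 'nginx'}}}).version   # == VERSION

    # flat V1 layout: service names at the top level, no `version` key
    ConfigFile('docker-compose.yml', {'web': {'image': 'nginx'}}).version          # == V1

Both paths also run validate_config_section on the way, so a malformed section fails here rather than later.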
@@ -232,14 +263,16 @@ class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
return {} if self.version == V1 else self.config.get('networks', {})
def get_secrets(self):
- return {} if self.version < const.COMPOSEFILE_V3_1 else self.config.get('secrets', {})
+ return {} if self.version == V1 else self.config.get('secrets', {})
def get_configs(self):
- return {} if self.version < const.COMPOSEFILE_V3_3 else self.config.get('configs', {})
+ return {} if self.version == V1 else self.config.get('configs', {})
-class Config(namedtuple('_Config', 'version services volumes networks secrets configs')):
+class Config(namedtuple('_Config', 'config_version version services volumes networks secrets configs')):
"""
+ :param config_version: configuration file version
+ :type config_version: int
:param version: configuration version
:type version: int
:param services: List of service description dictionaries
@@ -280,7 +313,16 @@ def find(base_dir, filenames, environment, override_dir=None):
if filenames:
filenames = [os.path.join(base_dir, f) for f in filenames]
else:
+ # search for compose files in the base dir and its parents
filenames = get_default_config_files(base_dir)
+ if not filenames and not override_dir:
+ # none found in base_dir and no override_dir defined
+ raise ComposeFileNotFound(SUPPORTED_FILENAMES)
+ if not filenames:
+ # search for compose files in the project directory and its parents
+ filenames = get_default_config_files(override_dir)
+ if not filenames:
+ raise ComposeFileNotFound(SUPPORTED_FILENAMES)
log.debug("Using configuration files: {}".format(",".join(filenames)))
return ConfigDetails(
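With no explicit filenames, the lookup order is now: base_dir and its parents, then override_dir and its parents, then ComposeFileNotFound. A minimal call, assuming a compose file exists in the working directory:

    from compose.config.config import find
    from compose.config.environment import Environment

    env = Environment.from_env_file('.')
    details = find('.', None, env)    # -> ConfigDetails(working_dir, config_files, environment)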
@@ -293,13 +335,14 @@ def find(base_dir, filenames, environment, override_dir=None):
def validate_config_version(config_files):
main_file = config_files[0]
validate_top_level_object(main_file)
+
for next_file in config_files[1:]:
validate_top_level_object(next_file)
if main_file.version != next_file.version:
raise ConfigurationError(
- "Version mismatch: file {0} specifies version {1} but "
- "extension file {2} uses version {3}".format(
+ "Version mismatch: file {} specifies version {} but "
+ "extension file {} uses version {}".format(
main_file.filename,
main_file.version,
next_file.filename,
@@ -310,13 +353,13 @@ def get_default_config_files(base_dir):
(candidates, path) = find_candidates_in_parent_dirs(SUPPORTED_FILENAMES, base_dir)
if not candidates:
- raise ComposeFileNotFound(SUPPORTED_FILENAMES)
+ return None
winner = candidates[0]
if len(candidates) > 1:
- log.warn("Found multiple config files with supported names: %s", ", ".join(candidates))
- log.warn("Using %s\n", winner)
+ log.warning("Found multiple config files with supported names: %s", ", ".join(candidates))
+ log.warning("Using %s\n", winner)
return [os.path.join(path, winner)] + get_default_override_file(path)
@@ -349,39 +392,36 @@ def find_candidates_in_parent_dirs(filenames, path):
return (candidates, path)
-def check_swarm_only_config(service_dicts, compatibility=False):
+def check_swarm_only_config(service_dicts):
warning_template = (
"Some services ({services}) use the '{key}' key, which will be ignored. "
"Compose does not support '{key}' configuration - use "
"`docker stack deploy` to deploy to a swarm."
)
-
- def check_swarm_only_key(service_dicts, key):
- services = [s for s in service_dicts if s.get(key)]
- if services:
- log.warn(
- warning_template.format(
- services=", ".join(sorted(s['name'] for s in services)),
- key=key
- )
+ key = 'configs'
+ services = [s for s in service_dicts if s.get(key)]
+ if services:
+ log.warning(
+ warning_template.format(
+ services=", ".join(sorted(s['name'] for s in services)),
+ key=key
)
- if not compatibility:
- check_swarm_only_key(service_dicts, 'deploy')
- check_swarm_only_key(service_dicts, 'credential_spec')
- check_swarm_only_key(service_dicts, 'configs')
+ )
-def load(config_details, compatibility=False):
+def load(config_details, interpolate=True):
"""Load the configuration from a working directory and a list of
configuration files. Files are loaded in order, and merged on top
of each other to create the final configuration.
Return a fully interpolated, extended and validated configuration.
"""
+
+    # validate against the latest schema version and, if that fails, fall back to the v1 schema
validate_config_version(config_details.config_files)
processed_files = [
- process_config_file(config_file, config_details.environment)
+ process_config_file(config_file, config_details.environment, interpolate=interpolate)
for config_file in config_details.config_files
]
config_details = config_details._replace(config_files=processed_files)
@@ -399,17 +439,16 @@ def load(config_details, compatibility=False):
configs = load_mapping(
config_details.config_files, 'get_configs', 'Config', config_details.working_dir
)
- service_dicts = load_services(config_details, main_file, compatibility)
+ service_dicts = load_services(config_details, main_file, interpolate=interpolate)
if main_file.version != V1:
for service_dict in service_dicts:
match_named_volumes(service_dict, volumes)
- check_swarm_only_config(service_dicts, compatibility)
-
- version = V2_3 if compatibility and main_file.version >= V3_0 else main_file.version
+ check_swarm_only_config(service_dicts)
- return Config(version, service_dicts, volumes, networks, secrets, configs)
+ return Config(main_file.config_version, main_file.version,
+ service_dicts, volumes, networks, secrets, configs)
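The compatibility flag is gone; callers now get the unified model and can opt out of variable interpolation instead. A usage sketch (the raw form is what commands that print the file back, e.g. `config --no-interpolate`, would want):

    from compose.config.config import find, load
    from compose.config.environment import Environment

    env = Environment.from_env_file('.')
    cfg = load(find('.', None, env))                     # merged + interpolated Config
    raw = load(find('.', None, env), interpolate=False)  # ${VAR} references kept as written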
def load_mapping(config_files, get_func, entity_type, working_dir=None):
@@ -429,29 +468,48 @@ def load_mapping(config_files, get_func, entity_type, working_dir=None):
elif not config.get('name'):
config['name'] = name
- if 'driver_opts' in config:
- config['driver_opts'] = build_string_dict(
- config['driver_opts']
- )
-
if 'labels' in config:
config['labels'] = parse_labels(config['labels'])
if 'file' in config:
config['file'] = expand_path(working_dir, config['file'])
+ if 'driver_opts' in config:
+ config['driver_opts'] = build_string_dict(
+ config['driver_opts']
+ )
+ device = format_device_option(entity_type, config)
+ if device:
+ config['driver_opts']['device'] = device
return mapping
+def format_device_option(entity_type, config):
+ if entity_type != 'Volume':
+ return
+ # default driver is 'local'
+ driver = config.get('driver', 'local')
+ if driver != 'local':
+ return
+ o = config['driver_opts'].get('o')
+ device = config['driver_opts'].get('device')
+ if o and o == 'bind' and device:
+ fullpath = os.path.abspath(os.path.expanduser(device))
+ return fullpath
+
+
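format_device_option only rewrites the `device` opt for bind-mounted volumes on the default local driver; everything else falls through to None. A sketch of the observable behaviour:

    cfg = {'driver_opts': {'o': 'bind', 'device': '~/data'}}
    format_device_option('Volume', cfg)     # e.g. '/home/user/data' (~ expanded, made absolute)
    format_device_option('Network', cfg)    # None: only volumes are touched
    format_device_option('Volume', {'driver': 'nfs', 'driver_opts': {}})   # None: non-local driver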
def validate_external(entity_type, name, config, version):
- if (version < V2_1 or (version >= V3_0 and version < V3_4)) and len(config.keys()) > 1:
- raise ConfigurationError(
- "{} {} declared as external but specifies additional attributes "
- "({}).".format(
- entity_type, name, ', '.join(k for k in config if k != 'external')))
+ for k in config.keys():
+ if entity_type == 'Network' and k == 'driver':
+ continue
+ if k not in ['external', 'name']:
+ raise ConfigurationError(
+ "{} {} declared as external but specifies additional attributes "
+ "({}).".format(
+ entity_type, name, ', '.join(k for k in config if k != 'external')))
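The version-based rule is replaced by a flat whitelist: an external resource may only carry `external` and `name` (plus `driver` for networks). In plain terms (the version argument is now unused):

    validate_external('Volume', 'data', {'external': True, 'name': 'mydata'}, VERSION)     # ok
    validate_external('Network', 'net', {'external': True, 'driver': 'overlay'}, VERSION)  # ok, networks may keep driver
    validate_external('Volume', 'data', {'external': True, 'labels': ['a=b']}, VERSION)    # ConfigurationError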
-def load_services(config_details, config_file, compatibility=False):
+def load_services(config_details, config_file, interpolate=True):
def build_service(service_name, service_dict, service_names):
service_config = ServiceConfig.with_abs_paths(
config_details.working_dir,
@@ -470,7 +528,7 @@ def load_services(config_details, config_file, compatibility=False):
service_names,
config_file.version,
config_details.environment,
- compatibility
+ interpolate
)
return service_dict
@@ -495,15 +553,12 @@ def load_services(config_details, config_file, compatibility=False):
file.get_service_dicts() for file in config_details.config_files
]
- service_config = service_configs[0]
- for next_config in service_configs[1:]:
- service_config = merge_services(service_config, next_config)
+ service_config = functools.reduce(merge_services, service_configs)
return build_services(service_config)
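functools.reduce folds merge_services pairwise across the files, which is exactly what the removed loop did:

    # merged = service_configs[0]
    # for next_config in service_configs[1:]:
    #     merged = merge_services(merged, next_config)
    merged = functools.reduce(merge_services, service_configs)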
def interpolate_config_section(config_file, config, section, environment):
- validate_config_section(config_file.filename, config, section)
return interpolate_environment_variables(
config_file.version,
config,
@@ -512,44 +567,63 @@ def interpolate_config_section(config_file, config, section, environment):
)
-def process_config_file(config_file, environment, service_name=None):
- services = interpolate_config_section(
+def process_config_section(config_file, config, section, environment, interpolate):
+ validate_config_section(config_file.filename, config, section)
+ if interpolate:
+ return interpolate_environment_variables(
+ config_file.version,
+ config,
+ section,
+ environment)
+ else:
+ return config
+
+
+def process_config_file(config_file, environment, service_name=None, interpolate=True):
+ services = process_config_section(
config_file,
config_file.get_service_dicts(),
'service',
- environment)
+ environment,
+ interpolate,
+ )
if config_file.version > V1:
processed_config = dict(config_file.config)
processed_config['services'] = services
- processed_config['volumes'] = interpolate_config_section(
+ processed_config['volumes'] = process_config_section(
config_file,
config_file.get_volumes(),
'volume',
- environment)
- processed_config['networks'] = interpolate_config_section(
+ environment,
+ interpolate,
+ )
+ processed_config['networks'] = process_config_section(
config_file,
config_file.get_networks(),
'network',
- environment)
- if config_file.version >= const.COMPOSEFILE_V3_1:
- processed_config['secrets'] = interpolate_config_section(
- config_file,
- config_file.get_secrets(),
- 'secret',
- environment)
- if config_file.version >= const.COMPOSEFILE_V3_3:
- processed_config['configs'] = interpolate_config_section(
- config_file,
- config_file.get_configs(),
- 'config',
- environment
- )
+ environment,
+ interpolate,
+ )
+ processed_config['secrets'] = process_config_section(
+ config_file,
+ config_file.get_secrets(),
+ 'secret',
+ environment,
+ interpolate,
+ )
+ processed_config['configs'] = process_config_section(
+ config_file,
+ config_file.get_configs(),
+ 'config',
+ environment,
+ interpolate,
+ )
else:
processed_config = services
config_file = config_file._replace(config=processed_config)
- validate_against_config_schema(config_file)
+ validate_against_config_schema(config_file, config_file.version)
if service_name and service_name not in services:
raise ConfigurationError(
@@ -559,7 +633,7 @@ def process_config_file(config_file, environment, service_name=None):
return config_file
-class ServiceExtendsResolver(object):
+class ServiceExtendsResolver:
def __init__(self, service_config, config_file, environment, already_seen=None):
self.service_config = service_config
self.working_dir = service_config.working_dir
@@ -592,7 +666,7 @@ class ServiceExtendsResolver(object):
config_path = self.get_extended_config_path(extends)
service_name = extends['service']
- if config_path == self.config_file.filename:
+ if config_path == os.path.abspath(self.config_file.filename):
try:
service_config = self.config_file.get_service(service_name)
except KeyError:
@@ -649,25 +723,25 @@ class ServiceExtendsResolver(object):
return filename
-def resolve_environment(service_dict, environment=None):
+def resolve_environment(service_dict, environment=None, interpolate=True):
"""Unpack any environment variables from an env_file, if set.
Interpolate environment values if set.
"""
env = {}
for env_file in service_dict.get('env_file', []):
- env.update(env_vars_from_file(env_file))
+ env.update(env_vars_from_file(env_file, interpolate))
env.update(parse_environment(service_dict.get('environment')))
- return dict(resolve_env_var(k, v, environment) for k, v in six.iteritems(env))
+ return dict(resolve_env_var(k, v, environment) for k, v in env.items())
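A sketch of the resolution order, assuming the usual Compose behaviour where a bare key inherits its value from the surrounding environment: env_file entries load first, `environment` entries override them, and unset values resolve against the passed-in mapping:

    service = {'environment': ['DEBUG=1', 'HOME']}
    resolve_environment(service, environment)
    # -> {'DEBUG': '1', 'HOME': <looked up in `environment`>}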
def resolve_build_args(buildargs, environment):
args = parse_build_arguments(buildargs)
- return dict(resolve_env_var(k, v, environment) for k, v in six.iteritems(args))
+ return dict(resolve_env_var(k, v, environment) for k, v in args.items())
def validate_extended_service_dict(service_dict, filename, service):
- error_prefix = "Cannot extend service '%s' in %s:" % (service, filename)
+ error_prefix = "Cannot extend service '{}' in {}:".format(service, filename)
if 'links' in service_dict:
raise ConfigurationError(
@@ -693,17 +767,32 @@ def validate_extended_service_dict(service_dict, filename, service):
def validate_service(service_config, service_names, config_file):
+ def build_image():
+ args = sys.argv[1:]
+ if 'pull' in args:
+ return False
+
+ if '--no-build' in args:
+ return False
+
+ return True
+
service_dict, service_name = service_config.config, service_config.name
validate_service_constraints(service_dict, service_name, config_file)
- validate_paths(service_dict)
+
+ if build_image():
+ # We only care about valid paths when actually building images
+ validate_paths(service_dict)
validate_cpu(service_config)
validate_ulimits(service_config)
+ validate_ipc_mode(service_config, service_names)
validate_network_mode(service_config, service_names)
validate_pid_mode(service_config, service_names)
validate_depends_on(service_config, service_names)
validate_links(service_config, service_names)
validate_healthcheck(service_config)
+ validate_credential_spec(service_config)
if not service_dict.get('image') and has_uppercase(service_name):
raise ConfigurationError(
@@ -749,7 +838,7 @@ def process_service(service_config):
def process_build_section(service_dict, working_dir):
- if isinstance(service_dict['build'], six.string_types):
+ if isinstance(service_dict['build'], str):
service_dict['build'] = resolve_build_path(working_dir, service_dict['build'])
elif isinstance(service_dict['build'], dict):
if 'context' in service_dict['build']:
@@ -775,9 +864,9 @@ def process_ports(service_dict):
def process_depends_on(service_dict):
if 'depends_on' in service_dict and not isinstance(service_dict['depends_on'], dict):
- service_dict['depends_on'] = dict([
- (svc, {'condition': 'service_started'}) for svc in service_dict['depends_on']
- ])
+ service_dict['depends_on'] = {
+ svc: {'condition': 'service_started'} for svc in service_dict['depends_on']
+ }
return service_dict
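The comprehension normalizes the short list form into the long mapping form:

    process_depends_on({'depends_on': ['db', 'redis']})
    # -> {'depends_on': {'db': {'condition': 'service_started'},
    #                    'redis': {'condition': 'service_started'}}}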
@@ -817,7 +906,7 @@ def process_healthcheck(service_dict):
hc['test'] = ['NONE']
for field in ['interval', 'timeout', 'start_period']:
- if field not in hc or isinstance(hc[field], six.integer_types):
+ if field not in hc or isinstance(hc[field], int):
continue
hc[field] = parse_nanoseconds_int(hc[field])
@@ -834,16 +923,28 @@ def finalize_service_volumes(service_dict, environment):
finalized_volumes.append(MountSpec.parse(v, normalize, win_host))
else:
finalized_volumes.append(VolumeSpec.parse(v, normalize, win_host))
+
+ duplicate_mounts = []
+ mounts = [v.as_volume_spec() if isinstance(v, MountSpec) else v for v in finalized_volumes]
+ for mount in mounts:
+ if list(map(attrgetter('internal'), mounts)).count(mount.internal) > 1:
+ duplicate_mounts.append(mount.repr())
+
+ if duplicate_mounts:
+ raise ConfigurationError("Duplicate mount points: [%s]" % (
+ ', '.join(duplicate_mounts)))
+
service_dict['volumes'] = finalized_volumes
return service_dict
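The new check compares the container-side (internal) paths of all finalized specs, so two host paths mapped onto the same target now fail fast instead of silently shadowing each other. A sketch:

    service_dict = {'volumes': ['./a:/data', './b:/data']}
    finalize_service_volumes(service_dict, environment)
    # ConfigurationError: Duplicate mount points: [...]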
-def finalize_service(service_config, service_names, version, environment, compatibility):
+def finalize_service(service_config, service_names, version, environment,
+ interpolate=True):
service_dict = dict(service_config.config)
if 'environment' in service_dict or 'env_file' in service_dict:
- service_dict['environment'] = resolve_environment(service_dict, environment)
+ service_dict['environment'] = resolve_environment(service_dict, environment, interpolate)
service_dict.pop('env_file', None)
if 'volumes_from' in service_dict:
@@ -880,80 +981,10 @@ def finalize_service(service_config, service_names, version, environment, compat
normalize_build(service_dict, service_config.working_dir, environment)
- if compatibility:
- service_dict, ignored_keys = translate_deploy_keys_to_container_config(
- service_dict
- )
- if ignored_keys:
- log.warn(
- 'The following deploy sub-keys are not supported in compatibility mode and have'
- ' been ignored: {}'.format(', '.join(ignored_keys))
- )
-
service_dict['name'] = service_config.name
return normalize_v1_service_format(service_dict)
-def translate_resource_keys_to_container_config(resources_dict, service_dict):
- if 'limits' in resources_dict:
- service_dict['mem_limit'] = resources_dict['limits'].get('memory')
- if 'cpus' in resources_dict['limits']:
- service_dict['cpus'] = float(resources_dict['limits']['cpus'])
- if 'reservations' in resources_dict:
- service_dict['mem_reservation'] = resources_dict['reservations'].get('memory')
- if 'cpus' in resources_dict['reservations']:
- return ['resources.reservations.cpus']
- return []
-
-
-def convert_restart_policy(name):
- try:
- return {
- 'any': 'always',
- 'none': 'no',
- 'on-failure': 'on-failure'
- }[name]
- except KeyError:
- raise ConfigurationError('Invalid restart policy "{}"'.format(name))
-
-
-def translate_deploy_keys_to_container_config(service_dict):
- if 'deploy' not in service_dict:
- return service_dict, []
-
- deploy_dict = service_dict['deploy']
- ignored_keys = [
- k for k in ['endpoint_mode', 'labels', 'update_config', 'placement']
- if k in deploy_dict
- ]
-
- if 'replicas' in deploy_dict and deploy_dict.get('mode', 'replicated') == 'replicated':
- service_dict['scale'] = deploy_dict['replicas']
-
- if 'restart_policy' in deploy_dict:
- service_dict['restart'] = {
- 'Name': convert_restart_policy(deploy_dict['restart_policy'].get('condition', 'any')),
- 'MaximumRetryCount': deploy_dict['restart_policy'].get('max_attempts', 0)
- }
- for k in deploy_dict['restart_policy'].keys():
- if k != 'condition' and k != 'max_attempts':
- ignored_keys.append('restart_policy.{}'.format(k))
-
- ignored_keys.extend(
- translate_resource_keys_to_container_config(
- deploy_dict.get('resources', {}), service_dict
- )
- )
-
- del service_dict['deploy']
- if 'credential_spec' in service_dict:
- del service_dict['credential_spec']
- if 'configs' in service_dict:
- del service_dict['configs']
-
- return service_dict, ignored_keys
-
-
def normalize_v1_service_format(service_dict):
if 'log_driver' in service_dict or 'log_opt' in service_dict:
if 'logging' not in service_dict:
@@ -1038,21 +1069,22 @@ def merge_service_dicts(base, override, version):
md.merge_mapping('environment', parse_environment)
md.merge_mapping('labels', parse_labels)
md.merge_mapping('ulimits', parse_flat_dict)
- md.merge_mapping('networks', parse_networks)
md.merge_mapping('sysctls', parse_sysctls)
md.merge_mapping('depends_on', parse_depends_on)
+ md.merge_mapping('storage_opt', parse_flat_dict)
md.merge_sequence('links', ServiceLink.parse)
md.merge_sequence('secrets', types.ServiceSecret.parse)
md.merge_sequence('configs', types.ServiceConfig.parse)
md.merge_sequence('security_opt', types.SecurityOpt.parse)
md.merge_mapping('extra_hosts', parse_extra_hosts)
+ md.merge_field('networks', merge_networks, default={})
for field in ['volumes', 'devices']:
md.merge_field(field, merge_path_mappings)
for field in [
'cap_add', 'cap_drop', 'expose', 'external_links',
- 'volumes_from', 'device_cgroup_rules',
+ 'volumes_from', 'device_cgroup_rules', 'profiles',
]:
md.merge_field(field, merge_unique_items_lists, default=[])
@@ -1077,9 +1109,9 @@ def merge_service_dicts(base, override, version):
def merge_unique_items_lists(base, override):
- override = [str(o) for o in override]
- base = [str(b) for b in base]
- return sorted(set().union(base, override))
+ override = (str(o) for o in override)
+ base = (str(b) for b in base)
+ return sorted(set(chain(base, override)))
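Behaviour is unchanged; the generators just avoid materializing two intermediate lists:

    merge_unique_items_lists(['CAP_A', 'CAP_B'], ['CAP_B', 'CAP_C'])
    # -> ['CAP_A', 'CAP_B', 'CAP_C']   (stringified, deduplicated, sorted)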
def merge_healthchecks(base, override):
@@ -1092,9 +1124,7 @@ def merge_healthchecks(base, override):
def merge_ports(md, base, override):
def parse_sequence_func(seq):
- acc = []
- for item in seq:
- acc.extend(ServicePort.parse(item))
+ acc = [s for item in seq for s in ServicePort.parse(item)]
return to_mapping(acc, 'merge_field')
field = 'ports'
@@ -1104,13 +1134,13 @@ def merge_ports(md, base, override):
merged = parse_sequence_func(md.base.get(field, []))
merged.update(parse_sequence_func(md.override.get(field, [])))
- md[field] = [item for item in sorted(merged.values(), key=lambda x: x.target)]
+ md[field] = [item for item in sorted(merged.values(), key=attrgetter("target"))]
def merge_build(output, base, override):
def to_dict(service):
build_config = service.get('build', {})
- if isinstance(build_config, six.string_types):
+ if isinstance(build_config, str):
return {'context': build_config}
return build_config
@@ -1135,6 +1165,7 @@ def merge_deploy(base, override):
md.merge_scalar('replicas')
md.merge_mapping('labels', parse_labels)
md.merge_mapping('update_config')
+ md.merge_mapping('rollback_config')
md.merge_mapping('restart_policy')
if md.needs_merge('resources'):
resources_md = MergeDict(md.base.get('resources') or {}, md.override.get('resources') or {})
@@ -1143,6 +1174,7 @@ def merge_deploy(base, override):
md['resources'] = dict(resources_md)
if md.needs_merge('placement'):
placement_md = MergeDict(md.base.get('placement') or {}, md.override.get('placement') or {})
+ placement_md.merge_scalar('max_replicas_per_node')
placement_md.merge_field('constraints', merge_unique_items_lists, default=[])
placement_md.merge_field('preferences', merge_unique_objects_lists, default=[])
md['placement'] = dict(placement_md)
@@ -1150,17 +1182,34 @@ def merge_deploy(base, override):
return dict(md)
+def merge_networks(base, override):
+ merged_networks = {}
+ all_network_names = set(base) | set(override)
+ base = {k: {} for k in base} if isinstance(base, list) else base
+ override = {k: {} for k in override} if isinstance(override, list) else override
+ for network_name in all_network_names:
+ md = MergeDict(base.get(network_name) or {}, override.get(network_name) or {})
+ md.merge_field('aliases', merge_unique_items_lists, [])
+ md.merge_field('link_local_ips', merge_unique_items_lists, [])
+ md.merge_scalar('priority')
+ md.merge_scalar('ipv4_address')
+ md.merge_scalar('ipv6_address')
+ merged_networks[network_name] = dict(md)
+ return merged_networks
+
+
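merge_networks accepts the list form or the mapping form on either side, normalizing lists to empty mappings before merging each network's attachment options:

    merge_networks(['front'], {'front': {'aliases': ['web']}, 'back': None})
    # -> {'front': {'aliases': ['web']}, 'back': {}}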
def merge_reservations(base, override):
md = MergeDict(base, override)
md.merge_scalar('cpus')
md.merge_scalar('memory')
md.merge_sequence('generic_resources', types.GenericResource.parse)
+ md.merge_field('devices', merge_unique_objects_lists, default=[])
return dict(md)
def merge_unique_objects_lists(base, override):
- result = dict((json_hash(i), i) for i in base + override)
- return [i[1] for i in sorted([(k, v) for k, v in result.items()], key=lambda x: x[0])]
+ result = {json_hash(i): i for i in base + override}
+ return [i[1] for i in sorted(((k, v) for k, v in result.items()), key=itemgetter(0))]
def merge_blkio_config(base, override):
@@ -1168,11 +1217,11 @@ def merge_blkio_config(base, override):
md.merge_scalar('weight')
def merge_blkio_limits(base, override):
- index = dict((b['path'], b) for b in base)
- for o in override:
- index[o['path']] = o
+ get_path = itemgetter('path')
+ index = {get_path(b): b for b in base}
+ index.update((get_path(o), o) for o in override)
- return sorted(list(index.values()), key=lambda x: x['path'])
+ return sorted(index.values(), key=get_path)
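Per-device limits are keyed by `path`, the override wins on collision, and the result comes back sorted:

    merge_blkio_limits([{'path': '/dev/sda', 'rate': 100}],
                       [{'path': '/dev/sda', 'rate': 200}, {'path': '/dev/sdb', 'rate': 50}])
    # -> [{'path': '/dev/sda', 'rate': 200}, {'path': '/dev/sdb', 'rate': 50}]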
for field in [
"device_read_bps", "device_read_iops", "device_write_bps",
@@ -1279,7 +1328,7 @@ def resolve_volume_paths(working_dir, service_dict):
def resolve_volume_path(working_dir, volume):
if isinstance(volume, dict):
- if volume.get('source', '').startswith('.') and volume['type'] == 'bind':
+ if volume.get('source', '').startswith(('.', '~')) and volume['type'] == 'bind':
volume['source'] = expand_path(working_dir, volume['source'])
return volume
@@ -1293,7 +1342,7 @@ def resolve_volume_path(working_dir, volume):
if host_path.startswith('.'):
host_path = expand_path(working_dir, host_path)
host_path = os.path.expanduser(host_path)
- return u"{}:{}{}".format(host_path, container_path, (':' + mode if mode else ''))
+ return "{}:{}{}".format(host_path, container_path, (':' + mode if mode else ''))
return container_path
@@ -1303,7 +1352,7 @@ def normalize_build(service_dict, working_dir, environment):
if 'build' in service_dict:
build = {}
# Shortcut where specifying a string is treated as the build context
- if isinstance(service_dict['build'], six.string_types):
+ if isinstance(service_dict['build'], str):
build['context'] = service_dict.pop('build')
else:
build.update(service_dict['build'])
@@ -1329,7 +1378,7 @@ def validate_paths(service_dict):
if 'build' in service_dict:
build = service_dict.get('build', {})
- if isinstance(build, six.string_types):
+ if isinstance(build, str):
build_path = build
elif isinstance(build, dict) and 'context' in build:
build_path = build['context']
@@ -1420,7 +1469,7 @@ def merge_list_or_string(base, override):
def to_list(value):
if value is None:
return []
- elif isinstance(value, six.string_types):
+ elif isinstance(value, str):
return [value]
else:
return value
@@ -1434,15 +1483,15 @@ def has_uppercase(name):
return any(char in string.ascii_uppercase for char in name)
-def load_yaml(filename, encoding=None):
+def load_yaml(filename, encoding=None, binary=True):
try:
- with io.open(filename, 'r', encoding=encoding) as fh:
+ with open(filename, 'rb' if binary else 'r', encoding=encoding) as fh:
return yaml.safe_load(fh)
- except (IOError, yaml.YAMLError, UnicodeDecodeError) as e:
+ except (OSError, yaml.YAMLError, UnicodeDecodeError) as e:
if encoding is None:
# Sometimes the user's locale sets an encoding that doesn't match
# the YAML files. In such cases, retry once with the "default"
# UTF-8 encoding
- return load_yaml(filename, encoding='utf-8')
+ return load_yaml(filename, encoding='utf-8-sig', binary=False)
error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__
- raise ConfigurationError(u"{}: {}".format(error_name, e))
+ raise ConfigurationError("{}: {}".format(error_name, e))
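The loader now reads bytes and lets PyYAML detect the encoding from the stream; the locale-mismatch retry switches to text mode with utf-8-sig so a leading BOM is consumed as well. A sketch of the two paths:

    load_yaml('docker-compose.yml')    # binary read, PyYAML sniffs the encoding
    load_yaml('legacy.yml', encoding='utf-8-sig', binary=False)   # the explicit fallback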
diff --git a/compose/config/config_schema_v2.0.json b/compose/config/config_schema_v2.0.json
deleted file mode 100644
index eddf787e..00000000
--- a/compose/config/config_schema_v2.0.json
+++ /dev/null
@@ -1,405 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v2.0.json",
- "type": "object",
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- }
- },
-
- "patternProperties": {"^x-": {}},
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "blkio_config": {
- "type": "object",
- "properties": {
- "device_read_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_read_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "weight": {"type": "integer"},
- "weight_device": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_weight"}
- }
- },
- "additionalProperties": false
- },
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "container_name": {"type": "string"},
- "cpu_shares": {"type": ["number", "string"]},
- "cpu_quota": {"type": ["number", "string"]},
- "cpuset": {"type": "string"},
- "depends_on": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_opt": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- },
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "extends": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
-
- "properties": {
- "service": {"type": "string"},
- "file": {"type": "string"}
- },
- "required": ["service"],
- "additionalProperties": false
- }
- ]
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "ipc": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {"type": "object"}
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "mem_limit": {"type": ["number", "string"]},
- "mem_reservation": {"type": ["string", "integer"]},
- "mem_swappiness": {"type": "integer"},
- "memswap_limit": {"type": ["number", "string"]},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"},
- "priority": {"type": "number"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "oom_score_adj": {"type": "integer", "minimum": -1000, "maximum": 1000},
- "group_add": {
- "type": "array",
- "items": {
- "type": ["string", "number"]
- },
- "uniqueItems": true
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "ports"
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "volume_driver": {"type": "string"},
- "volumes_from": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "working_dir": {"type": "string"}
- },
-
- "dependencies": {
- "memswap_limit": ["mem_limit"]
- },
- "additionalProperties": false
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array"
- },
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": "string"}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "blkio_limit": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "rate": {"type": ["integer", "string"]}
- },
- "additionalProperties": false
- },
- "blkio_weight": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "weight": {"type": "integer"}
- },
- "additionalProperties": false
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
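This file and the other per-version v2.x/v3.x schemas below are deleted in favour of the single compose_spec.json. Validation still goes through validate_against_config_schema, but the underlying check is ordinary draft-04 jsonschema; a sketch, ignoring the custom `ports`/`expose`/`duration` format checkers Compose registers:

    import json
    from jsonschema import Draft4Validator   # the schemas declare draft-04

    with open('compose/config/compose_spec.json') as f:
        schema = json.load(f)

    sample = {'services': {'web': {'image': 'nginx'}}}
    for error in Draft4Validator(schema).iter_errors(sample):
        print(error.message)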
diff --git a/compose/config/config_schema_v2.1.json b/compose/config/config_schema_v2.1.json
deleted file mode 100644
index 5ad5a20e..00000000
--- a/compose/config/config_schema_v2.1.json
+++ /dev/null
@@ -1,461 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v2.1.json",
- "type": "object",
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- }
- },
-
- "patternProperties": {"^x-": {}},
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "blkio_config": {
- "type": "object",
- "properties": {
- "device_read_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_read_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "weight": {"type": "integer"},
- "weight_device": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_weight"}
- }
- },
- "additionalProperties": false
- },
-
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"},
- "labels": {"$ref": "#/definitions/labels"},
- "isolation": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "container_name": {"type": "string"},
- "cpu_shares": {"type": ["number", "string"]},
- "cpu_quota": {"type": ["number", "string"]},
- "cpu_period": {"type": ["number", "string"]},
- "cpuset": {"type": "string"},
- "depends_on": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "additionalProperties": false,
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "condition": {
- "type": "string",
- "enum": ["service_started", "service_healthy"]
- }
- },
- "required": ["condition"]
- }
- }
- }
- ]
- },
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns_opt": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- },
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "extends": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
-
- "properties": {
- "service": {"type": "string"},
- "file": {"type": "string"}
- },
- "required": ["service"],
- "additionalProperties": false
- }
- ]
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "ipc": {"type": "string"},
- "isolation": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {"type": "object"}
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "mem_limit": {"type": ["number", "string"]},
- "mem_reservation": {"type": ["string", "integer"]},
- "mem_swappiness": {"type": "integer"},
- "memswap_limit": {"type": ["number", "string"]},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"},
- "link_local_ips": {"$ref": "#/definitions/list_of_strings"},
- "priority": {"type": "number"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "oom_kill_disable": {"type": "boolean"},
- "oom_score_adj": {"type": "integer", "minimum": -1000, "maximum": 1000},
- "group_add": {
- "type": "array",
- "items": {
- "type": ["string", "number"]
- },
- "uniqueItems": true
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "ports"
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "pids_limit": {"type": ["number", "string"]},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "storage_opt": {"type": "object"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "volume_driver": {"type": "string"},
- "volumes_from": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "working_dir": {"type": "string"}
- },
-
- "dependencies": {
- "memswap_limit": ["mem_limit"]
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string"},
- "retries": {"type": "number"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string"}
- }
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array"
- },
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": "string"}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "enable_ipv6": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"},
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"},
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "blkio_limit": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "rate": {"type": ["integer", "string"]}
- },
- "additionalProperties": false
- },
- "blkio_weight": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "weight": {"type": "integer"}
- },
- "additionalProperties": false
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
diff --git a/compose/config/config_schema_v2.2.json b/compose/config/config_schema_v2.2.json
deleted file mode 100644
index 26044b65..00000000
--- a/compose/config/config_schema_v2.2.json
+++ /dev/null
@@ -1,470 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v2.2.json",
- "type": "object",
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- }
- },
-
- "patternProperties": {"^x-": {}},
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "blkio_config": {
- "type": "object",
- "properties": {
- "device_read_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_read_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "weight": {"type": "integer"},
- "weight_device": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_weight"}
- }
- },
- "additionalProperties": false
- },
-
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"},
- "labels": {"$ref": "#/definitions/labels"},
- "cache_from": {"$ref": "#/definitions/list_of_strings"},
- "network": {"type": "string"},
- "isolation": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "container_name": {"type": "string"},
- "cpu_count": {"type": "integer", "minimum": 0},
- "cpu_percent": {"type": "integer", "minimum": 0, "maximum": 100},
- "cpu_shares": {"type": ["number", "string"]},
- "cpu_quota": {"type": ["number", "string"]},
- "cpu_period": {"type": ["number", "string"]},
- "cpu_rt_period": {"type": ["number", "string"]},
- "cpu_rt_runtime": {"type": ["number", "string"]},
- "cpus": {"type": "number", "minimum": 0},
- "cpuset": {"type": "string"},
- "depends_on": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "additionalProperties": false,
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "condition": {
- "type": "string",
- "enum": ["service_started", "service_healthy"]
- }
- },
- "required": ["condition"]
- }
- }
- }
- ]
- },
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns_opt": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- },
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "extends": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
-
- "properties": {
- "service": {"type": "string"},
- "file": {"type": "string"}
- },
- "required": ["service"],
- "additionalProperties": false
- }
- ]
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "init": {"type": ["boolean", "string"]},
- "ipc": {"type": "string"},
- "isolation": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {"type": "object"}
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "mem_limit": {"type": ["number", "string"]},
- "mem_reservation": {"type": ["string", "integer"]},
- "mem_swappiness": {"type": "integer"},
- "memswap_limit": {"type": ["number", "string"]},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"},
- "link_local_ips": {"$ref": "#/definitions/list_of_strings"},
- "priority": {"type": "number"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "oom_kill_disable": {"type": "boolean"},
- "oom_score_adj": {"type": "integer", "minimum": -1000, "maximum": 1000},
- "group_add": {
- "type": "array",
- "items": {
- "type": ["string", "number"]
- },
- "uniqueItems": true
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "ports"
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "scale": {"type": "integer"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "pids_limit": {"type": ["number", "string"]},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "storage_opt": {"type": "object"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "volume_driver": {"type": "string"},
- "volumes_from": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "working_dir": {"type": "string"}
- },
-
- "dependencies": {
- "memswap_limit": ["mem_limit"]
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string"},
- "retries": {"type": "number"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string"}
- }
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array"
- },
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": "string"}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "enable_ipv6": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"},
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"},
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "blkio_limit": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "rate": {"type": ["integer", "string"]}
- },
- "additionalProperties": false
- },
- "blkio_weight": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "weight": {"type": "integer"}
- },
- "additionalProperties": false
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
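
The 2.x service definition that ends above allows ulimits either as a bare integer or as a soft/hard pair. A sketch of a fragment that validated against it (names and values are placeholders; shown as JSON, the structure the validator receives after YAML parsing, and itself valid YAML):

    "ulimits": {
        "nproc": 65535,
        "nofile": {"soft": 20000, "hard": 40000}
    }

Both keys satisfy the ^[a-z]+$ pattern; in the object form "soft" and "hard" are both required and nothing else is allowed.
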
diff --git a/compose/config/config_schema_v2.3.json b/compose/config/config_schema_v2.3.json
deleted file mode 100644
index ac0778f2..00000000
--- a/compose/config/config_schema_v2.3.json
+++ /dev/null
@@ -1,514 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v2.3.json",
- "type": "object",
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- }
- },
-
- "patternProperties": {"^x-": {}},
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "blkio_config": {
- "type": "object",
- "properties": {
- "device_read_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_read_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "weight": {"type": "integer"},
- "weight_device": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_weight"}
- }
- },
- "additionalProperties": false
- },
-
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"},
- "labels": {"$ref": "#/definitions/labels"},
- "cache_from": {"$ref": "#/definitions/list_of_strings"},
- "network": {"type": "string"},
- "target": {"type": "string"},
- "shm_size": {"type": ["integer", "string"]},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "isolation": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"$ref": "#/definitions/list_of_strings"},
- "cap_drop": {"$ref": "#/definitions/list_of_strings"},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "container_name": {"type": "string"},
- "cpu_count": {"type": "integer", "minimum": 0},
- "cpu_percent": {"type": "integer", "minimum": 0, "maximum": 100},
- "cpu_shares": {"type": ["number", "string"]},
- "cpu_quota": {"type": ["number", "string"]},
- "cpu_period": {"type": ["number", "string"]},
- "cpu_rt_period": {"type": ["number", "string"]},
- "cpu_rt_runtime": {"type": ["number", "string"]},
- "cpus": {"type": "number", "minimum": 0},
- "cpuset": {"type": "string"},
- "depends_on": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "additionalProperties": false,
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "condition": {
- "type": "string",
- "enum": ["service_started", "service_healthy"]
- }
- },
- "required": ["condition"]
- }
- }
- }
- ]
- },
- "device_cgroup_rules": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"$ref": "#/definitions/list_of_strings"},
- "dns_opt": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- },
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "extends": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
-
- "properties": {
- "service": {"type": "string"},
- "file": {"type": "string"}
- },
- "required": ["service"],
- "additionalProperties": false
- }
- ]
- },
-
- "external_links": {"$ref": "#/definitions/list_of_strings"},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "init": {"type": ["boolean", "string"]},
- "ipc": {"type": "string"},
- "isolation": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"$ref": "#/definitions/list_of_strings"},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {"type": "object"}
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "mem_limit": {"type": ["number", "string"]},
- "mem_reservation": {"type": ["string", "integer"]},
- "mem_swappiness": {"type": "integer"},
- "memswap_limit": {"type": ["number", "string"]},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"},
- "link_local_ips": {"$ref": "#/definitions/list_of_strings"},
- "priority": {"type": "number"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "oom_kill_disable": {"type": "boolean"},
- "oom_score_adj": {"type": "integer", "minimum": -1000, "maximum": 1000},
- "group_add": {
- "type": "array",
- "items": {
- "type": ["string", "number"]
- },
- "uniqueItems": true
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "ports"
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "runtime": {"type": "string"},
- "scale": {"type": "integer"},
- "security_opt": {"$ref": "#/definitions/list_of_strings"},
- "shm_size": {"type": ["number", "string"]},
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "pids_limit": {"type": ["number", "string"]},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "storage_opt": {"type": "object"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "required": ["type"],
- "additionalProperties": false,
- "properties": {
- "type": {"type": "string"},
- "source": {"type": "string"},
- "target": {"type": "string"},
- "read_only": {"type": "boolean"},
- "consistency": {"type": "string"},
- "bind": {
- "type": "object",
- "properties": {
- "propagation": {"type": "string"}
- }
- },
- "volume": {
- "type": "object",
- "properties": {
- "nocopy": {"type": "boolean"}
- }
- },
- "tmpfs": {
- "type": "object",
- "properties": {
- "size": {"type": ["integer", "string"]}
- }
- }
- }
- }
- ],
- "uniqueItems": true
- }
- },
- "volume_driver": {"type": "string"},
- "volumes_from": {"$ref": "#/definitions/list_of_strings"},
- "working_dir": {"type": "string"}
- },
-
- "dependencies": {
- "memswap_limit": ["mem_limit"]
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string"},
- "retries": {"type": "number"},
- "start_period": {"type": "string"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string"}
- }
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array"
- },
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": "string"}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "enable_ipv6": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"},
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"},
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "blkio_limit": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "rate": {"type": ["integer", "string"]}
- },
- "additionalProperties": false
- },
- "blkio_weight": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "weight": {"type": "integer"}
- },
- "additionalProperties": false
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
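
The v2.3 schema deleted above is where depends_on gains the object form carrying a health condition. A sketch of a fragment it accepted (service names "db" and "redis" are placeholders):

    "depends_on": {
        "db": {"condition": "service_healthy"},
        "redis": {"condition": "service_started"}
    }

"condition" is required and restricted to the two enum values; additionalProperties: false rejects anything else under a service name. v2.3 also carries the long-form volume mount and the healthcheck start_period seen above.
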
diff --git a/compose/config/config_schema_v2.4.json b/compose/config/config_schema_v2.4.json
deleted file mode 100644
index 731fa2f9..00000000
--- a/compose/config/config_schema_v2.4.json
+++ /dev/null
@@ -1,513 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v2.4.json",
- "type": "object",
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- }
- },
-
- "patternProperties": {"^x-": {}},
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "blkio_config": {
- "type": "object",
- "properties": {
- "device_read_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_read_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_bps": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "device_write_iops": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_limit"}
- },
- "weight": {"type": "integer"},
- "weight_device": {
- "type": "array",
- "items": {"$ref": "#/definitions/blkio_weight"}
- }
- },
- "additionalProperties": false
- },
-
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"},
- "labels": {"$ref": "#/definitions/labels"},
- "cache_from": {"$ref": "#/definitions/list_of_strings"},
- "network": {"type": "string"},
- "target": {"type": "string"},
- "shm_size": {"type": ["integer", "string"]},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "isolation": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"$ref": "#/definitions/list_of_strings"},
- "cap_drop": {"$ref": "#/definitions/list_of_strings"},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "container_name": {"type": "string"},
- "cpu_count": {"type": "integer", "minimum": 0},
- "cpu_percent": {"type": "integer", "minimum": 0, "maximum": 100},
- "cpu_shares": {"type": ["number", "string"]},
- "cpu_quota": {"type": ["number", "string"]},
- "cpu_period": {"type": ["number", "string"]},
- "cpu_rt_period": {"type": ["number", "string"]},
- "cpu_rt_runtime": {"type": ["number", "string"]},
- "cpus": {"type": "number", "minimum": 0},
- "cpuset": {"type": "string"},
- "depends_on": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "additionalProperties": false,
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "condition": {
- "type": "string",
- "enum": ["service_started", "service_healthy"]
- }
- },
- "required": ["condition"]
- }
- }
- }
- ]
- },
- "device_cgroup_rules": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"$ref": "#/definitions/list_of_strings"},
- "dns_opt": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- },
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "extends": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
-
- "properties": {
- "service": {"type": "string"},
- "file": {"type": "string"}
- },
- "required": ["service"],
- "additionalProperties": false
- }
- ]
- },
-
- "external_links": {"$ref": "#/definitions/list_of_strings"},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "group_add": {
- "type": "array",
- "items": {
- "type": ["string", "number"]
- },
- "uniqueItems": true
- },
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "init": {"type": ["boolean", "string"]},
- "ipc": {"type": "string"},
- "isolation": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"$ref": "#/definitions/list_of_strings"},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {"type": "object"}
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "mem_limit": {"type": ["number", "string"]},
- "mem_reservation": {"type": ["string", "integer"]},
- "mem_swappiness": {"type": "integer"},
- "memswap_limit": {"type": ["number", "string"]},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"},
- "link_local_ips": {"$ref": "#/definitions/list_of_strings"},
- "priority": {"type": "number"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "oom_kill_disable": {"type": "boolean"},
- "oom_score_adj": {"type": "integer", "minimum": -1000, "maximum": 1000},
- "pid": {"type": ["string", "null"]},
- "platform": {"type": "string"},
- "ports": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "ports"
- },
- "uniqueItems": true
- },
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "runtime": {"type": "string"},
- "scale": {"type": "integer"},
- "security_opt": {"$ref": "#/definitions/list_of_strings"},
- "shm_size": {"type": ["number", "string"]},
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "pids_limit": {"type": ["number", "string"]},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "storage_opt": {"type": "object"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "required": ["type"],
- "additionalProperties": false,
- "properties": {
- "type": {"type": "string"},
- "source": {"type": "string"},
- "target": {"type": "string"},
- "read_only": {"type": "boolean"},
- "consistency": {"type": "string"},
- "bind": {
- "type": "object",
- "properties": {
- "propagation": {"type": "string"}
- }
- },
- "volume": {
- "type": "object",
- "properties": {
- "nocopy": {"type": "boolean"}
- }
- },
- "tmpfs": {
- "type": "object",
- "properties": {
- "size": {"type": ["integer", "string"]}
- }
- }
- }
- }
- ],
- "uniqueItems": true
- }
- },
- "volume_driver": {"type": "string"},
- "volumes_from": {"$ref": "#/definitions/list_of_strings"},
- "working_dir": {"type": "string"}
- },
-
- "dependencies": {
- "memswap_limit": ["mem_limit"]
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string"},
- "retries": {"type": "number"},
- "start_period": {"type": "string"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string"}
- }
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array"
- },
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": "string"}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "enable_ipv6": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"},
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"},
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "blkio_limit": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "rate": {"type": ["integer", "string"]}
- },
- "additionalProperties": false
- },
- "blkio_weight": {
- "type": "object",
- "properties": {
- "path": {"type": "string"},
- "weight": {"type": "integer"}
- },
- "additionalProperties": false
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
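
v2.4, deleted above, differs from v2.3 mainly by the added platform key and the relocated group_add; the long-form mount under volumes is unchanged. A sketch of one mount entry it accepted ("type" is the only required key; paths are placeholders):

    {
        "type": "bind",
        "source": "./static",
        "target": "/opt/app/static",
        "read_only": true
    }

A tmpfs mount would instead set "type": "tmpfs", optionally with a nested "tmpfs": {"size": ...}; bind mounts may carry "bind": {"propagation": ...}.
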
diff --git a/compose/config/config_schema_v3.0.json b/compose/config/config_schema_v3.0.json
deleted file mode 100644
index 10c36352..00000000
--- a/compose/config/config_schema_v3.0.json
+++ /dev/null
@@ -1,399 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v3.0.json",
- "type": "object",
- "required": ["version"],
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- }
- },
-
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "deploy": {"$ref": "#/definitions/deployment"},
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "container_name": {"type": "string"},
- "depends_on": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "ipc": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number", "null"]}
- }
- }
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "ports"
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "working_dir": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string"},
- "retries": {"type": "number"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string"}
- }
- },
- "deployment": {
- "id": "#/definitions/deployment",
- "type": ["object", "null"],
- "properties": {
- "mode": {"type": "string"},
- "replicas": {"type": "integer"},
- "labels": {"$ref": "#/definitions/labels"},
- "update_config": {
- "type": "object",
- "properties": {
- "parallelism": {"type": "integer"},
- "delay": {"type": "string", "format": "duration"},
- "failure_action": {"type": "string"},
- "monitor": {"type": "string", "format": "duration"},
- "max_failure_ratio": {"type": "number"}
- },
- "additionalProperties": false
- },
- "resources": {
- "type": "object",
- "properties": {
- "limits": {"$ref": "#/definitions/resource"},
- "reservations": {"$ref": "#/definitions/resource"}
- },
- "additionalProperties": false
- },
- "restart_policy": {
- "type": "object",
- "properties": {
- "condition": {"type": "string"},
- "delay": {"type": "string", "format": "duration"},
- "max_attempts": {"type": "integer"},
- "window": {"type": "string", "format": "duration"}
- },
- "additionalProperties": false
- },
- "placement": {
- "type": "object",
- "properties": {
- "constraints": {"type": "array", "items": {"type": "string"}}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
-
- "resource": {
- "id": "#/definitions/resource",
- "type": "object",
- "properties": {
- "cpus": {"type": "string"},
- "memory": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "subnet": {"type": "string", "format": "subnet_ip_address"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
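
v3.0, deleted above, is the first schema in this set to carry the swarm-oriented deploy definition, dropping the 2.x runtime resource keys (mem_limit, cpu_quota and friends) in favour of deploy.resources. A sketch of a deploy fragment it accepted (all values are placeholders; note that cpus and memory are strings in the resource definition):

    "deploy": {
        "mode": "replicated",
        "replicas": 3,
        "resources": {
            "limits": {"cpus": "0.50", "memory": "128M"}
        },
        "restart_policy": {"condition": "on-failure"}
    }
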
diff --git a/compose/config/config_schema_v3.1.json b/compose/config/config_schema_v3.1.json
deleted file mode 100644
index 8630ec31..00000000
--- a/compose/config/config_schema_v3.1.json
+++ /dev/null
@@ -1,444 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v3.1.json",
- "type": "object",
- "required": ["version"],
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- },
-
- "secrets": {
- "id": "#/properties/secrets",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/secret"
- }
- },
- "additionalProperties": false
- }
- },
-
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "deploy": {"$ref": "#/definitions/deployment"},
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "container_name": {"type": "string"},
- "depends_on": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "ipc": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number", "null"]}
- }
- }
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "ports"
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "secrets": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "source": {"type": "string"},
- "target": {"type": "string"},
- "uid": {"type": "string"},
- "gid": {"type": "string"},
- "mode": {"type": "number"}
- }
- }
- ]
- }
- },
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "working_dir": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string"},
- "retries": {"type": "number"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string"}
- }
- },
- "deployment": {
- "id": "#/definitions/deployment",
- "type": ["object", "null"],
- "properties": {
- "mode": {"type": "string"},
- "replicas": {"type": "integer"},
- "labels": {"$ref": "#/definitions/labels"},
- "update_config": {
- "type": "object",
- "properties": {
- "parallelism": {"type": "integer"},
- "delay": {"type": "string", "format": "duration"},
- "failure_action": {"type": "string"},
- "monitor": {"type": "string", "format": "duration"},
- "max_failure_ratio": {"type": "number"}
- },
- "additionalProperties": false
- },
- "resources": {
- "type": "object",
- "properties": {
- "limits": {"$ref": "#/definitions/resource"},
- "reservations": {"$ref": "#/definitions/resource"}
- },
- "additionalProperties": false
- },
- "restart_policy": {
- "type": "object",
- "properties": {
- "condition": {"type": "string"},
- "delay": {"type": "string", "format": "duration"},
- "max_attempts": {"type": "integer"},
- "window": {"type": "string", "format": "duration"}
- },
- "additionalProperties": false
- },
- "placement": {
- "type": "object",
- "properties": {
- "constraints": {"type": "array", "items": {"type": "string"}}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
-
- "resource": {
- "id": "#/definitions/resource",
- "type": "object",
- "properties": {
- "cpus": {"type": "string"},
- "memory": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "subnet": {"type": "string", "format": "subnet_ip_address"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "secret": {
- "id": "#/definitions/secret",
- "type": "object",
- "properties": {
- "file": {"type": "string"},
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- }
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
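
v3.1, deleted above, adds secrets both at the top level and per service. A sketch of the two halves as the validator sees them (names and the file path are placeholders):

    "secrets": {
        "db_password": {"file": "./db_password.txt"},
        "ca_cert": {"external": true}
    }

and, inside a service:

    "secrets": [
        "db_password",
        {"source": "ca_cert", "target": "ca.pem", "uid": "103", "mode": 292}
    ]

The short string form and the long object form can be mixed; uid and gid are strings while mode is a number (292 == 0444 octal).
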
diff --git a/compose/config/config_schema_v3.2.json b/compose/config/config_schema_v3.2.json
deleted file mode 100644
index 5eccdea7..00000000
--- a/compose/config/config_schema_v3.2.json
+++ /dev/null
@@ -1,492 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v3.2.json",
- "type": "object",
- "required": ["version"],
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- },
-
- "secrets": {
- "id": "#/properties/secrets",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/secret"
- }
- },
- "additionalProperties": false
- }
- },
-
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "deploy": {"$ref": "#/definitions/deployment"},
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"},
- "labels": {"$ref": "#/definitions/list_or_dict"},
- "cache_from": {"$ref": "#/definitions/list_of_strings"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "container_name": {"type": "string"},
- "depends_on": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "ipc": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number", "null"]}
- }
- }
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "number", "format": "ports"},
- {"type": "string", "format": "ports"},
- {
- "type": "object",
- "properties": {
- "mode": {"type": "string"},
- "target": {"type": "integer"},
- "published": {"type": "integer"},
- "protocol": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "secrets": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "source": {"type": "string"},
- "target": {"type": "string"},
- "uid": {"type": "string"},
- "gid": {"type": "string"},
- "mode": {"type": "number"}
- }
- }
- ]
- }
- },
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "required": ["type"],
- "additionalProperties": false,
- "properties": {
- "type": {"type": "string"},
- "source": {"type": "string"},
- "target": {"type": "string"},
- "read_only": {"type": "boolean"},
- "consistency": {"type": "string"},
- "bind": {
- "type": "object",
- "properties": {
- "propagation": {"type": "string"}
- }
- },
- "volume": {
- "type": "object",
- "properties": {
- "nocopy": {"type": "boolean"}
- }
- }
- }
- }
- ],
- "uniqueItems": true
- }
- },
- "working_dir": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string"},
- "retries": {"type": "number"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string"}
- }
- },
- "deployment": {
- "id": "#/definitions/deployment",
- "type": ["object", "null"],
- "properties": {
- "mode": {"type": "string"},
- "endpoint_mode": {"type": "string"},
- "replicas": {"type": "integer"},
- "labels": {"$ref": "#/definitions/labels"},
- "update_config": {
- "type": "object",
- "properties": {
- "parallelism": {"type": "integer"},
- "delay": {"type": "string", "format": "duration"},
- "failure_action": {"type": "string"},
- "monitor": {"type": "string", "format": "duration"},
- "max_failure_ratio": {"type": "number"}
- },
- "additionalProperties": false
- },
- "resources": {
- "type": "object",
- "properties": {
- "limits": {"$ref": "#/definitions/resource"},
- "reservations": {"$ref": "#/definitions/resource"}
- },
- "additionalProperties": false
- },
- "restart_policy": {
- "type": "object",
- "properties": {
- "condition": {"type": "string"},
- "delay": {"type": "string", "format": "duration"},
- "max_attempts": {"type": "integer"},
- "window": {"type": "string", "format": "duration"}
- },
- "additionalProperties": false
- },
- "placement": {
- "type": "object",
- "properties": {
- "constraints": {"type": "array", "items": {"type": "string"}}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
-
- "resource": {
- "id": "#/definitions/resource",
- "type": "object",
- "properties": {
- "cpus": {"type": "string"},
- "memory": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "subnet": {"type": "string", "format": "subnet_ip_address"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "attachable": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "secret": {
- "id": "#/definitions/secret",
- "type": "object",
- "properties": {
- "file": {"type": "string"},
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- }
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
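
v3.2, deleted above, extends ports with an object form next to the classic string/number form, and brings the long-form volume mount and the attachable network flag into the 3.x line. A sketch of a mixed ports list it accepted (port numbers are placeholders):

    "ports": [
        "8080:80",
        {"target": 443, "published": 8443, "protocol": "tcp", "mode": "host"}
    ]

target and published must be integers in the object form; string entries still go through the "ports" format check.
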
diff --git a/compose/config/config_schema_v3.3.json b/compose/config/config_schema_v3.3.json
deleted file mode 100644
index f63842b9..00000000
--- a/compose/config/config_schema_v3.3.json
+++ /dev/null
@@ -1,551 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v3.3.json",
- "type": "object",
- "required": ["version"],
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- },
-
- "secrets": {
- "id": "#/properties/secrets",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/secret"
- }
- },
- "additionalProperties": false
- },
-
- "configs": {
- "id": "#/properties/configs",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/config"
- }
- },
- "additionalProperties": false
- }
- },
-
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "deploy": {"$ref": "#/definitions/deployment"},
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"},
- "labels": {"$ref": "#/definitions/labels"},
- "cache_from": {"$ref": "#/definitions/list_of_strings"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "configs": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "source": {"type": "string"},
- "target": {"type": "string"},
- "uid": {"type": "string"},
- "gid": {"type": "string"},
- "mode": {"type": "number"}
- }
- }
- ]
- }
- },
- "container_name": {"type": "string"},
- "credential_spec": {"type": "object", "properties": {
- "file": {"type": "string"},
- "registry": {"type": "string"}
- }},
- "depends_on": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "ipc": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number", "null"]}
- }
- }
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "number", "format": "ports"},
- {"type": "string", "format": "ports"},
- {
- "type": "object",
- "properties": {
- "mode": {"type": "string"},
- "target": {"type": "integer"},
- "published": {"type": "integer"},
- "protocol": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "secrets": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "source": {"type": "string"},
- "target": {"type": "string"},
- "uid": {"type": "string"},
- "gid": {"type": "string"},
- "mode": {"type": "number"}
- }
- }
- ]
- }
- },
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "required": ["type"],
- "additionalProperties": false,
- "properties": {
- "type": {"type": "string"},
- "source": {"type": "string"},
- "target": {"type": "string"},
- "read_only": {"type": "boolean"},
- "consistency": {"type": "string"},
- "bind": {
- "type": "object",
- "properties": {
- "propagation": {"type": "string"}
- }
- },
- "volume": {
- "type": "object",
- "properties": {
- "nocopy": {"type": "boolean"}
- }
- }
- }
- }
- ],
- "uniqueItems": true
- }
- },
- "working_dir": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string"},
- "retries": {"type": "number"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string"}
- }
- },
- "deployment": {
- "id": "#/definitions/deployment",
- "type": ["object", "null"],
- "properties": {
- "mode": {"type": "string"},
- "endpoint_mode": {"type": "string"},
- "replicas": {"type": "integer"},
- "labels": {"$ref": "#/definitions/labels"},
- "update_config": {
- "type": "object",
- "properties": {
- "parallelism": {"type": "integer"},
- "delay": {"type": "string", "format": "duration"},
- "failure_action": {"type": "string"},
- "monitor": {"type": "string", "format": "duration"},
- "max_failure_ratio": {"type": "number"}
- },
- "additionalProperties": false
- },
- "resources": {
- "type": "object",
- "properties": {
- "limits": {"$ref": "#/definitions/resource"},
- "reservations": {"$ref": "#/definitions/resource"}
- },
- "additionalProperties": false
- },
- "restart_policy": {
- "type": "object",
- "properties": {
- "condition": {"type": "string"},
- "delay": {"type": "string", "format": "duration"},
- "max_attempts": {"type": "integer"},
- "window": {"type": "string", "format": "duration"}
- },
- "additionalProperties": false
- },
- "placement": {
- "type": "object",
- "properties": {
- "constraints": {"type": "array", "items": {"type": "string"}},
- "preferences": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "spread": {"type": "string"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
-
- "resource": {
- "id": "#/definitions/resource",
- "type": "object",
- "properties": {
- "cpus": {"type": "string"},
- "memory": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "subnet": {"type": "string", "format": "subnet_ip_address"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "attachable": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "secret": {
- "id": "#/definitions/secret",
- "type": "object",
- "properties": {
- "file": {"type": "string"},
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- }
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "config": {
- "id": "#/definitions/config",
- "type": "object",
- "properties": {
- "file": {"type": "string"},
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- }
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
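
For context: these deleted draft-04 schema files were consumed through the jsonschema library (the same Draft4Validator imported in compose/config/validation.py, patched further below). A hedged, trimmed-down sketch of the build-or-image constraint that closes each schema; the schema dict and service values here are illustrative, not the full Compose schema:

    from jsonschema import Draft4Validator, ValidationError

    # Stand-in for the "constraints/service" block above: a service must
    # declare either "build" or "image".
    schema = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "anyOf": [{"required": ["build"]}, {"required": ["image"]}],
    }

    Draft4Validator(schema).validate({"image": "busybox"})  # passes
    try:
        Draft4Validator(schema).validate({"command": "echo hi"})
    except ValidationError as e:
        print(e.message)  # neither "build" nor "image" is present
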
diff --git a/compose/config/config_schema_v3.4.json b/compose/config/config_schema_v3.4.json
deleted file mode 100644
index 23e95544..00000000
--- a/compose/config/config_schema_v3.4.json
+++ /dev/null
@@ -1,560 +0,0 @@
-
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v3.4.json",
- "type": "object",
- "required": ["version"],
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- },
-
- "secrets": {
- "id": "#/properties/secrets",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/secret"
- }
- },
- "additionalProperties": false
- },
-
- "configs": {
- "id": "#/properties/configs",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/config"
- }
- },
- "additionalProperties": false
- }
- },
-
- "patternProperties": {"^x-": {}},
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "deploy": {"$ref": "#/definitions/deployment"},
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"},
- "labels": {"$ref": "#/definitions/labels"},
- "cache_from": {"$ref": "#/definitions/list_of_strings"},
- "network": {"type": "string"},
- "target": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "configs": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "source": {"type": "string"},
- "target": {"type": "string"},
- "uid": {"type": "string"},
- "gid": {"type": "string"},
- "mode": {"type": "number"}
- }
- }
- ]
- }
- },
- "container_name": {"type": "string"},
- "credential_spec": {"type": "object", "properties": {
- "file": {"type": "string"},
- "registry": {"type": "string"}
- }},
- "depends_on": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "ipc": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number", "null"]}
- }
- }
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "number", "format": "ports"},
- {"type": "string", "format": "ports"},
- {
- "type": "object",
- "properties": {
- "mode": {"type": "string"},
- "target": {"type": "integer"},
- "published": {"type": "integer"},
- "protocol": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "secrets": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "source": {"type": "string"},
- "target": {"type": "string"},
- "uid": {"type": "string"},
- "gid": {"type": "string"},
- "mode": {"type": "number"}
- }
- }
- ]
- }
- },
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "required": ["type"],
- "additionalProperties": false,
- "properties": {
- "type": {"type": "string"},
- "source": {"type": "string"},
- "target": {"type": "string"},
- "read_only": {"type": "boolean"},
- "consistency": {"type": "string"},
- "bind": {
- "type": "object",
- "properties": {
- "propagation": {"type": "string"}
- }
- },
- "volume": {
- "type": "object",
- "properties": {
- "nocopy": {"type": "boolean"}
- }
- }
- }
- }
- ],
- "uniqueItems": true
- }
- },
- "working_dir": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string", "format": "duration"},
- "retries": {"type": "number"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string", "format": "duration"},
- "start_period": {"type": "string", "format": "duration"}
- }
- },
- "deployment": {
- "id": "#/definitions/deployment",
- "type": ["object", "null"],
- "properties": {
- "mode": {"type": "string"},
- "endpoint_mode": {"type": "string"},
- "replicas": {"type": "integer"},
- "labels": {"$ref": "#/definitions/labels"},
- "update_config": {
- "type": "object",
- "properties": {
- "parallelism": {"type": "integer"},
- "delay": {"type": "string", "format": "duration"},
- "failure_action": {"type": "string"},
- "monitor": {"type": "string", "format": "duration"},
- "max_failure_ratio": {"type": "number"},
- "order": {"type": "string", "enum": [
- "start-first", "stop-first"
- ]}
- },
- "additionalProperties": false
- },
- "resources": {
- "type": "object",
- "properties": {
- "limits": {"$ref": "#/definitions/resource"},
- "reservations": {"$ref": "#/definitions/resource"}
- },
- "additionalProperties": false
- },
- "restart_policy": {
- "type": "object",
- "properties": {
- "condition": {"type": "string"},
- "delay": {"type": "string", "format": "duration"},
- "max_attempts": {"type": "integer"},
- "window": {"type": "string", "format": "duration"}
- },
- "additionalProperties": false
- },
- "placement": {
- "type": "object",
- "properties": {
- "constraints": {"type": "array", "items": {"type": "string"}},
- "preferences": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "spread": {"type": "string"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
-
- "resource": {
- "id": "#/definitions/resource",
- "type": "object",
- "properties": {
- "cpus": {"type": "string"},
- "memory": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": ["object", "null"],
- "properties": {
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "subnet": {"type": "string", "format": "subnet_ip_address"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "attachable": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "name": {"type": "string"},
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "secret": {
- "id": "#/definitions/secret",
- "type": "object",
- "properties": {
- "file": {"type": "string"},
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- }
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "config": {
- "id": "#/definitions/config",
- "type": "object",
- "properties": {
- "file": {"type": "string"},
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- }
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
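
Note the top-level pairing of "additionalProperties": false with "patternProperties": {"^x-": {}} that appears in this v3.4 schema: it admits vendor extension keys while still rejecting other unknown keys. A minimal illustration with made-up key names:

    from jsonschema import Draft4Validator

    schema = {
        "type": "object",
        "properties": {"version": {"type": "string"}},
        "patternProperties": {"^x-": {}},
        "additionalProperties": False,
    }

    validator = Draft4Validator(schema)
    validator.validate({"version": "3.4", "x-owner": "platform-team"})  # ok
    print(validator.is_valid({"version": "3.4", "owner": "nope"}))      # False
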
diff --git a/compose/config/config_schema_v3.5.json b/compose/config/config_schema_v3.5.json
deleted file mode 100644
index e3bdecbc..00000000
--- a/compose/config/config_schema_v3.5.json
+++ /dev/null
@@ -1,588 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "id": "config_schema_v3.5.json",
- "type": "object",
- "required": ["version"],
-
- "properties": {
- "version": {
- "type": "string"
- },
-
- "services": {
- "id": "#/properties/services",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/service"
- }
- },
- "additionalProperties": false
- },
-
- "networks": {
- "id": "#/properties/networks",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/network"
- }
- }
- },
-
- "volumes": {
- "id": "#/properties/volumes",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/volume"
- }
- },
- "additionalProperties": false
- },
-
- "secrets": {
- "id": "#/properties/secrets",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/secret"
- }
- },
- "additionalProperties": false
- },
-
- "configs": {
- "id": "#/properties/configs",
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "$ref": "#/definitions/config"
- }
- },
- "additionalProperties": false
- }
- },
-
- "patternProperties": {"^x-": {}},
- "additionalProperties": false,
-
- "definitions": {
-
- "service": {
- "id": "#/definitions/service",
- "type": "object",
-
- "properties": {
- "deploy": {"$ref": "#/definitions/deployment"},
- "build": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "context": {"type": "string"},
- "dockerfile": {"type": "string"},
- "args": {"$ref": "#/definitions/list_or_dict"},
- "labels": {"$ref": "#/definitions/labels"},
- "cache_from": {"$ref": "#/definitions/list_of_strings"},
- "network": {"type": "string"},
- "target": {"type": "string"},
- "shm_size": {"type": ["integer", "string"]}
- },
- "additionalProperties": false
- }
- ]
- },
- "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "cgroup_parent": {"type": "string"},
- "command": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "configs": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "source": {"type": "string"},
- "target": {"type": "string"},
- "uid": {"type": "string"},
- "gid": {"type": "string"},
- "mode": {"type": "number"}
- }
- }
- ]
- }
- },
- "container_name": {"type": "string"},
- "credential_spec": {"type": "object", "properties": {
- "file": {"type": "string"},
- "registry": {"type": "string"}
- }},
- "depends_on": {"$ref": "#/definitions/list_of_strings"},
- "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "dns": {"$ref": "#/definitions/string_or_list"},
- "dns_search": {"$ref": "#/definitions/string_or_list"},
- "domainname": {"type": "string"},
- "entrypoint": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "env_file": {"$ref": "#/definitions/string_or_list"},
- "environment": {"$ref": "#/definitions/list_or_dict"},
-
- "expose": {
- "type": "array",
- "items": {
- "type": ["string", "number"],
- "format": "expose"
- },
- "uniqueItems": true
- },
-
- "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
- "healthcheck": {"$ref": "#/definitions/healthcheck"},
- "hostname": {"type": "string"},
- "image": {"type": "string"},
- "ipc": {"type": "string"},
- "isolation": {"type": "string"},
- "labels": {"$ref": "#/definitions/labels"},
- "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
-
- "logging": {
- "type": "object",
-
- "properties": {
- "driver": {"type": "string"},
- "options": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number", "null"]}
- }
- }
- },
- "additionalProperties": false
- },
-
- "mac_address": {"type": "string"},
- "network_mode": {"type": "string"},
-
- "networks": {
- "oneOf": [
- {"$ref": "#/definitions/list_of_strings"},
- {
- "type": "object",
- "patternProperties": {
- "^[a-zA-Z0-9._-]+$": {
- "oneOf": [
- {
- "type": "object",
- "properties": {
- "aliases": {"$ref": "#/definitions/list_of_strings"},
- "ipv4_address": {"type": "string"},
- "ipv6_address": {"type": "string"}
- },
- "additionalProperties": false
- },
- {"type": "null"}
- ]
- }
- },
- "additionalProperties": false
- }
- ]
- },
- "pid": {"type": ["string", "null"]},
-
- "ports": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "number", "format": "ports"},
- {"type": "string", "format": "ports"},
- {
- "type": "object",
- "properties": {
- "mode": {"type": "string"},
- "target": {"type": "integer"},
- "published": {"type": "integer"},
- "protocol": {"type": "string"}
- },
- "additionalProperties": false
- }
- ]
- },
- "uniqueItems": true
- },
-
- "privileged": {"type": "boolean"},
- "read_only": {"type": "boolean"},
- "restart": {"type": "string"},
- "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
- "shm_size": {"type": ["number", "string"]},
- "secrets": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "properties": {
- "source": {"type": "string"},
- "target": {"type": "string"},
- "uid": {"type": "string"},
- "gid": {"type": "string"},
- "mode": {"type": "number"}
- }
- }
- ]
- }
- },
- "sysctls": {"$ref": "#/definitions/list_or_dict"},
- "stdin_open": {"type": "boolean"},
- "stop_grace_period": {"type": "string", "format": "duration"},
- "stop_signal": {"type": "string"},
- "tmpfs": {"$ref": "#/definitions/string_or_list"},
- "tty": {"type": "boolean"},
- "ulimits": {
- "type": "object",
- "patternProperties": {
- "^[a-z]+$": {
- "oneOf": [
- {"type": "integer"},
- {
- "type":"object",
- "properties": {
- "hard": {"type": "integer"},
- "soft": {"type": "integer"}
- },
- "required": ["soft", "hard"],
- "additionalProperties": false
- }
- ]
- }
- }
- },
- "user": {"type": "string"},
- "userns_mode": {"type": "string"},
- "volumes": {
- "type": "array",
- "items": {
- "oneOf": [
- {"type": "string"},
- {
- "type": "object",
- "required": ["type"],
- "properties": {
- "type": {"type": "string"},
- "source": {"type": "string"},
- "target": {"type": "string"},
- "read_only": {"type": "boolean"},
- "consistency": {"type": "string"},
- "bind": {
- "type": "object",
- "properties": {
- "propagation": {"type": "string"}
- }
- },
- "volume": {
- "type": "object",
- "properties": {
- "nocopy": {"type": "boolean"}
- }
- }
- },
- "additionalProperties": false
- }
- ],
- "uniqueItems": true
- }
- },
- "working_dir": {"type": "string"}
- },
- "additionalProperties": false
- },
-
- "healthcheck": {
- "id": "#/definitions/healthcheck",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "disable": {"type": "boolean"},
- "interval": {"type": "string", "format": "duration"},
- "retries": {"type": "number"},
- "test": {
- "oneOf": [
- {"type": "string"},
- {"type": "array", "items": {"type": "string"}}
- ]
- },
- "timeout": {"type": "string", "format": "duration"},
- "start_period": {"type": "string", "format": "duration"}
- }
- },
- "deployment": {
- "id": "#/definitions/deployment",
- "type": ["object", "null"],
- "properties": {
- "mode": {"type": "string"},
- "endpoint_mode": {"type": "string"},
- "replicas": {"type": "integer"},
- "labels": {"$ref": "#/definitions/labels"},
- "update_config": {
- "type": "object",
- "properties": {
- "parallelism": {"type": "integer"},
- "delay": {"type": "string", "format": "duration"},
- "failure_action": {"type": "string"},
- "monitor": {"type": "string", "format": "duration"},
- "max_failure_ratio": {"type": "number"},
- "order": {"type": "string", "enum": [
- "start-first", "stop-first"
- ]}
- },
- "additionalProperties": false
- },
- "resources": {
- "type": "object",
- "properties": {
- "limits": {
- "type": "object",
- "properties": {
- "cpus": {"type": "string"},
- "memory": {"type": "string"}
- },
- "additionalProperties": false
- },
- "reservations": {
- "type": "object",
- "properties": {
- "cpus": {"type": "string"},
- "memory": {"type": "string"},
- "generic_resources": {"$ref": "#/definitions/generic_resources"}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
- "restart_policy": {
- "type": "object",
- "properties": {
- "condition": {"type": "string"},
- "delay": {"type": "string", "format": "duration"},
- "max_attempts": {"type": "integer"},
- "window": {"type": "string", "format": "duration"}
- },
- "additionalProperties": false
- },
- "placement": {
- "type": "object",
- "properties": {
- "constraints": {"type": "array", "items": {"type": "string"}},
- "preferences": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "spread": {"type": "string"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- },
-
- "generic_resources": {
- "id": "#/definitions/generic_resources",
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "discrete_resource_spec": {
- "type": "object",
- "properties": {
- "kind": {"type": "string"},
- "value": {"type": "number"}
- },
- "additionalProperties": false
- }
- },
- "additionalProperties": false
- }
- },
-
- "network": {
- "id": "#/definitions/network",
- "type": ["object", "null"],
- "properties": {
- "name": {"type": "string"},
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "ipam": {
- "type": "object",
- "properties": {
- "driver": {"type": "string"},
- "config": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "subnet": {"type": "string", "format": "subnet_ip_address"}
- },
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "internal": {"type": "boolean"},
- "attachable": {"type": "boolean"},
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "volume": {
- "id": "#/definitions/volume",
- "type": ["object", "null"],
- "properties": {
- "name": {"type": "string"},
- "driver": {"type": "string"},
- "driver_opts": {
- "type": "object",
- "patternProperties": {
- "^.+$": {"type": ["string", "number"]}
- }
- },
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- },
- "additionalProperties": false
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "secret": {
- "id": "#/definitions/secret",
- "type": "object",
- "properties": {
- "name": {"type": "string"},
- "file": {"type": "string"},
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- }
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "config": {
- "id": "#/definitions/config",
- "type": "object",
- "properties": {
- "name": {"type": "string"},
- "file": {"type": "string"},
- "external": {
- "type": ["boolean", "object"],
- "properties": {
- "name": {"type": "string"}
- }
- },
- "labels": {"$ref": "#/definitions/labels"}
- },
- "additionalProperties": false
- },
-
- "string_or_list": {
- "oneOf": [
- {"type": "string"},
- {"$ref": "#/definitions/list_of_strings"}
- ]
- },
-
- "list_of_strings": {
- "type": "array",
- "items": {"type": "string"},
- "uniqueItems": true
- },
-
- "list_or_dict": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": ["string", "number", "null"]
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "labels": {
- "oneOf": [
- {
- "type": "object",
- "patternProperties": {
- ".+": {
- "type": "string"
- }
- },
- "additionalProperties": false
- },
- {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
- ]
- },
-
- "constraints": {
- "service": {
- "id": "#/definitions/constraints/service",
- "anyOf": [
- {"required": ["build"]},
- {"required": ["image"]}
- ],
- "properties": {
- "build": {
- "required": ["context"]
- }
- }
- }
- }
- }
-}
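
The v3.5 schema added the generic_resources definition above for swarm reservations. A sketch of the data shape it validated; the "gpu" kind and the count are illustrative values, not part of the schema itself:

    # A deploy section carrying a discrete generic resource reservation:
    deploy = {
        "resources": {
            "reservations": {
                "generic_resources": [
                    {"discrete_resource_spec": {"kind": "gpu", "value": 2}},
                ],
            },
        },
    }
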
diff --git a/compose/config/environment.py b/compose/config/environment.py
index 0087b612..5045a730 100644
--- a/compose/config/environment.py
+++ b/compose/config/environment.py
@@ -1,63 +1,69 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import codecs
-import contextlib
import logging
import os
+import re
-import six
+import dotenv
from ..const import IS_WINDOWS_PLATFORM
from .errors import ConfigurationError
+from .errors import EnvFileNotFound
log = logging.getLogger(__name__)
def split_env(env):
- if isinstance(env, six.binary_type):
+ if isinstance(env, bytes):
env = env.decode('utf-8', 'replace')
+ key = value = None
if '=' in env:
- return env.split('=', 1)
+ key, value = env.split('=', 1)
else:
- return env, None
+ key = env
+ if re.search(r'\s', key):
+ raise ConfigurationError(
+ "environment variable name '{}' may not contain whitespace.".format(key)
+ )
+ return key, value
-def env_vars_from_file(filename):
+def env_vars_from_file(filename, interpolate=True):
"""
Read in a line delimited file of environment variables.
"""
if not os.path.exists(filename):
- raise ConfigurationError("Couldn't find env file: %s" % filename)
+ raise EnvFileNotFound("Couldn't find env file: {}".format(filename))
elif not os.path.isfile(filename):
- raise ConfigurationError("%s is not a file." % (filename))
- env = {}
- with contextlib.closing(codecs.open(filename, 'r', 'utf-8-sig')) as fileobj:
- for line in fileobj:
- line = line.strip()
- if line and not line.startswith('#'):
- k, v = split_env(line)
- env[k] = v
+ raise EnvFileNotFound("{} is not a file.".format(filename))
+
+ env = dotenv.dotenv_values(dotenv_path=filename, encoding='utf-8-sig', interpolate=interpolate)
+ for k, v in env.items():
+ env[k] = v if interpolate else v.replace('$', '$$')
return env
class Environment(dict):
def __init__(self, *args, **kwargs):
- super(Environment, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.missing_keys = []
+ self.silent = False
@classmethod
- def from_env_file(cls, base_dir):
+ def from_env_file(cls, base_dir, env_file=None):
def _initialize():
result = cls()
if base_dir is None:
return result
+ if env_file:
+ env_file_path = os.path.join(os.getcwd(), env_file)
+ return cls(env_vars_from_file(env_file_path))
+
env_file_path = os.path.join(base_dir, '.env')
try:
return cls(env_vars_from_file(env_file_path))
- except ConfigurationError:
+ except EnvFileNotFound:
pass
return result
+
instance = _initialize()
instance.update(os.environ)
return instance
@@ -76,15 +82,15 @@ class Environment(dict):
def __getitem__(self, key):
try:
- return super(Environment, self).__getitem__(key)
+ return super().__getitem__(key)
except KeyError:
if IS_WINDOWS_PLATFORM:
try:
- return super(Environment, self).__getitem__(key.upper())
+ return super().__getitem__(key.upper())
except KeyError:
pass
- if key not in self.missing_keys:
- log.warn(
+ if not self.silent and key not in self.missing_keys:
+ log.warning(
"The {} variable is not set. Defaulting to a blank string."
.format(key)
)
@@ -93,28 +99,28 @@ class Environment(dict):
return ""
def __contains__(self, key):
- result = super(Environment, self).__contains__(key)
+ result = super().__contains__(key)
if IS_WINDOWS_PLATFORM:
return (
- result or super(Environment, self).__contains__(key.upper())
+ result or super().__contains__(key.upper())
)
return result
def get(self, key, *args, **kwargs):
if IS_WINDOWS_PLATFORM:
- return super(Environment, self).get(
+ return super().get(
key,
- super(Environment, self).get(key.upper(), *args, **kwargs)
+ super().get(key.upper(), *args, **kwargs)
)
- return super(Environment, self).get(key, *args, **kwargs)
+ return super().get(key, *args, **kwargs)
- def get_boolean(self, key):
+ def get_boolean(self, key, default=False):
# Convert a value to a boolean using "common sense" rules.
# Unset, empty, "0" and "false" (i-case) yield False.
# All other values yield True.
value = self.get(key)
if not value:
- return False
+ return default
if value.lower() in ['0', 'false']:
return False
return True
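
The rewritten env_vars_from_file delegates parsing to python-dotenv. A hedged usage sketch with a hypothetical .env file; when interpolate is false, the loop above additionally re-escapes "$" as "$$" so Compose's own interpolation pass leaves the values literal:

    import dotenv

    # Hypothetical .env contents:
    #   HOST=db
    #   URL=postgres://${HOST}:5432/app
    values = dotenv.dotenv_values(dotenv_path=".env", encoding="utf-8-sig",
                                  interpolate=True)
    # interpolate=True  -> {"HOST": "db", "URL": "postgres://db:5432/app"}
    # interpolate=False -> URL stays "postgres://${HOST}:5432/app", which the
    #                      loader then escapes to "postgres://$${HOST}:5432/app"
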
diff --git a/compose/config/errors.py b/compose/config/errors.py
index f5c03808..b66433a7 100644
--- a/compose/config/errors.py
+++ b/compose/config/errors.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-
VERSION_EXPLANATION = (
'You might be seeing this error because you\'re using the wrong Compose file version. '
'Either specify a supported version (e.g "2.2" or "3.3") and place '
@@ -19,6 +15,10 @@ class ConfigurationError(Exception):
return self.msg
+class EnvFileNotFound(ConfigurationError):
+ pass
+
+
class DependencyError(ConfigurationError):
pass
@@ -38,7 +38,7 @@ class CircularReference(ConfigurationError):
class ComposeFileNotFound(ConfigurationError):
def __init__(self, supported_filenames):
- super(ComposeFileNotFound, self).__init__("""
+ super().__init__("""
Can't find a suitable configuration file in this directory or any
parent. Are you in the right directory?
@@ -49,7 +49,7 @@ class ComposeFileNotFound(ConfigurationError):
class DuplicateOverrideFileFound(ConfigurationError):
def __init__(self, override_filenames):
self.override_filenames = override_filenames
- super(DuplicateOverrideFileFound, self).__init__(
+ super().__init__(
"Multiple override files found: {}. You may only use a single "
"override file.".format(", ".join(override_filenames))
)
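
The new EnvFileNotFound subclass lets callers distinguish a merely missing env file from other configuration problems, which is exactly how Environment.from_env_file uses it above. Sketch of the pattern:

    from compose.config.environment import env_vars_from_file
    from compose.config.errors import EnvFileNotFound

    try:
        env = env_vars_from_file(".env")
    except EnvFileNotFound:
        env = {}  # a missing default .env is tolerated; other errors propagate
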
diff --git a/compose/config/interpolation.py b/compose/config/interpolation.py
index 8845d73b..ff0d90e8 100644
--- a/compose/config/interpolation.py
+++ b/compose/config/interpolation.py
@@ -1,14 +1,9 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import logging
import re
from string import Template
-import six
-
from .errors import ConfigurationError
-from compose.const import COMPOSEFILE_V2_0 as V2_0
+from compose.const import COMPOSEFILE_V1 as V1
from compose.utils import parse_bytes
from compose.utils import parse_nanoseconds_int
@@ -16,7 +11,7 @@ from compose.utils import parse_nanoseconds_int
log = logging.getLogger(__name__)
-class Interpolator(object):
+class Interpolator:
def __init__(self, templater, mapping):
self.templater = templater
@@ -30,25 +25,25 @@ class Interpolator(object):
def interpolate_environment_variables(version, config, section, environment):
- if version <= V2_0:
+ if version == V1:
interpolator = Interpolator(Template, environment)
else:
interpolator = Interpolator(TemplateWithDefaults, environment)
def process_item(name, config_dict):
- return dict(
- (key, interpolate_value(name, key, val, section, interpolator))
+ return {
+ key: interpolate_value(name, key, val, section, interpolator)
for key, val in (config_dict or {}).items()
- )
+ }
- return dict(
- (name, process_item(name, config_dict or {}))
+ return {
+ name: process_item(name, config_dict or {})
for name, config_dict in config.items()
- )
+ }
def get_config_path(config_key, section, name):
- return '{}.{}.{}'.format(section, name, config_key)
+ return '{}/{}/{}'.format(section, name, config_key)
def interpolate_value(name, config_key, value, section, interpolator):
@@ -64,26 +59,26 @@ def interpolate_value(name, config_key, value, section, interpolator):
string=e.string))
except UnsetRequiredSubstitution as e:
raise ConfigurationError(
- 'Missing mandatory value for "{config_key}" option in {section} "{name}": {err}'.format(
- config_key=config_key,
- name=name,
- section=section,
- err=e.err
- )
+ 'Missing mandatory value for "{config_key}" option interpolating {value} '
+ 'in {section} "{name}": {err}'.format(config_key=config_key,
+ value=value,
+ name=name,
+ section=section,
+ err=e.err)
)
def recursive_interpolate(obj, interpolator, config_path):
def append(config_path, key):
- return '{}.{}'.format(config_path, key)
+ return '{}/{}'.format(config_path, key)
- if isinstance(obj, six.string_types):
+ if isinstance(obj, str):
return converter.convert(config_path, interpolator.interpolate(obj))
if isinstance(obj, dict):
- return dict(
- (key, recursive_interpolate(val, interpolator, append(config_path, key)))
- for (key, val) in obj.items()
- )
+ return {
+ key: recursive_interpolate(val, interpolator, append(config_path, key))
+ for key, val in obj.items()
+ }
if isinstance(obj, list):
return [recursive_interpolate(val, interpolator, config_path) for val in obj]
return converter.convert(config_path, obj)
@@ -91,17 +86,17 @@ def recursive_interpolate(obj, interpolator, config_path):
class TemplateWithDefaults(Template):
pattern = r"""
- %(delim)s(?:
- (?P<escaped>%(delim)s) |
- (?P<named>%(id)s) |
- {(?P<braced>%(bid)s)} |
+ {delim}(?:
+ (?P<escaped>{delim}) |
+ (?P<named>{id}) |
+ {{(?P<braced>{bid})}} |
(?P<invalid>)
)
- """ % {
- 'delim': re.escape('$'),
- 'id': r'[_a-z][_a-z0-9]*',
- 'bid': r'[_a-z][_a-z0-9]*(?:(?P<sep>:?[-?])[^}]*)?',
- }
+ """.format(
+ delim=re.escape('$'),
+ id=r'[_a-z][_a-z0-9]*',
+ bid=r'[_a-z][_a-z0-9]*(?:(?P<sep>:?[-?])[^}]*)?',
+ )
@staticmethod
def process_braced_group(braced, sep, mapping):
@@ -116,12 +111,14 @@ class TemplateWithDefaults(Template):
var, _, err = braced.partition(':?')
result = mapping.get(var)
if not result:
+ err = err or var
raise UnsetRequiredSubstitution(err)
return result
elif '?' == sep:
var, _, err = braced.partition('?')
if var in mapping:
return mapping.get(var)
+ err = err or var
raise UnsetRequiredSubstitution(err)
# Modified from python2.7/string.py
@@ -138,9 +135,9 @@ class TemplateWithDefaults(Template):
if named is not None:
val = mapping[named]
- if isinstance(val, six.binary_type):
+ if isinstance(val, bytes):
val = val.decode('utf-8')
- return '%s' % (val,)
+ return '{}'.format(val)
if mo.group('escaped') is not None:
return self.delimiter
if mo.group('invalid') is not None:
@@ -160,12 +157,12 @@ class UnsetRequiredSubstitution(Exception):
self.err = custom_err_msg
-PATH_JOKER = '[^.]+'
+PATH_JOKER = '[^/]+'
FULL_JOKER = '.+'
def re_path(*args):
- return re.compile('^{}$'.format('\.'.join(args)))
+ return re.compile('^{}$'.format('/'.join(args)))
def re_path_basic(section, name):
@@ -177,7 +174,7 @@ def service_path(*args):
def to_boolean(s):
- if not isinstance(s, six.string_types):
+ if not isinstance(s, str):
return s
s = s.lower()
if s in ['y', 'yes', 'true', 'on']:
@@ -188,11 +185,11 @@ def to_boolean(s):
def to_int(s):
- if not isinstance(s, six.string_types):
+ if not isinstance(s, str):
return s
# We must be able to handle octal representation for `mode` values notably
- if six.PY3 and re.match('^0[0-9]+$', s.strip()):
+ if re.match('^0[0-9]+$', s.strip()):
s = '0o' + s[1:]
try:
return int(s, base=0)
@@ -201,7 +198,7 @@ def to_int(s):
def to_float(s):
- if not isinstance(s, six.string_types):
+ if not isinstance(s, str):
return s
try:
@@ -224,12 +221,12 @@ def bytes_to_int(s):
def to_microseconds(v):
- if not isinstance(v, six.string_types):
+ if not isinstance(v, str):
return v
return int(parse_nanoseconds_int(v) / 1000)
-class ConversionMap(object):
+class ConversionMap:
map = {
service_path('blkio_config', 'weight'): to_int,
service_path('blkio_config', 'weight_device', 'weight'): to_int,
@@ -246,8 +243,12 @@ class ConversionMap(object):
service_path('healthcheck', 'disable'): to_boolean,
service_path('deploy', 'labels', PATH_JOKER): to_str,
service_path('deploy', 'replicas'): to_int,
+ service_path('deploy', 'placement', 'max_replicas_per_node'): to_int,
+ service_path('deploy', 'resources', 'limits', "cpus"): to_float,
service_path('deploy', 'update_config', 'parallelism'): to_int,
service_path('deploy', 'update_config', 'max_failure_ratio'): to_float,
+ service_path('deploy', 'rollback_config', 'parallelism'): to_int,
+ service_path('deploy', 'rollback_config', 'max_failure_ratio'): to_float,
service_path('deploy', 'restart_policy', 'max_attempts'): to_int,
service_path('mem_swappiness'): to_int,
service_path('labels', FULL_JOKER): to_str,
@@ -286,7 +287,7 @@ class ConversionMap(object):
except ValueError as e:
raise ConfigurationError(
'Error while attempting to convert {} to appropriate type: {}'.format(
- path, e
+ path.replace('/', '.'), e
)
)
return value
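
The reworked TemplateWithDefaults pattern drives Compose-style parameter expansion. A sketch of the expected behaviour, assuming the usual semantics for the "-" and ":-" separators (only the "?" and ":?" branches of process_braced_group are visible in this hunk):

    from compose.config.interpolation import Interpolator, TemplateWithDefaults

    interp = Interpolator(TemplateWithDefaults, {"TAG": ""})
    # ":-" falls back when the variable is unset or empty...
    assert interp.interpolate("app:${TAG:-latest}") == "app:latest"
    # ...while "-" only falls back when the variable is unset.
    assert interp.interpolate("app:${TAG-latest}") == "app:"
    # "${VAR:?err}" and "${VAR?err}" raise UnsetRequiredSubstitution instead,
    # now defaulting the message to the variable name when "err" is omitted.
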
diff --git a/compose/config/serialize.py b/compose/config/serialize.py
index c0cf35c1..e3295df7 100644
--- a/compose/config/serialize.py
+++ b/compose/config/serialize.py
@@ -1,21 +1,12 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import six
import yaml
from compose.config import types
+from compose.const import COMPOSE_SPEC as VERSION
from compose.const import COMPOSEFILE_V1 as V1
-from compose.const import COMPOSEFILE_V2_1 as V2_1
-from compose.const import COMPOSEFILE_V2_3 as V2_3
-from compose.const import COMPOSEFILE_V3_0 as V3_0
-from compose.const import COMPOSEFILE_V3_2 as V3_2
-from compose.const import COMPOSEFILE_V3_4 as V3_4
-from compose.const import COMPOSEFILE_V3_5 as V3_5
def serialize_config_type(dumper, data):
- representer = dumper.represent_str if six.PY3 else dumper.represent_unicode
+ representer = dumper.represent_str
return representer(data.repr())
@@ -24,14 +15,12 @@ def serialize_dict_type(dumper, data):
def serialize_string(dumper, data):
- """ Ensure boolean-like strings are quoted in the output and escape $ characters """
- representer = dumper.represent_str if six.PY3 else dumper.represent_unicode
+ """ Ensure boolean-like strings are quoted in the output """
+ representer = dumper.represent_str
- if isinstance(data, six.binary_type):
+ if isinstance(data, bytes):
data = data.decode('utf-8')
- data = data.replace('$', '$$')
-
if data.lower() in ('y', 'n', 'yes', 'no', 'on', 'off', 'true', 'false'):
# Empirically only y/n appears to be an issue, but this might change
# depending on which PyYaml version is being used. Err on safe side.
@@ -39,6 +28,12 @@ def serialize_string(dumper, data):
return representer(data)
+def serialize_string_escape_dollar(dumper, data):
+ """ Ensure boolean-like strings are quoted in the output and escape $ characters """
+ data = data.replace('$', '$$')
+ return serialize_string(dumper, data)
+
+
yaml.SafeDumper.add_representer(types.MountSpec, serialize_dict_type)
yaml.SafeDumper.add_representer(types.VolumeFromSpec, serialize_config_type)
yaml.SafeDumper.add_representer(types.VolumeSpec, serialize_config_type)
@@ -46,12 +41,10 @@ yaml.SafeDumper.add_representer(types.SecurityOpt, serialize_config_type)
yaml.SafeDumper.add_representer(types.ServiceSecret, serialize_dict_type)
yaml.SafeDumper.add_representer(types.ServiceConfig, serialize_dict_type)
yaml.SafeDumper.add_representer(types.ServicePort, serialize_dict_type)
-yaml.SafeDumper.add_representer(str, serialize_string)
-yaml.SafeDumper.add_representer(six.text_type, serialize_string)
def denormalize_config(config, image_digests=None):
- result = {'version': str(V2_1) if config.version == V1 else str(config.version)}
+ result = {'version': str(config.config_version)}
denormalized_services = [
denormalize_service_dict(
service_dict,
@@ -74,22 +67,18 @@ def denormalize_config(config, image_digests=None):
del conf['external_name']
if 'name' in conf:
- if config.version < V2_1 or (
- config.version >= V3_0 and config.version < v3_introduced_name_key(key)):
- del conf['name']
- elif 'external' in conf:
- conf['external'] = True
-
+ if 'external' in conf:
+ conf['external'] = bool(conf['external'])
return result
-def v3_introduced_name_key(key):
- if key == 'volumes':
- return V3_4
- return V3_5
-
-
-def serialize_config(config, image_digests=None):
+def serialize_config(config, image_digests=None, escape_dollar=True):
+ if escape_dollar:
+ yaml.SafeDumper.add_representer(str, serialize_string_escape_dollar)
+ yaml.SafeDumper.add_representer(str, serialize_string_escape_dollar)
+ else:
+ yaml.SafeDumper.add_representer(str, serialize_string)
+ yaml.SafeDumper.add_representer(str, serialize_string)
return yaml.safe_dump(
denormalize_config(config, image_digests),
default_flow_style=False,
@@ -115,7 +104,7 @@ def serialize_ns_time_value(value):
result = (int(value), stage[1])
else:
break
- return '{0}{1}'.format(*result)
+ return '{}{}'.format(*result)
def denormalize_service_dict(service_dict, version, image_digest=None):
@@ -132,11 +121,6 @@ def denormalize_service_dict(service_dict, version, image_digest=None):
if version == V1 and 'network_mode' not in service_dict:
service_dict['network_mode'] = 'bridge'
- if 'depends_on' in service_dict and (version < V2_1 or version >= V3_0):
- service_dict['depends_on'] = sorted([
- svc for svc in service_dict['depends_on'].keys()
- ])
-
if 'healthcheck' in service_dict:
if 'interval' in service_dict['healthcheck']:
service_dict['healthcheck']['interval'] = serialize_ns_time_value(
@@ -154,10 +138,10 @@ def denormalize_service_dict(service_dict, version, image_digest=None):
if 'ports' in service_dict:
service_dict['ports'] = [
- p.legacy_repr() if p.external_ip or version < V3_2 else p
+ p.legacy_repr() if p.external_ip or version < VERSION else p
for p in service_dict['ports']
]
- if 'volumes' in service_dict and (version < V2_3 or (version > V3_0 and version < V3_2)):
+ if 'volumes' in service_dict and (version == V1):
service_dict['volumes'] = [
v.legacy_repr() if isinstance(v, types.MountSpec) else v for v in service_dict['volumes']
]
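
serialize_config now registers one of two str representers per call. A simplified mirror of the escape variant (the real one also quotes boolean-like strings), showing why round-tripped output is safe to re-interpolate:

    import yaml

    def demo_escape_dollar(dumper, data):
        # same "$" -> "$$" doubling as serialize_string_escape_dollar
        return dumper.represent_str(data.replace("$", "$$"))

    yaml.SafeDumper.add_representer(str, demo_escape_dollar)
    print(yaml.safe_dump({"image": "app:${TAG}"}))  # image: app:$${TAG}
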
diff --git a/compose/config/sort_services.py b/compose/config/sort_services.py
index 42f548a6..0a7eb2b4 100644
--- a/compose/config/sort_services.py
+++ b/compose/config/sort_services.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from compose.config.errors import DependencyError
@@ -24,7 +21,7 @@ def get_source_name_from_network_mode(network_mode, source_type):
def get_service_names(links):
- return [link.split(':')[0] for link in links]
+ return [link.split(':', 1)[0] for link in links]
def get_service_names_from_volumes_from(volumes_from):
@@ -39,6 +36,7 @@ def get_service_dependents(service_dict, services):
name in get_service_names_from_volumes_from(service.get('volumes_from', [])) or
name == get_service_name_from_network_mode(service.get('network_mode')) or
name == get_service_name_from_network_mode(service.get('pid')) or
+ name == get_service_name_from_network_mode(service.get('ipc')) or
name in service.get('depends_on', []))
]
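
sort_services now treats ipc: "service:<name>" like network_mode and pid when collecting dependents, so IPC-sharing services are started in the right order. A simplified stand-in for get_service_name_from_network_mode to show the parsing involved:

    def service_name_from_mode(mode):
        # "service:db" names another service; anything else ("host",
        # "container:...", None) yields no dependency edge.
        if mode and mode.startswith("service:"):
            return mode.split(":", 1)[1]
        return None

    assert service_name_from_mode("service:db") == "db"
    assert service_name_from_mode("host") is None
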
diff --git a/compose/config/types.py b/compose/config/types.py
index ff987521..f52b5654 100644
--- a/compose/config/types.py
+++ b/compose/config/types.py
@@ -1,16 +1,12 @@
"""
Types for objects parsed from the configuration.
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import json
import ntpath
import os
import re
from collections import namedtuple
-import six
from docker.utils.ports import build_port_bindings
from ..const import COMPOSEFILE_V1 as V1
@@ -104,7 +100,7 @@ def serialize_restart_spec(restart_spec):
return ''
parts = [restart_spec['Name']]
if restart_spec['MaximumRetryCount']:
- parts.append(six.text_type(restart_spec['MaximumRetryCount']))
+ parts.append(str(restart_spec['MaximumRetryCount']))
return ':'.join(parts)
@@ -125,7 +121,7 @@ def parse_extra_hosts(extra_hosts_config):
def normalize_path_for_engine(path):
- """Windows paths, c:\my\path\shiny, need to be changed to be compatible with
+ """Windows paths, c:\\my\\path\\shiny, need to be changed to be compatible with
the Engine. Volume paths are expected to be linux style /c/my/path/shiny/
"""
drive, tail = splitdrive(path)
@@ -136,7 +132,21 @@ def normalize_path_for_engine(path):
return path.replace('\\', '/')
-class MountSpec(object):
+def normpath(path, win_host=False):
+ """ Custom path normalizer that handles Compose-specific edge cases like
+ UNIX paths on Windows hosts and vice-versa. """
+
+ sysnorm = ntpath.normpath if win_host else os.path.normpath
+ # If a path looks like a UNIX absolute path on Windows, it probably is;
+ # we'll need to revert the backslashes to forward slashes after normalization
+ flip_slashes = path.startswith('/') and IS_WINDOWS_PLATFORM
+ path = sysnorm(path)
+ if flip_slashes:
+ path = path.replace('\\', '/')
+ return path
+
+
+class MountSpec:
options_map = {
'volume': {
'nocopy': 'no_copy'
@@ -152,12 +162,11 @@ class MountSpec(object):
@classmethod
def parse(cls, mount_dict, normalize=False, win_host=False):
- normpath = ntpath.normpath if win_host else os.path.normpath
if mount_dict.get('source'):
if mount_dict['type'] == 'tmpfs':
raise ConfigurationError('tmpfs mounts can not specify a source')
- mount_dict['source'] = normpath(mount_dict['source'])
+ mount_dict['source'] = normpath(mount_dict['source'], win_host)
if normalize:
mount_dict['source'] = normalize_path_for_engine(mount_dict['source'])
@@ -247,7 +256,7 @@ class VolumeSpec(namedtuple('_VolumeSpec', 'external internal mode')):
else:
external = parts[0]
parts = separate_next_section(parts[1])
- external = ntpath.normpath(external)
+ external = normpath(external, True)
internal = parts[0]
if len(parts) > 1:
if ':' in parts[1]:
@@ -313,7 +322,7 @@ class ServiceLink(namedtuple('_ServiceLink', 'target alias')):
class ServiceConfigBase(namedtuple('_ServiceConfigBase', 'source target uid gid mode name')):
@classmethod
def parse(cls, spec):
- if isinstance(spec, six.string_types):
+ if isinstance(spec, str):
return cls(spec, None, None, None, None, None)
return cls(
spec.get('source'),
@@ -329,9 +338,9 @@ class ServiceConfigBase(namedtuple('_ServiceConfigBase', 'source target uid gid
return self.source
def repr(self):
- return dict(
- [(k, v) for k, v in zip(self._fields, self) if v is not None]
- )
+ return {
+ k: v for k, v in zip(self._fields, self) if v is not None
+ }
class ServiceSecret(ServiceConfigBase):
@@ -351,12 +360,9 @@ class ServicePort(namedtuple('_ServicePort', 'target published protocol mode ext
raise ConfigurationError('Invalid target port: {}'.format(target))
if published:
- if isinstance(published, six.string_types) and '-' in published: # "x-y:z" format
+ if isinstance(published, str) and '-' in published: # "x-y:z" format
a, b = published.split('-', 1)
- try:
- int(a)
- int(b)
- except ValueError:
+ if not a.isdigit() or not b.isdigit():
raise ConfigurationError('Invalid published port: {}'.format(published))
else:
try:
@@ -364,7 +370,7 @@ class ServicePort(namedtuple('_ServicePort', 'target published protocol mode ext
except ValueError:
raise ConfigurationError('Invalid published port: {}'.format(published))
- return super(ServicePort, cls).__new__(
+ return super().__new__(
cls, target, published, *args, **kwargs
)
@@ -413,9 +419,9 @@ class ServicePort(namedtuple('_ServicePort', 'target published protocol mode ext
return (self.target, self.published, self.external_ip, self.protocol)
def repr(self):
- return dict(
- [(k, v) for k, v in zip(self._fields, self) if v is not None]
- )
+ return {
+ k: v for k, v in zip(self._fields, self) if v is not None
+ }
def legacy_repr(self):
return normalize_port_dict(self.repr())
@@ -464,7 +470,7 @@ def normalize_port_dict(port):
class SecurityOpt(namedtuple('_SecurityOpt', 'value src_file')):
@classmethod
def parse(cls, value):
- if not isinstance(value, six.string_types):
+ if not isinstance(value, str):
return value
# based on https://github.com/docker/cli/blob/9de1b162f/cli/command/container/opts.go#L673-L697
con = value.split('=', 2)
@@ -475,9 +481,9 @@ class SecurityOpt(namedtuple('_SecurityOpt', 'value src_file')):
if con[0] == 'seccomp' and con[1] != 'unconfined':
try:
- with open(unquote_path(con[1]), 'r') as f:
+ with open(unquote_path(con[1])) as f:
seccomp_data = json.load(f)
- except (IOError, ValueError) as e:
+ except (OSError, ValueError) as e:
raise ConfigurationError('Error reading seccomp profile: {}'.format(e))
return cls(
                'seccomp={}'.format(json.dumps(seccomp_data)), con[1]
            )
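
The new normpath helper normalizes with ntpath for Windows hosts, then flips backslashes back for paths that look UNIX-absolute. The edge case it guards, runnable on any platform since ntpath is platform-independent:

    import ntpath

    path = ntpath.normpath("/var/lib//data")   # -> '\\var\\lib\\data'
    assert path.replace("\\", "/") == "/var/lib/data"
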
diff --git a/compose/config/validation.py b/compose/config/validation.py
index 0fdcb37e..d9aaeda4 100644
--- a/compose/config/validation.py
+++ b/compose/config/validation.py
@@ -1,13 +1,9 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import json
import logging
import os
import re
import sys
-import six
from docker.utils.ports import split_port
from jsonschema import Draft4Validator
from jsonschema import FormatChecker
@@ -41,15 +37,15 @@ DOCKER_CONFIG_HINTS = {
}
-VALID_NAME_CHARS = '[a-zA-Z0-9\._\-]'
+VALID_NAME_CHARS = r'[a-zA-Z0-9\._\-]'
VALID_EXPOSE_FORMAT = r'^\d+(\-\d+)?(\/[a-zA-Z]+)?$'
VALID_IPV4_SEG = r'(\d{1,2}|1\d{2}|2[0-4]\d|25[0-5])'
-VALID_IPV4_ADDR = "({IPV4_SEG}\.){{3}}{IPV4_SEG}".format(IPV4_SEG=VALID_IPV4_SEG)
-VALID_REGEX_IPV4_CIDR = "^{IPV4_ADDR}/(\d|[1-2]\d|3[0-2])$".format(IPV4_ADDR=VALID_IPV4_ADDR)
+VALID_IPV4_ADDR = r"({IPV4_SEG}\.){{3}}{IPV4_SEG}".format(IPV4_SEG=VALID_IPV4_SEG)
+VALID_REGEX_IPV4_CIDR = r"^{IPV4_ADDR}/(\d|[1-2]\d|3[0-2])$".format(IPV4_ADDR=VALID_IPV4_ADDR)
VALID_IPV6_SEG = r'[0-9a-fA-F]{1,4}'
-VALID_REGEX_IPV6_CIDR = "".join("""
+VALID_REGEX_IPV6_CIDR = "".join(r"""
^
(
(({IPV6_SEG}:){{7}}{IPV6_SEG})|
@@ -75,13 +71,13 @@ def format_ports(instance):
try:
split_port(instance)
except ValueError as e:
- raise ValidationError(six.text_type(e))
+ raise ValidationError(str(e))
return True
@FormatChecker.cls_checks(format="expose", raises=ValidationError)
def format_expose(instance):
- if isinstance(instance, six.string_types):
+ if isinstance(instance, str):
if not re.match(VALID_EXPOSE_FORMAT, instance):
raise ValidationError(
"should be of the format 'PORT[/PROTOCOL]'")
@@ -91,7 +87,7 @@ def format_expose(instance):
@FormatChecker.cls_checks("subnet_ip_address", raises=ValidationError)
def format_subnet_ip_address(instance):
- if isinstance(instance, six.string_types):
+ if isinstance(instance, str):
if not re.match(VALID_REGEX_IPV4_CIDR, instance) and \
not re.match(VALID_REGEX_IPV6_CIDR, instance):
raise ValidationError("should use the CIDR format")
@@ -104,7 +100,7 @@ def match_named_volumes(service_dict, project_volumes):
for volume_spec in service_volumes:
if volume_spec.is_named_volume and volume_spec.external not in project_volumes:
raise ConfigurationError(
- 'Named volume "{0}" is used in service "{1}" but no'
+ 'Named volume "{}" is used in service "{}" but no'
' declaration was found in the volumes section.'.format(
volume_spec.repr(), service_dict.get('name')
)
@@ -138,7 +134,7 @@ def validate_config_section(filename, config, section):
type=anglicize_json_type(python_type_to_yaml_type(config))))
for key, value in config.items():
- if not isinstance(key, six.string_types):
+ if not isinstance(key, str):
raise ConfigurationError(
"In file '{filename}', the {section} name {name} must be a "
"quoted string, i.e. '{name}'.".format(
@@ -166,7 +162,7 @@ def validate_top_level_object(config_file):
def validate_ulimits(service_config):
ulimit_config = service_config.config.get('ulimits', {})
- for limit_name, soft_hard_values in six.iteritems(ulimit_config):
+ for limit_name, soft_hard_values in ulimit_config.items():
if isinstance(soft_hard_values, dict):
if not soft_hard_values['soft'] <= soft_hard_values['hard']:
raise ConfigurationError(
@@ -222,6 +218,21 @@ def validate_pid_mode(service_config, service_names):
)
+def validate_ipc_mode(service_config, service_names):
+ ipc_mode = service_config.config.get('ipc')
+ if not ipc_mode:
+ return
+
+ dependency = get_service_name_from_network_mode(ipc_mode)
+ if not dependency:
+ return
+ if dependency not in service_names:
+ raise ConfigurationError(
+ "Service '{s.name}' uses the IPC namespace of service '{dep}' which "
+ "is undefined.".format(s=service_config, dep=dependency)
+ )
+
+
def validate_links(service_config, service_names):
for link in service_config.config.get('links', []):
if link.split(':')[0] not in service_names:
@@ -240,6 +251,18 @@ def validate_depends_on(service_config, service_names):
)
+def validate_credential_spec(service_config):
+ credential_spec = service_config.config.get('credential_spec')
+ if not credential_spec:
+ return
+
+ if 'registry' not in credential_spec and 'file' not in credential_spec:
+ raise ConfigurationError(
+ "Service '{s.name}' is missing 'credential_spec.file' or "
+ "credential_spec.registry'".format(s=service_config)
+ )
+
+
def get_unsupported_config_msg(path, error_key):
msg = "Unsupported config option for {}: '{}'".format(path_string(path), error_key)
if error_key in DOCKER_CONFIG_HINTS:
@@ -274,7 +297,7 @@ def handle_error_for_schema_with_id(error, path):
invalid_config_key = parse_key_from_error_msg(error)
return get_unsupported_config_msg(path, invalid_config_key)
- if schema_id.startswith('config_schema_v'):
+ if schema_id.startswith('config_schema_'):
invalid_config_key = parse_key_from_error_msg(error)
return ('Invalid top-level property "{key}". Valid top-level '
'sections for this Compose file are: {properties}, and '
@@ -317,7 +340,7 @@ def handle_generic_error(error, path):
required_keys)
elif error.cause:
- error_msg = six.text_type(error.cause)
+ error_msg = str(error.cause)
msg_format = "{path} is invalid: {msg}"
elif error.path:
@@ -330,11 +353,14 @@ def handle_generic_error(error, path):
def parse_key_from_error_msg(error):
- return error.message.split("'")[1]
+ try:
+ return error.message.split("'")[1]
+ except IndexError:
+ return error.message.split('(')[1].split(' ')[0].strip("'")
def path_string(path):
- return ".".join(c for c in path if isinstance(c, six.string_types))
+ return ".".join(c for c in path if isinstance(c, str))
def _parse_valid_types_from_validator(validator):
@@ -424,15 +450,29 @@ def process_config_schema_errors(error):
return handle_generic_error(error, path)
-def validate_against_config_schema(config_file):
- schema = load_jsonschema(config_file)
+def keys_to_str(config_file):
+ """
+ Non-string keys may break the validator when it checks patterned fields.
+ """
+ d = {}
+ for k, v in config_file.items():
+ d[str(k)] = v
+ if isinstance(v, dict):
+ d[str(k)] = keys_to_str(v)
+ return d
+
+
+def validate_against_config_schema(config_file, version):
+ schema = load_jsonschema(version)
+ config = keys_to_str(config_file.config)
+
format_checker = FormatChecker(["ports", "expose", "subnet_ip_address"])
validator = Draft4Validator(
schema,
resolver=RefResolver(get_resolver_path(), schema),
format_checker=format_checker)
handle_errors(
- validator.iter_errors(config_file.config),
+ validator.iter_errors(config),
process_config_schema_errors,
config_file.filename)
@@ -442,7 +482,7 @@ def validate_service_constraints(config, service_name, config_file):
return process_service_constraint_errors(
errors, service_name, config_file.version)
- schema = load_jsonschema(config_file)
+ schema = load_jsonschema(config_file.version)
validator = Draft4Validator(schema['definitions']['constraints']['service'])
handle_errors(validator.iter_errors(config), handler, None)
@@ -461,17 +501,20 @@ def get_schema_path():
return os.path.dirname(os.path.abspath(__file__))
-def load_jsonschema(config_file):
+def load_jsonschema(version):
+ name = "compose_spec"
+ if version == V1:
+ name = "config_schema_v1"
+
filename = os.path.join(
get_schema_path(),
- "config_schema_v{0}.json".format(config_file.version))
+ "{}.json".format(name))
if not os.path.exists(filename):
raise ConfigurationError(
'Version in "{}" is unsupported. {}'
- .format(config_file.filename, VERSION_EXPLANATION))
-
- with open(filename, "r") as fh:
+ .format(filename, VERSION_EXPLANATION))
+ with open(filename) as fh:
return json.load(fh)
@@ -491,7 +534,7 @@ def handle_errors(errors, format_error_func, filename):
gone wrong. Process each error and pull out relevant information and re-write
helpful error messages that are relevant.
"""
- errors = list(sorted(errors, key=str))
+ errors = sorted(errors, key=str)
if not errors:
return
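To see what keys_to_str guards against: YAML parses unquoted numeric mapping
keys as integers, which jsonschema's patterned properties cannot match. A small
sketch, assuming the helper above:

    from compose.config.validation import keys_to_str

    # A mapping key loaded from YAML without quotes stays an int:
    raw = {'services': {'web': {'image': 'nginx'}}, 2021: {'x-meta': True}}

    # Every key becomes a string, recursively, before validation:
    assert keys_to_str(raw) == {
        'services': {'web': {'image': 'nginx'}},
        '2021': {'x-meta': True},
    }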
diff --git a/compose/const.py b/compose/const.py
index 200a458a..90cd38e8 100644
--- a/compose/const.py
+++ b/compose/const.py
@@ -1,70 +1,40 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import sys
from .version import ComposeVersion
DEFAULT_TIMEOUT = 10
HTTP_TIMEOUT = 60
-IMAGE_EVENTS = ['delete', 'import', 'load', 'pull', 'push', 'save', 'tag', 'untag']
IS_WINDOWS_PLATFORM = (sys.platform == "win32")
+IS_LINUX_PLATFORM = (sys.platform == "linux")
LABEL_CONTAINER_NUMBER = 'com.docker.compose.container-number'
LABEL_ONE_OFF = 'com.docker.compose.oneoff'
LABEL_PROJECT = 'com.docker.compose.project'
+LABEL_WORKING_DIR = 'com.docker.compose.project.working_dir'
+LABEL_CONFIG_FILES = 'com.docker.compose.project.config_files'
+LABEL_ENVIRONMENT_FILE = 'com.docker.compose.project.environment_file'
LABEL_SERVICE = 'com.docker.compose.service'
LABEL_NETWORK = 'com.docker.compose.network'
LABEL_VERSION = 'com.docker.compose.version'
+LABEL_SLUG = 'com.docker.compose.slug'
LABEL_VOLUME = 'com.docker.compose.volume'
LABEL_CONFIG_HASH = 'com.docker.compose.config-hash'
NANOCPUS_SCALE = 1000000000
PARALLEL_LIMIT = 64
SECRETS_PATH = '/run/secrets'
+WINDOWS_LONGPATH_PREFIX = '\\\\?\\'
COMPOSEFILE_V1 = ComposeVersion('1')
-COMPOSEFILE_V2_0 = ComposeVersion('2.0')
-COMPOSEFILE_V2_1 = ComposeVersion('2.1')
-COMPOSEFILE_V2_2 = ComposeVersion('2.2')
-COMPOSEFILE_V2_3 = ComposeVersion('2.3')
-COMPOSEFILE_V2_4 = ComposeVersion('2.4')
-
-COMPOSEFILE_V3_0 = ComposeVersion('3.0')
-COMPOSEFILE_V3_1 = ComposeVersion('3.1')
-COMPOSEFILE_V3_2 = ComposeVersion('3.2')
-COMPOSEFILE_V3_3 = ComposeVersion('3.3')
-COMPOSEFILE_V3_4 = ComposeVersion('3.4')
-COMPOSEFILE_V3_5 = ComposeVersion('3.5')
-COMPOSEFILE_V3_6 = ComposeVersion('3.6')
+COMPOSE_SPEC = ComposeVersion('3.9')
+# Minimum Docker Engine API version needed to support
+# the features of each compose schema version
API_VERSIONS = {
COMPOSEFILE_V1: '1.21',
- COMPOSEFILE_V2_0: '1.22',
- COMPOSEFILE_V2_1: '1.24',
- COMPOSEFILE_V2_2: '1.25',
- COMPOSEFILE_V2_3: '1.30',
- COMPOSEFILE_V2_4: '1.35',
- COMPOSEFILE_V3_0: '1.25',
- COMPOSEFILE_V3_1: '1.25',
- COMPOSEFILE_V3_2: '1.25',
- COMPOSEFILE_V3_3: '1.30',
- COMPOSEFILE_V3_4: '1.30',
- COMPOSEFILE_V3_5: '1.30',
- COMPOSEFILE_V3_6: '1.36',
+ COMPOSE_SPEC: '1.38',
}
API_VERSION_TO_ENGINE_VERSION = {
API_VERSIONS[COMPOSEFILE_V1]: '1.9.0',
- API_VERSIONS[COMPOSEFILE_V2_0]: '1.10.0',
- API_VERSIONS[COMPOSEFILE_V2_1]: '1.12.0',
- API_VERSIONS[COMPOSEFILE_V2_2]: '1.13.0',
- API_VERSIONS[COMPOSEFILE_V2_3]: '17.06.0',
- API_VERSIONS[COMPOSEFILE_V2_4]: '17.12.0',
- API_VERSIONS[COMPOSEFILE_V3_0]: '1.13.0',
- API_VERSIONS[COMPOSEFILE_V3_1]: '1.13.0',
- API_VERSIONS[COMPOSEFILE_V3_2]: '1.13.0',
- API_VERSIONS[COMPOSEFILE_V3_3]: '17.06.0',
- API_VERSIONS[COMPOSEFILE_V3_4]: '17.06.0',
- API_VERSIONS[COMPOSEFILE_V3_5]: '17.06.0',
- API_VERSIONS[COMPOSEFILE_V3_6]: '18.02.0',
+ API_VERSIONS[COMPOSE_SPEC]: '18.06.0',
}
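A worked lookup against the consolidated tables above: every non-v1 file now
resolves to the single compose-spec schema and its API floor. A sketch,
assuming the constants in this file:

    from compose.const import (API_VERSIONS, API_VERSION_TO_ENGINE_VERSION,
                               COMPOSEFILE_V1, COMPOSE_SPEC)
    from compose.version import ComposeVersion

    def min_engine_version(file_version):
        # v1 keeps its own floor; every other schema version shares the
        # compose-spec requirement (API 1.38 -> Engine 18.06.0).
        key = COMPOSEFILE_V1 if file_version == COMPOSEFILE_V1 else COMPOSE_SPEC
        return API_VERSION_TO_ENGINE_VERSION[API_VERSIONS[key]]

    assert min_engine_version(ComposeVersion('3.4')) == '18.06.0'
    assert min_engine_version(COMPOSEFILE_V1) == '1.9.0'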
diff --git a/compose/container.py b/compose/container.py
index 0c2ca990..c1c85d14 100644
--- a/compose/container.py
+++ b/compose/container.py
@@ -1,17 +1,18 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from functools import reduce
-import six
from docker.errors import ImageNotFound
from .const import LABEL_CONTAINER_NUMBER
+from .const import LABEL_ONE_OFF
from .const import LABEL_PROJECT
from .const import LABEL_SERVICE
+from .const import LABEL_SLUG
+from .const import LABEL_VERSION
+from .utils import truncate_id
+from .version import ComposeVersion
-class Container(object):
+class Container:
"""
Represents a Docker container, constructed from the output of
GET /containers/:id:/json.
@@ -77,20 +78,38 @@ class Container(object):
@property
def name_without_project(self):
- if self.name.startswith('{0}_{1}'.format(self.project, self.service)):
- return '{0}_{1}'.format(self.service, self.number)
+ if self.name.startswith('{}_{}'.format(self.project, self.service)):
+ return '{}_{}'.format(self.service, self.number if self.number is not None else self.slug)
else:
return self.name
@property
def number(self):
+ if self.one_off:
+ # One-off containers are no longer assigned numbers and use slugs instead.
+ return None
+
number = self.labels.get(LABEL_CONTAINER_NUMBER)
if not number:
- raise ValueError("Container {0} does not have a {1} label".format(
+ raise ValueError("Container {} does not have a {} label".format(
self.short_id, LABEL_CONTAINER_NUMBER))
return int(number)
@property
+ def slug(self):
+ if not self.full_slug:
+ return None
+ return truncate_id(self.full_slug)
+
+ @property
+ def full_slug(self):
+ return self.labels.get(LABEL_SLUG)
+
+ @property
+ def one_off(self):
+ return self.labels.get(LABEL_ONE_OFF) == 'True'
+
+ @property
def ports(self):
self.inspect_if_not_inspected()
return self.get('NetworkSettings.Ports') or {}
@@ -107,7 +126,7 @@ class Container(object):
return ', '.join(
','.join(format_port(*item))
- for item in sorted(six.iteritems(self.ports))
+ for item in sorted(self.ports.items())
)
@property
@@ -168,11 +187,6 @@ class Container(object):
return self.get('HostConfig.LogConfig.Type')
@property
- def has_api_logs(self):
- log_type = self.log_driver
- return not log_type or log_type in ('json-file', 'journald')
-
- @property
def human_readable_health_status(self):
""" Generate UP status string with up time and health
"""
@@ -185,11 +199,7 @@ class Container(object):
return status_string
def attach_log_stream(self):
- """A log stream can only be attached if the container uses a json-file
- log driver.
- """
- if self.has_api_logs:
- self.log_stream = self.attach(stdout=True, stderr=True, stream=True)
+ self.log_stream = self.attach(stdout=True, stderr=True, stream=True)
def get(self, key):
"""Return a value from the container or None if the value is not set.
@@ -205,7 +215,7 @@ class Container(object):
return reduce(get_value, key.split('.'), self.dictionary)
def get_local_port(self, port, protocol='tcp'):
- port = self.ports.get("%s/%s" % (port, protocol))
+ port = self.ports.get("{}/{}".format(port, protocol))
return "{HostIp}:{HostPort}".format(**port[0]) if port else None
def get_mount(self, mount_dest):
@@ -247,7 +257,7 @@ class Container(object):
"""
if not self.name.startswith(self.short_id):
self.client.rename(
- self.id, '{0}_{1}'.format(self.short_id, self.name)
+ self.id, '{}_{}'.format(self.short_id, self.name)
)
def inspect_if_not_inspected(self):
@@ -283,8 +293,14 @@ class Container(object):
def attach(self, *args, **kwargs):
return self.client.attach(self.id, *args, **kwargs)
+ def has_legacy_proj_name(self, project_name):
+ return (
+ ComposeVersion(self.labels.get(LABEL_VERSION)) < ComposeVersion('1.21.0') and
+ self.project != project_name
+ )
+
def __repr__(self):
- return '<Container: %s (%s)>' % (self.name, self.id[:6])
+ return '<Container: {} ({})>'.format(self.name, self.id[:6])
def __eq__(self, other):
if type(self) != type(other):
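How the new properties interact for a one-off container (label values are
illustrative):

    # Labels as set by `docker-compose run web`:
    #   com.docker.compose.oneoff -> 'True'
    #   com.docker.compose.slug   -> '1e0e338a8905...' (full slug)
    #
    # one_off -> True
    # number  -> None (one-offs are no longer numbered)
    # slug    -> truncate_id(full_slug), e.g. '1e0e338a8905'
    # name_without_project -> 'web_1e0e338a8905' instead of 'web_1'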
diff --git a/compose/errors.py b/compose/errors.py
index 415b41e7..502b64b8 100644
--- a/compose/errors.py
+++ b/compose/errors.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-
class OperationFailedError(Exception):
def __init__(self, reason):
self.msg = reason
@@ -19,15 +15,20 @@ class HealthCheckException(Exception):
class HealthCheckFailed(HealthCheckException):
def __init__(self, container_id):
- super(HealthCheckFailed, self).__init__(
+ super().__init__(
'Container "{}" is unhealthy.'.format(container_id)
)
class NoHealthCheckConfigured(HealthCheckException):
def __init__(self, service_name):
- super(NoHealthCheckConfigured, self).__init__(
+ super().__init__(
'Service "{}" is missing a healthcheck configuration'.format(
service_name
)
)
+
+
+class CompletedUnsuccessfully(Exception):
+ def __init__(self, container_id, exit_code):
+ self.msg = 'Container "{}" exited with code {}.'.format(container_id, exit_code)
diff --git a/compose/state.py b/compose/metrics/__init__.py
index e69de29b..e69de29b 100644
--- a/compose/state.py
+++ b/compose/metrics/__init__.py
diff --git a/compose/metrics/client.py b/compose/metrics/client.py
new file mode 100644
index 00000000..204d7e55
--- /dev/null
+++ b/compose/metrics/client.py
@@ -0,0 +1,64 @@
+import os
+from enum import Enum
+
+import requests
+from docker import ContextAPI
+from docker.transport import UnixHTTPAdapter
+
+from compose.const import IS_WINDOWS_PLATFORM
+
+if IS_WINDOWS_PLATFORM:
+ from docker.transport import NpipeHTTPAdapter
+
+
+class Status(Enum):
+ SUCCESS = "success"
+ FAILURE = "failure"
+ CANCELED = "canceled"
+
+
+class MetricsSource:
+ CLI = "docker-compose"
+
+
+if IS_WINDOWS_PLATFORM:
+ METRICS_SOCKET_FILE = 'npipe://\\\\.\\pipe\\docker_cli'
+else:
+ METRICS_SOCKET_FILE = 'http+unix:///var/run/docker-cli.sock'
+
+
+class MetricsCommand(requests.Session):
+ """
+ Representation of a command in the metrics.
+ """
+
+ def __init__(self, command,
+ context_type=None, status=Status.SUCCESS,
+ source=MetricsSource.CLI, uri=None):
+ super().__init__()
+ self.command = ("compose " + command).strip() if command else "compose --help"
+ self.context = context_type or ContextAPI.get_current_context().context_type or 'moby'
+ self.source = source
+ self.status = status.value
+ self.uri = uri or os.environ.get("METRICS_SOCKET_FILE", METRICS_SOCKET_FILE)
+ if IS_WINDOWS_PLATFORM:
+ self.mount("http+unix://", NpipeHTTPAdapter(self.uri))
+ else:
+ self.mount("http+unix://", UnixHTTPAdapter(self.uri))
+
+ def send_metrics(self):
+ try:
+ return self.post("http+unix://localhost/usage",
+ json=self.to_map(),
+ timeout=.05,
+ headers={'Content-Type': 'application/json'})
+ except Exception as e:
+ return e
+
+ def to_map(self):
+ return {
+ 'command': self.command,
+ 'context': self.context,
+ 'source': self.source,
+ 'status': self.status,
+ }
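Usage sketch for the client above: metrics posts are best-effort, with a 50 ms
timeout, and failures are returned rather than raised, so a missing socket
never breaks a command:

    from compose.metrics.client import MetricsCommand, Status

    # On a machine without the docker-cli socket this returns the
    # ConnectionError instead of raising it.
    outcome = MetricsCommand('up -d', status=Status.FAILURE).send_metrics()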
diff --git a/compose/metrics/decorator.py b/compose/metrics/decorator.py
new file mode 100644
index 00000000..3126e694
--- /dev/null
+++ b/compose/metrics/decorator.py
@@ -0,0 +1,21 @@
+import functools
+
+from compose.metrics.client import MetricsCommand
+from compose.metrics.client import Status
+
+
+class metrics:
+ def __init__(self, command_name=None):
+ self.command_name = command_name
+
+ def __call__(self, fn):
+ @functools.wraps(fn,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES)
+ def wrapper(*args, **kwargs):
+ if not self.command_name:
+ self.command_name = fn.__name__
+ result = fn(*args, **kwargs)
+ MetricsCommand(self.command_name, status=Status.SUCCESS).send_metrics()
+ return result
+ return wrapper
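The decorator in use, as a minimal sketch. Because send_metrics only runs after
fn returns, an exception inside fn propagates before any metric is posted, so
this wrapper reports successful invocations only:

    from compose.metrics.decorator import metrics

    class TopLevelCommand:
        @metrics('ps')  # explicit command name; defaults to fn.__name__
        def ps(self, options):
            ...  # 'compose ps' is reported once this returns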
diff --git a/compose/network.py b/compose/network.py
index 1a080c40..a67c703c 100644
--- a/compose/network.py
+++ b/compose/network.py
@@ -1,8 +1,7 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import logging
+import re
from collections import OrderedDict
+from operator import itemgetter
from docker.errors import NotFound
from docker.types import IPAMConfig
@@ -10,9 +9,11 @@ from docker.types import IPAMPool
from docker.utils import version_gte
from docker.utils import version_lt
+from . import __version__
from .config import ConfigurationError
from .const import LABEL_NETWORK
from .const import LABEL_PROJECT
+from .const import LABEL_VERSION
log = logging.getLogger(__name__)
@@ -24,7 +25,7 @@ OPTS_EXCEPTIONS = [
]
-class Network(object):
+class Network:
def __init__(self, client, project, name, driver=None, driver_opts=None,
ipam=None, external=False, internal=False, enable_ipv6=False,
labels=None, custom_name=False):
@@ -39,6 +40,7 @@ class Network(object):
self.enable_ipv6 = enable_ipv6
self.labels = labels
self.custom_name = custom_name
+ self.legacy = None
def ensure(self):
if self.external:
@@ -50,7 +52,7 @@ class Network(object):
try:
self.inspect()
log.debug(
- 'Network {0} declared as external. No new '
+ 'Network {} declared as external. No new '
'network will be created.'.format(self.name)
)
except NotFound:
@@ -64,8 +66,9 @@ class Network(object):
)
return
+ self._set_legacy_flag()
try:
- data = self.inspect()
+ data = self.inspect(legacy=self.legacy)
check_remote_network_config(data, self)
except NotFound:
driver_name = 'the default driver'
@@ -73,8 +76,7 @@ class Network(object):
driver_name = 'driver "{}"'.format(self.driver)
log.info(
- 'Creating network "{}" with {}'
- .format(self.full_name, driver_name)
+ 'Creating network "{}" with {}'.format(self.full_name, driver_name)
)
self.client.create_network(
@@ -91,20 +93,37 @@ class Network(object):
def remove(self):
if self.external:
- log.info("Network %s is external, skipping", self.full_name)
+ log.info("Network %s is external, skipping", self.true_name)
return
- log.info("Removing network {}".format(self.full_name))
- self.client.remove_network(self.full_name)
+ log.info("Removing network {}".format(self.true_name))
+ self.client.remove_network(self.true_name)
- def inspect(self):
+ def inspect(self, legacy=False):
+ if legacy:
+ return self.client.inspect_network(self.legacy_full_name)
return self.client.inspect_network(self.full_name)
@property
+ def legacy_full_name(self):
+ if self.custom_name:
+ return self.name
+ return '{}_{}'.format(
+ re.sub(r'[_-]', '', self.project), self.name
+ )
+
+ @property
def full_name(self):
if self.custom_name:
return self.name
- return '{0}_{1}'.format(self.project, self.name)
+ return '{}_{}'.format(self.project, self.name)
+
+ @property
+ def true_name(self):
+ self._set_legacy_flag()
+ if self.legacy:
+ return self.legacy_full_name
+ return self.full_name
@property
def _labels(self):
@@ -114,9 +133,19 @@ class Network(object):
labels.update({
LABEL_PROJECT: self.project,
LABEL_NETWORK: self.name,
+ LABEL_VERSION: __version__,
})
return labels
+ def _set_legacy_flag(self):
+ if self.legacy is not None:
+ return
+ try:
+ data = self.inspect(legacy=True)
+ self.legacy = data is not None
+ except NotFound:
+ self.legacy = False
+
def create_ipam_config_from_dict(ipam_dict):
if not ipam_dict:
@@ -139,7 +168,7 @@ def create_ipam_config_from_dict(ipam_dict):
class NetworkConfigChangedError(ConfigurationError):
def __init__(self, net_name, property_name):
- super(NetworkConfigChangedError, self).__init__(
+ super().__init__(
'Network "{}" needs to be recreated - {} has changed'.format(
net_name, property_name
)
@@ -150,59 +179,59 @@ def check_remote_ipam_config(remote, local):
remote_ipam = remote.get('IPAM')
ipam_dict = create_ipam_config_from_dict(local.ipam)
if local.ipam.get('driver') and local.ipam.get('driver') != remote_ipam.get('Driver'):
- raise NetworkConfigChangedError(local.full_name, 'IPAM driver')
+ raise NetworkConfigChangedError(local.true_name, 'IPAM driver')
if len(ipam_dict['Config']) != 0:
if len(ipam_dict['Config']) != len(remote_ipam['Config']):
- raise NetworkConfigChangedError(local.full_name, 'IPAM configs')
+ raise NetworkConfigChangedError(local.true_name, 'IPAM configs')
remote_configs = sorted(remote_ipam['Config'], key=lambda c: c.get('Subnet', ''))
local_configs = sorted(ipam_dict['Config'], key=lambda c: c.get('Subnet', ''))
while local_configs:
lc = local_configs.pop()
rc = remote_configs.pop()
if lc.get('Subnet') != rc.get('Subnet'):
- raise NetworkConfigChangedError(local.full_name, 'IPAM config subnet')
+ raise NetworkConfigChangedError(local.true_name, 'IPAM config subnet')
if lc.get('Gateway') is not None and lc.get('Gateway') != rc.get('Gateway'):
- raise NetworkConfigChangedError(local.full_name, 'IPAM config gateway')
+ raise NetworkConfigChangedError(local.true_name, 'IPAM config gateway')
if lc.get('IPRange') != rc.get('IPRange'):
- raise NetworkConfigChangedError(local.full_name, 'IPAM config ip_range')
+ raise NetworkConfigChangedError(local.true_name, 'IPAM config ip_range')
if sorted(lc.get('AuxiliaryAddresses')) != sorted(rc.get('AuxiliaryAddresses')):
- raise NetworkConfigChangedError(local.full_name, 'IPAM config aux_addresses')
+ raise NetworkConfigChangedError(local.true_name, 'IPAM config aux_addresses')
remote_opts = remote_ipam.get('Options') or {}
- local_opts = local.ipam.get('options') or {}
+ local_opts = local.ipam.get('Options') or {}
for k in set.union(set(remote_opts.keys()), set(local_opts.keys())):
if remote_opts.get(k) != local_opts.get(k):
- raise NetworkConfigChangedError(local.full_name, 'IPAM option "{}"'.format(k))
+ raise NetworkConfigChangedError(local.true_name, 'IPAM option "{}"'.format(k))
def check_remote_network_config(remote, local):
if local.driver and remote.get('Driver') != local.driver:
- raise NetworkConfigChangedError(local.full_name, 'driver')
+ raise NetworkConfigChangedError(local.true_name, 'driver')
local_opts = local.driver_opts or {}
remote_opts = remote.get('Options') or {}
for k in set.union(set(remote_opts.keys()), set(local_opts.keys())):
if k in OPTS_EXCEPTIONS:
continue
if remote_opts.get(k) != local_opts.get(k):
- raise NetworkConfigChangedError(local.full_name, 'option "{}"'.format(k))
+ raise NetworkConfigChangedError(local.true_name, 'option "{}"'.format(k))
if local.ipam is not None:
check_remote_ipam_config(remote, local)
if local.internal is not None and local.internal != remote.get('Internal', False):
- raise NetworkConfigChangedError(local.full_name, 'internal')
+ raise NetworkConfigChangedError(local.true_name, 'internal')
if local.enable_ipv6 is not None and local.enable_ipv6 != remote.get('EnableIPv6', False):
- raise NetworkConfigChangedError(local.full_name, 'enable_ipv6')
+ raise NetworkConfigChangedError(local.true_name, 'enable_ipv6')
local_labels = local.labels or {}
- remote_labels = remote.get('Labels', {})
+ remote_labels = remote.get('Labels') or {}
for k in set.union(set(remote_labels.keys()), set(local_labels.keys())):
if k.startswith('com.docker.'): # We are only interested in user-specified labels
continue
if remote_labels.get(k) != local_labels.get(k):
- log.warn(
+ log.warning(
'Network {}: label "{}" has changed. It may need to be'
- ' recreated.'.format(local.full_name, k)
+ ' recreated.'.format(local.true_name, k)
)
@@ -230,7 +259,7 @@ def build_networks(name, config_data, client):
return networks
-class ProjectNetworks(object):
+class ProjectNetworks:
def __init__(self, networks, use_networking):
self.networks = networks or {}
@@ -245,7 +274,7 @@ class ProjectNetworks(object):
}
unused = set(networks) - set(service_networks) - {'default'}
if unused:
- log.warn(
+ log.warning(
"Some networks were defined but are not used by any service: "
"{}".format(", ".join(unused)))
return cls(service_networks, use_networking)
@@ -257,7 +286,7 @@ class ProjectNetworks(object):
try:
network.remove()
except NotFound:
- log.warn("Network %s not found.", network.full_name)
+ log.warning("Network %s not found.", network.true_name)
def initialize(self):
if not self.use_networking:
@@ -271,10 +300,10 @@ def get_network_defs_for_service(service_dict):
if 'network_mode' in service_dict:
return {}
networks = service_dict.get('networks', {'default': None})
- return dict(
- (net, (config or {}))
+ return {
+ net: (config or {})
for net, config in networks.items()
- )
+ }
def get_network_names_for_service(service_dict):
@@ -286,13 +315,18 @@ def get_networks(service_dict, network_definitions):
for name, netdef in get_network_defs_for_service(service_dict).items():
network = network_definitions.get(name)
if network:
- networks[network.full_name] = netdef
+ networks[network.true_name] = netdef
else:
raise ConfigurationError(
'Service "{}" uses an undefined network "{}"'
.format(service_dict['name'], name))
- return OrderedDict(sorted(
- networks.items(),
- key=lambda t: t[1].get('priority') or 0, reverse=True
- ))
+ if any([v.get('priority') for v in networks.values()]):
+ return OrderedDict(sorted(
+ networks.items(),
+ key=lambda t: t[1].get('priority') or 0, reverse=True
+ ))
+ else:
+ # Ensure Compose will pick a consistent primary network if no
+ # priority is set
+ return OrderedDict(sorted(networks.items(), key=itemgetter(0)))
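The legacy-name machinery above exists because pre-1.21 releases stripped '_'
and '-' from project names when deriving network names. A quick sketch of the
two candidates:

    import re

    project, net = 'my_app', 'default'
    full_name = '{}_{}'.format(project, net)                    # 'my_app_default'
    legacy = '{}_{}'.format(re.sub(r'[_-]', '', project), net)  # 'myapp_default'
    # true_name probes the engine once for the legacy name and caches the
    # answer in self.legacy, so networks created by older releases are reused.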
diff --git a/compose/parallel.py b/compose/parallel.py
index a2eb160e..316e2217 100644
--- a/compose/parallel.py
+++ b/compose/parallel.py
@@ -1,27 +1,25 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
+import _thread as thread
import logging
import operator
import sys
+from queue import Empty
+from queue import Queue
from threading import Lock
from threading import Semaphore
from threading import Thread
from docker.errors import APIError
from docker.errors import ImageNotFound
-from six.moves import _thread as thread
-from six.moves.queue import Empty
-from six.moves.queue import Queue
+from compose.cli.colors import AnsiMode
from compose.cli.colors import green
from compose.cli.colors import red
from compose.cli.signals import ShutdownException
from compose.const import PARALLEL_LIMIT
+from compose.errors import CompletedUnsuccessfully
from compose.errors import HealthCheckFailed
from compose.errors import NoHealthCheckConfigured
from compose.errors import OperationFailedError
-from compose.utils import get_output_stream
log = logging.getLogger(__name__)
@@ -29,7 +27,7 @@ log = logging.getLogger(__name__)
STOP = object()
-class GlobalLimit(object):
+class GlobalLimit:
"""Simple class to hold a global semaphore limiter for a project. This class
should be treated as a singleton that is instantiated when the project is.
"""
@@ -43,14 +41,17 @@ class GlobalLimit(object):
cls.global_limiter = Semaphore(value)
-def parallel_execute_watch(events, writer, errors, results, msg, get_name):
+def parallel_execute_watch(events, writer, errors, results, msg, get_name, fail_check):
""" Watch events from a parallel execution, update status and fill errors and results.
Returns exception to re-raise.
"""
error_to_reraise = None
for obj, result, exception in events:
if exception is None:
- writer.write(msg, get_name(obj), 'done', green)
+ if fail_check is not None and fail_check(obj):
+ writer.write(msg, get_name(obj), 'failed', red)
+ else:
+ writer.write(msg, get_name(obj), 'done', green)
results.append(result)
elif isinstance(exception, ImageNotFound):
# This is to bubble up ImageNotFound exceptions to the client so we
@@ -61,7 +62,8 @@ def parallel_execute_watch(events, writer, errors, results, msg, get_name):
elif isinstance(exception, APIError):
errors[get_name(obj)] = exception.explanation
writer.write(msg, get_name(obj), 'error', red)
- elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured)):
+ elif isinstance(exception, (OperationFailedError, HealthCheckFailed, NoHealthCheckConfigured,
+ CompletedUnsuccessfully)):
errors[get_name(obj)] = exception.msg
writer.write(msg, get_name(obj), 'error', red)
elif isinstance(exception, UpstreamError):
@@ -72,20 +74,19 @@ def parallel_execute_watch(events, writer, errors, results, msg, get_name):
return error_to_reraise
-def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None):
+def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None, fail_check=None):
"""Runs func on objects in parallel while ensuring that func is
ran on object only after it is ran on all its dependencies.
get_deps called on object must return a collection with its dependencies.
get_name called on object must return its name.
+ fail_check is an additional failure check for cases that should display as a failure
+ in the CLI logs, but don't raise an exception (such as attempting to start 0 containers)
"""
objects = list(objects)
- stream = get_output_stream(sys.stderr)
+ stream = sys.stderr
- if ParallelStreamWriter.instance:
- writer = ParallelStreamWriter.instance
- else:
- writer = ParallelStreamWriter(stream)
+ writer = ParallelStreamWriter.get_or_assign_instance(ParallelStreamWriter(stream))
for obj in objects:
writer.add_object(msg, get_name(obj))
@@ -96,7 +97,9 @@ def parallel_execute(objects, func, get_name, msg, get_deps=None, limit=None):
errors = {}
results = []
- error_to_reraise = parallel_execute_watch(events, writer, errors, results, msg, get_name)
+ error_to_reraise = parallel_execute_watch(
+ events, writer, errors, results, msg, get_name, fail_check
+ )
for obj_name, error in errors.items():
stream.write("\nERROR: for {} {}\n".format(obj_name, error))
@@ -111,7 +114,7 @@ def _no_deps(x):
return []
-class State(object):
+class State:
"""
Holds the state of a partially-complete parallel operation.
@@ -133,7 +136,7 @@ class State(object):
return set(self.objects) - self.started - self.finished - self.failed
-class NoLimit(object):
+class NoLimit:
def __enter__(self):
pass
@@ -240,6 +243,12 @@ def feed_queue(objects, func, get_deps, results, state, limiter):
'not processing'.format(obj)
)
results.put((obj, None, e))
+ except CompletedUnsuccessfully as e:
+ log.debug(
+ 'Service(s) upstream of {} did not complete successfully - '
+ 'not processing'.format(obj)
+ )
+ results.put((obj, None, e))
if state.is_done():
results.put(STOP)
@@ -249,26 +258,44 @@ class UpstreamError(Exception):
pass
-class ParallelStreamWriter(object):
+class ParallelStreamWriter:
"""Write out messages for operations happening in parallel.
Each operation has its own line, and ANSI code characters are used
to jump to the correct line, and write over the line.
"""
- noansi = False
- lock = Lock()
+ default_ansi_mode = AnsiMode.AUTO
+ write_lock = Lock()
+
instance = None
+ instance_lock = Lock()
@classmethod
- def set_noansi(cls, value=True):
- cls.noansi = value
+ def get_instance(cls):
+ return cls.instance
- def __init__(self, stream):
+ @classmethod
+ def get_or_assign_instance(cls, writer):
+ cls.instance_lock.acquire()
+ try:
+ if cls.instance is None:
+ cls.instance = writer
+ return cls.instance
+ finally:
+ cls.instance_lock.release()
+
+ @classmethod
+ def set_default_ansi_mode(cls, ansi_mode):
+ cls.default_ansi_mode = ansi_mode
+
+ def __init__(self, stream, ansi_mode=None):
+ if ansi_mode is None:
+ ansi_mode = self.default_ansi_mode
self.stream = stream
+ self.use_ansi_codes = ansi_mode.use_ansi_codes(stream)
self.lines = []
self.width = 0
- ParallelStreamWriter.instance = self
def add_object(self, msg, obj_index):
if msg is None:
@@ -282,7 +309,7 @@ class ParallelStreamWriter(object):
return self._write_noansi(msg, obj_index, '')
def _write_ansi(self, msg, obj_index, status):
- self.lock.acquire()
+ self.write_lock.acquire()
position = self.lines.index(msg + obj_index)
diff = len(self.lines) - position
# move up
@@ -294,7 +321,7 @@ class ParallelStreamWriter(object):
# move back down
self.stream.write("%c[%dB" % (27, diff))
self.stream.flush()
- self.lock.release()
+ self.write_lock.release()
def _write_noansi(self, msg, obj_index, status):
self.stream.write(
@@ -307,10 +334,10 @@ class ParallelStreamWriter(object):
def write(self, msg, obj_index, status, color_func):
if msg is None:
return
- if self.noansi:
- self._write_noansi(msg, obj_index, status)
- else:
+ if self.use_ansi_codes:
self._write_ansi(msg, obj_index, color_func(status))
+ else:
+ self._write_noansi(msg, obj_index, status)
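A sketch of the new fail_check hook: it lets an operation that returns without
raising still render as 'failed', mirroring how Project.start flags services
that ended up with zero containers (services and the per-service call here are
hypothetical):

    import operator
    from compose.parallel import parallel_execute

    parallel_execute(
        services,
        lambda s: s.start_service(),
        operator.attrgetter('name'),
        'Starting',
        # No exception means 'done' -- unless fail_check disagrees:
        fail_check=lambda s: not s.containers(),
    )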
def parallel_operation(containers, operation, options, message):
diff --git a/compose/progress_stream.py b/compose/progress_stream.py
index 5e709770..3c03cc4b 100644
--- a/compose/progress_stream.py
+++ b/compose/progress_stream.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from compose import utils
@@ -18,13 +15,12 @@ def write_to_stream(s, stream):
def stream_output(output, stream):
is_terminal = hasattr(stream, 'isatty') and stream.isatty()
- stream = utils.get_output_stream(stream)
- all_events = []
lines = {}
diff = 0
for event in utils.json_stream(output):
- all_events.append(event)
+ yield event
is_progress_event = 'progress' in event or 'progressDetail' in event
if not is_progress_event:
@@ -57,8 +53,6 @@ def stream_output(output, stream):
stream.flush()
- return all_events
-
def print_output_event(event, stream, is_terminal):
if 'errorDetail' in event:
@@ -85,30 +79,30 @@ def print_output_event(event, stream, is_terminal):
status = event.get('status', '')
if 'progress' in event:
- write_to_stream("%s %s%s" % (status, event['progress'], terminator), stream)
+ write_to_stream("{} {}{}".format(status, event['progress'], terminator), stream)
elif 'progressDetail' in event:
detail = event['progressDetail']
total = detail.get('total')
if 'current' in detail and total:
percentage = float(detail['current']) / float(total) * 100
- write_to_stream('%s (%.1f%%)%s' % (status, percentage, terminator), stream)
+ write_to_stream('{} ({:.1f}%){}'.format(status, percentage, terminator), stream)
else:
- write_to_stream('%s%s' % (status, terminator), stream)
+ write_to_stream('{}{}'.format(status, terminator), stream)
elif 'stream' in event:
- write_to_stream("%s%s" % (event['stream'], terminator), stream)
+ write_to_stream("{}{}".format(event['stream'], terminator), stream)
else:
- write_to_stream("%s%s\n" % (status, terminator), stream)
+ write_to_stream("{}{}\n".format(status, terminator), stream)
def get_digest_from_pull(events):
+ digest = None
for event in events:
status = event.get('status')
if not status or 'Digest' not in status:
continue
-
- _, digest = status.split(':', 1)
- return digest.strip()
- return None
+ else:
+ digest = status.split(':', 1)[1].strip()
+ return digest
def get_digest_from_push(events):
@@ -117,3 +111,13 @@ def get_digest_from_push(events):
if digest:
return digest
return None
+
+
+def read_status(event):
+ status = event['status'].lower()
+ if 'progressDetail' in event:
+ detail = event['progressDetail']
+ if 'current' in detail and 'total' in detail:
+ percentage = float(detail['current']) / float(detail['total'])
+ status = '{} ({:.1%})'.format(status, percentage)
+ return status
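Because stream_output is now a generator, nothing is rendered until the caller
iterates it. A sketch of the intended consumption, where the same event stream
feeds both terminal rendering and digest extraction (pull_output stands in for
a docker-py pull stream):

    import sys
    from compose.progress_stream import get_digest_from_pull, stream_output

    events = stream_output(pull_output, sys.stderr)
    digest = get_digest_from_pull(events)  # drives the iteration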
diff --git a/compose/project.py b/compose/project.py
index 924390b4..e862464d 100644
--- a/compose/project.py
+++ b/compose/project.py
@@ -1,21 +1,22 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import datetime
+import enum
import logging
import operator
+import re
from functools import reduce
+from os import path
-import enum
-import six
from docker.errors import APIError
+from docker.errors import ImageNotFound
+from docker.errors import NotFound
+from docker.utils import version_lt
from . import parallel
+from .cli.errors import UserError
from .config import ConfigurationError
from .config.config import V1
from .config.sort_services import get_container_name_from_network_mode
from .config.sort_services import get_service_name_from_network_mode
-from .const import IMAGE_EVENTS
from .const import LABEL_ONE_OFF
from .const import LABEL_PROJECT
from .const import LABEL_SERVICE
@@ -23,20 +24,26 @@ from .container import Container
from .network import build_networks
from .network import get_networks
from .network import ProjectNetworks
+from .progress_stream import read_status
from .service import BuildAction
+from .service import ContainerIpcMode
from .service import ContainerNetworkMode
from .service import ContainerPidMode
from .service import ConvergenceStrategy
+from .service import IpcMode
from .service import NetworkMode
+from .service import NoSuchImageError
+from .service import parse_repository_tag
from .service import PidMode
from .service import Service
-from .service import ServiceName
+from .service import ServiceIpcMode
from .service import ServiceNetworkMode
from .service import ServicePidMode
+from .utils import filter_attached_for_up
from .utils import microseconds_from_time_nano
+from .utils import truncate_string
from .volume import ProjectVolumes
-
log = logging.getLogger(__name__)
@@ -49,38 +56,45 @@ class OneOffFilter(enum.Enum):
@classmethod
def update_labels(cls, value, labels):
if value == cls.only:
- labels.append('{0}={1}'.format(LABEL_ONE_OFF, "True"))
+ labels.append('{}={}'.format(LABEL_ONE_OFF, "True"))
elif value == cls.exclude:
- labels.append('{0}={1}'.format(LABEL_ONE_OFF, "False"))
+ labels.append('{}={}'.format(LABEL_ONE_OFF, "False"))
elif value == cls.include:
pass
else:
raise ValueError("Invalid value for one_off: {}".format(repr(value)))
-class Project(object):
+class Project:
"""
A collection of services.
"""
- def __init__(self, name, services, client, networks=None, volumes=None, config_version=None):
+ def __init__(self, name, services, client, networks=None, volumes=None, config_version=None,
+ enabled_profiles=None):
self.name = name
self.services = services
self.client = client
self.volumes = volumes or ProjectVolumes({})
self.networks = networks or ProjectNetworks({}, False)
self.config_version = config_version
+ self.enabled_profiles = enabled_profiles or []
- def labels(self, one_off=OneOffFilter.exclude):
- labels = ['{0}={1}'.format(LABEL_PROJECT, self.name)]
+ def labels(self, one_off=OneOffFilter.exclude, legacy=False):
+ name = self.name
+ if legacy:
+ name = re.sub(r'[_-]', '', name)
+ labels = ['{}={}'.format(LABEL_PROJECT, name)]
OneOffFilter.update_labels(one_off, labels)
return labels
@classmethod
- def from_config(cls, name, config_data, client, default_platform=None):
+ def from_config(cls, name, config_data, client, default_platform=None, extra_labels=None,
+ enabled_profiles=None):
"""
Construct a Project from a config.Config object.
"""
+ extra_labels = extra_labels or []
use_networking = (config_data.version and config_data.version != V1)
networks = build_networks(name, config_data, client)
project_networks = ProjectNetworks.from_services(
@@ -88,7 +102,7 @@ class Project(object):
networks,
use_networking)
volumes = ProjectVolumes.from_config(name, config_data, client)
- project = cls(name, [], client, project_networks, volumes, config_data.version)
+ project = cls(name, [], client, project_networks, volumes, config_data.version, enabled_profiles)
for service_dict in config_data.services:
service_dict = dict(service_dict)
@@ -99,6 +113,7 @@ class Project(object):
service_dict.pop('networks', None)
links = project.get_links(service_dict)
+ ipc_mode = project.get_ipc_mode(service_dict)
network_mode = project.get_network_mode(
service_dict, list(service_networks.keys())
)
@@ -116,6 +131,18 @@ class Project(object):
service_dict.pop('secrets', None) or [],
config_data.secrets)
+ service_dict['scale'] = project.get_service_scale(service_dict)
+ service_dict['device_requests'] = project.get_device_requests(service_dict)
+ service_dict = translate_credential_spec_to_security_opt(service_dict)
+ service_dict, ignored_keys = translate_deploy_keys_to_container_config(
+ service_dict
+ )
+ if ignored_keys:
+ log.warning(
+ 'The following deploy sub-keys are not supported and have'
+ ' been ignored: {}'.format(', '.join(ignored_keys))
+ )
+
project.services.append(
Service(
service_dict.pop('name'),
@@ -128,7 +155,10 @@ class Project(object):
volumes_from=volumes_from,
secrets=secrets,
pid_mode=pid_mode,
- platform=service_dict.pop('platform', default_platform),
+ ipc_mode=ipc_mode,
+ platform=service_dict.pop('platform', None),
+ default_platform=default_platform,
+ extra_labels=extra_labels,
**service_dict)
)
@@ -159,7 +189,7 @@ class Project(object):
if name not in valid_names:
raise NoSuchService(name)
- def get_services(self, service_names=None, include_deps=False):
+ def get_services(self, service_names=None, include_deps=False, auto_enable_profiles=True):
"""
Returns a list of this project's services filtered
by the provided list of names, or all services if service_names is None
@@ -172,15 +202,36 @@ class Project(object):
reordering as needed to resolve dependencies.
Raises NoSuchService if any of the named services do not exist.
+
+ Raises ConfigurationError if any depended-on service is not enabled by the active profiles
"""
+ # create a copy so we can *locally* add auto-enabled profiles later
+ enabled_profiles = self.enabled_profiles.copy()
+
if service_names is None or len(service_names) == 0:
- service_names = self.service_names
+ auto_enable_profiles = False
+ service_names = [
+ service.name
+ for service in self.services
+ if service.enabled_for_profiles(enabled_profiles)
+ ]
unsorted = [self.get_service(name) for name in service_names]
services = [s for s in self.services if s in unsorted]
+ if auto_enable_profiles:
+ # enable profiles of explicitly targeted services
+ for service in services:
+ for profile in service.get_profiles():
+ if profile not in enabled_profiles:
+ enabled_profiles.append(profile)
+
if include_deps:
- services = reduce(self._inject_deps, services, [])
+ services = reduce(
+ lambda acc, s: self._inject_deps(acc, s, enabled_profiles),
+ services,
+ []
+ )
uniques = []
[uniques.append(s) for s in services if s not in uniques]
@@ -193,25 +244,6 @@ class Project(object):
service.remove_duplicate_containers()
return services
- def get_scaled_services(self, services, scale_override):
- """
- Returns a list of this project's services as scaled ServiceName objects.
-
- services: a list of Service objects
- scale_override: a dict with the scale to apply to each service (k: service_name, v: scale)
- """
- service_names = []
- for service in services:
- if service.name in scale_override:
- scale = scale_override[service.name]
- else:
- scale = service.scale_num
-
- for i in range(1, scale + 1):
- service_names.append(ServiceName(self.name, service.name, i))
-
- return service_names
-
def get_links(self, service_dict):
links = []
if 'links' in service_dict:
@@ -272,6 +304,83 @@ class Project(object):
return PidMode(pid_mode)
+ def get_ipc_mode(self, service_dict):
+ ipc_mode = service_dict.pop('ipc', None)
+ if not ipc_mode:
+ return IpcMode(None)
+
+ service_name = get_service_name_from_network_mode(ipc_mode)
+ if service_name:
+ return ServiceIpcMode(self.get_service(service_name))
+
+ container_name = get_container_name_from_network_mode(ipc_mode)
+ if container_name:
+ try:
+ return ContainerIpcMode(Container.from_id(self.client, container_name))
+ except APIError:
+ raise ConfigurationError(
+ "Service '{name}' uses the IPC namespace of container '{dep}' which "
+ "does not exist.".format(name=service_dict['name'], dep=container_name)
+ )
+
+ return IpcMode(ipc_mode)
+
+ def get_service_scale(self, service_dict):
+ # service.scale for v2 and deploy.replicas for v3
+ scale = service_dict.get('scale', None)
+ deploy_dict = service_dict.get('deploy', None)
+ if not deploy_dict:
+ return 1 if scale is None else scale
+
+ if deploy_dict.get('mode', 'replicated') != 'replicated':
+ return 1 if scale is None else scale
+
+ replicas = deploy_dict.get('replicas', None)
+ if scale is not None and replicas is not None:
+ raise ConfigurationError(
+ "Both service.scale and service.deploy.replicas are set."
+ " Only one of them must be set."
+ )
+ if replicas is not None:
+ scale = replicas
+ if scale is None:
+ return 1
+ # deploy may contain placement constraints introduced in v3.8
+ max_replicas = deploy_dict.get('placement', {}).get(
+ 'max_replicas_per_node',
+ scale)
+
+ if max_replicas < scale:
+ log.warning("Scale is limited to {} ('max_replicas_per_node' field).".format(
+ max_replicas))
+ scale = min(scale, max_replicas)
+ return scale
+
+ def get_device_requests(self, service_dict):
+ deploy_dict = service_dict.get('deploy', None)
+ if not deploy_dict:
+ return
+
+ resources = deploy_dict.get('resources', None)
+ if not resources or not resources.get('reservations', None):
+ return
+ devices = resources['reservations'].get('devices')
+ if not devices:
+ return
+
+ for dev in devices:
+ count = dev.get("count", -1)
+ if not isinstance(count, int):
+ if count != "all":
+ raise ConfigurationError(
+ 'Invalid value "{}" for devices count'.format(dev["count"]),
+ '(expected integer or "all")')
+ dev["count"] = -1
+
+ if 'capabilities' in dev:
+ dev['capabilities'] = [dev['capabilities']]
+ return devices
+
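    # Worked example of the resolution above (values illustrative):
    #   deploy: {mode: replicated, replicas: 6,
    #            placement: {max_replicas_per_node: 2}}
    #     -> scale starts at 6, the warning fires, and min(6, 2) = 2 is used.
    #   service.scale: 3 together with deploy.replicas: 6
    #     -> ConfigurationError: only one of the two may be set.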
def start(self, service_names=None, **options):
containers = []
@@ -293,6 +402,7 @@ class Project(object):
operator.attrgetter('name'),
'Starting',
get_deps,
+ fail_check=lambda obj: not obj.containers(),
)
return containers
@@ -352,10 +462,12 @@ class Project(object):
self.remove_images(remove_image_type)
def remove_images(self, remove_image_type):
- for service in self.get_services():
+ for service in self.services:
service.remove_image(remove_image_type)
def restart(self, service_names=None, **options):
+ # filter service_names by enabled profiles
+ service_names = [s.name for s in self.get_services(service_names)]
containers = self.containers(service_names, stopped=True)
parallel.parallel_execute(
@@ -367,13 +479,45 @@ class Project(object):
return containers
def build(self, service_names=None, no_cache=False, pull=False, force_rm=False, memory=None,
- build_args=None, gzip=False):
+ build_args=None, gzip=False, parallel_build=False, rm=True, silent=False, cli=False,
+ progress=None):
+
+ services = []
for service in self.get_services(service_names):
if service.can_be_built():
- service.build(no_cache, pull, force_rm, memory, build_args, gzip)
- else:
+ services.append(service)
+ elif not silent:
log.info('%s uses an image, skipping' % service.name)
+ if cli:
+ if parallel_build:
+ log.warning("Flag '--parallel' is ignored when building with "
+ "COMPOSE_DOCKER_CLI_BUILD=1")
+ if gzip:
+ log.warning("Flag '--compress' is ignored when building with "
+ "COMPOSE_DOCKER_CLI_BUILD=1")
+
+ def build_service(service):
+ service.build(no_cache, pull, force_rm, memory, build_args, gzip, rm, silent, cli, progress)
+
+ if parallel_build:
+ _, errors = parallel.parallel_execute(
+ services,
+ build_service,
+ operator.attrgetter('name'),
+ 'Building',
+ limit=5,
+ )
+ if len(errors):
+ combined_errors = '\n'.join([
+ e.decode('utf-8') if isinstance(e, bytes) else e for e in errors.values()
+ ])
+ raise ProjectError(combined_errors)
+
+ else:
+ for service in services:
+ build_service(service)
+
def create(
self,
service_names=None,
@@ -392,11 +536,13 @@ class Project(object):
detached=True,
start=False)
- def events(self, service_names=None):
+ def _legacy_event_processor(self, service_names):
+ # Only for v1 files or when Compose is forced to use an older API version
def build_container_event(event, container):
time = datetime.datetime.fromtimestamp(event['time'])
time = time.replace(
- microsecond=microseconds_from_time_nano(event['timeNano']))
+ microsecond=microseconds_from_time_nano(event['timeNano'])
+ )
return {
'time': time,
'type': 'container',
@@ -415,17 +561,15 @@ class Project(object):
filters={'label': self.labels()},
decode=True
):
- # The first part of this condition is a guard against some events
- # broadcasted by swarm that don't have a status field.
+ # This is a guard against some events broadcasted by swarm that
+ # don't have a status field.
# See https://github.com/docker/compose/issues/3316
- if 'status' not in event or event['status'] in IMAGE_EVENTS:
- # We don't receive any image events because labels aren't applied
- # to images
+ if 'status' not in event:
continue
- # TODO: get labels from the API v1.22 , see github issue 2618
try:
- # this can fail if the container has been removed
+ # this can fail if the container has been removed or if the event
+ # refers to an image
container = Container.from_id(self.client, event['id'])
except APIError:
continue
@@ -433,6 +577,56 @@ class Project(object):
continue
yield build_container_event(event, container)
+ def events(self, service_names=None):
+ if version_lt(self.client.api_version, '1.22'):
+ # New, better event API was introduced in 1.22.
+ return self._legacy_event_processor(service_names)
+
+ def build_container_event(event):
+ container_attrs = event['Actor']['Attributes']
+ time = datetime.datetime.fromtimestamp(event['time'])
+ time = time.replace(
+ microsecond=microseconds_from_time_nano(event['timeNano'])
+ )
+
+ container = None
+ try:
+ container = Container.from_id(self.client, event['id'])
+ except APIError:
+ # Container may have been removed (e.g. if this is a destroy event)
+ pass
+
+ return {
+ 'time': time,
+ 'type': 'container',
+ 'action': event['status'],
+ 'id': event['Actor']['ID'],
+ 'service': container_attrs.get(LABEL_SERVICE),
+ 'attributes': {
+ k: v for k, v in container_attrs.items()
+ if not k.startswith('com.docker.compose.')
+ },
+ 'container': container,
+ }
+
+ def yield_loop(service_names):
+ for event in self.client.events(
+ filters={'label': self.labels()},
+ decode=True
+ ):
+ # TODO: support other event types
+ if event.get('Type') != 'container':
+ continue
+
+ try:
+ if event['Actor']['Attributes'][LABEL_SERVICE] not in service_names:
+ continue
+ except KeyError:
+ continue
+ yield build_container_event(event)
+
+ return yield_loop(set(service_names) if service_names else self.service_names)
+
def up(self,
service_names=None,
start_deps=True,
@@ -449,6 +643,10 @@ class Project(object):
reset_container_image=False,
renew_anonymous_volumes=False,
silent=False,
+ cli=False,
+ one_off=False,
+ attach_dependencies=False,
+ override_options=None,
):
self.initialize()
@@ -463,23 +661,31 @@ class Project(object):
include_deps=start_deps)
for svc in services:
- svc.ensure_image_exists(do_build=do_build, silent=silent)
+ svc.ensure_image_exists(do_build=do_build, silent=silent, cli=cli)
plans = self._get_convergence_plans(
- services, strategy, always_recreate_deps=always_recreate_deps)
- scaled_services = self.get_scaled_services(services, scale_override)
+ services,
+ strategy,
+ always_recreate_deps=always_recreate_deps,
+ one_off=service_names if one_off else [],
+ )
- def do(service):
+ services_to_attach = filter_attached_for_up(
+ services,
+ service_names,
+ attach_dependencies,
+ lambda service: service.name)
+ def do(service):
return service.execute_convergence_plan(
plans[service.name],
timeout=timeout,
- detached=detached,
+ detached=detached or (service not in services_to_attach),
scale_override=scale_override.get(service.name),
rescale=rescale,
start=start,
- project_services=scaled_services,
reset_container_image=reset_container_image,
renew_anonymous_volumes=renew_anonymous_volumes,
+ override_options=override_options,
)
def get_deps(service):
@@ -511,7 +717,7 @@ class Project(object):
self.networks.initialize()
self.volumes.initialize()
- def _get_convergence_plans(self, services, strategy, always_recreate_deps=False):
+ def _get_convergence_plans(self, services, strategy, always_recreate_deps=False, one_off=None):
plans = {}
for service in services:
@@ -521,6 +727,7 @@ class Project(object):
if name in plans and
plans[name].action in ('recreate', 'create')
]
+ is_one_off = one_off and service.name in one_off
if updated_dependencies and strategy.allows_recreate:
log.debug('%s has upstream changes (%s)',
@@ -528,54 +735,115 @@ class Project(object):
", ".join(updated_dependencies))
containers_stopped = any(
service.containers(stopped=True, filters={'status': ['created', 'exited']}))
- has_links = any(c.get('HostConfig.Links') for c in service.containers())
- if always_recreate_deps or containers_stopped or not has_links:
- plan = service.convergence_plan(ConvergenceStrategy.always)
+ service_has_links = any(service.get_link_names())
+ container_has_links = any(c.get('HostConfig.Links') for c in service.containers())
+ should_recreate_for_links = service_has_links ^ container_has_links
+ if always_recreate_deps or containers_stopped or should_recreate_for_links:
+ plan = service.convergence_plan(ConvergenceStrategy.always, is_one_off)
else:
- plan = service.convergence_plan(strategy)
+ plan = service.convergence_plan(strategy, is_one_off)
else:
- plan = service.convergence_plan(strategy)
+ plan = service.convergence_plan(strategy, is_one_off)
plans[service.name] = plan
return plans
- def pull(self, service_names=None, ignore_pull_failures=False, parallel_pull=False, silent=False,
+ def pull(self, service_names=None, ignore_pull_failures=False, parallel_pull=True, silent=False,
include_deps=False):
services = self.get_services(service_names, include_deps)
if parallel_pull:
- def pull_service(service):
- service.pull(ignore_pull_failures, True)
-
- _, errors = parallel.parallel_execute(
- services,
- pull_service,
- operator.attrgetter('name'),
- not silent and 'Pulling' or None,
- limit=5,
- )
- if len(errors):
- combined_errors = '\n'.join([
- e.decode('utf-8') if isinstance(e, six.binary_type) else e for e in errors.values()
- ])
- raise ProjectError(combined_errors)
+ self.parallel_pull(services, silent=silent)
else:
+ must_build = []
for service in services:
- service.pull(ignore_pull_failures, silent=silent)
+ try:
+ service.pull(ignore_pull_failures, silent=silent)
+ except (ImageNotFound, NotFound):
+ if service.can_be_built():
+ must_build.append(service.name)
+ else:
+ raise
+
+ if len(must_build):
+ log.warning('Some service image(s) must be built from source by running:\n'
+ ' docker-compose build {}'
+ .format(' '.join(must_build)))
+
+ def parallel_pull(self, services, ignore_pull_failures=False, silent=False):
+ msg = 'Pulling' if not silent else None
+ must_build = []
+
+ def pull_service(service):
+ strm = service.pull(ignore_pull_failures, True, stream=True)
+
+ if strm is None: # Attempting to pull service with no `image` key is a no-op
+ return
+
+ try:
+ writer = parallel.ParallelStreamWriter.get_instance()
+ if writer is None:
+ raise RuntimeError('ParallelStreamWriter has not yet been instantiated')
+ for event in strm:
+ if 'status' not in event:
+ continue
+ status = read_status(event)
+ writer.write(
+ msg, service.name, truncate_string(status), lambda s: s
+ )
+ except (ImageNotFound, NotFound):
+ if service.can_be_built():
+ must_build.append(service.name)
+ else:
+ raise
+
+ _, errors = parallel.parallel_execute(
+ services,
+ pull_service,
+ operator.attrgetter('name'),
+ msg,
+ limit=5,
+ )
+
+ if len(must_build):
+ log.warning('Some service image(s) must be built from source by running:\n'
+ ' docker-compose build {}'
+ .format(' '.join(must_build)))
+ if len(errors):
+ combined_errors = '\n'.join([
+ e.decode('utf-8') if isinstance(e, bytes) else e for e in errors.values()
+ ])
+ raise ProjectError(combined_errors)
def push(self, service_names=None, ignore_push_failures=False):
+ unique_images = set()
for service in self.get_services(service_names, include_deps=False):
- service.push(ignore_push_failures)
+ # Considering <image> and <image:latest> as the same
+ repo, tag, sep = parse_repository_tag(service.image_name)
+ service_image_name = sep.join((repo, tag)) if tag else sep.join((repo, 'latest'))
+
+ if service_image_name not in unique_images:
+ service.push(ignore_push_failures)
+ unique_images.add(service_image_name)
def _labeled_containers(self, stopped=False, one_off=OneOffFilter.exclude):
- return list(filter(None, [
+ ctnrs = list(filter(None, [
Container.from_ps(self.client, container)
for container in self.client.containers(
all=stopped,
filters={'label': self.labels(one_off=one_off)})])
)
+ if ctnrs:
+ return ctnrs
+
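+ # Fall back to the legacy label set (project name with '-' and '_'
+ # stripped) so containers created by older Compose versions are found.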
+ return list(filter(lambda c: c.has_legacy_proj_name(self.name), filter(None, [
+ Container.from_ps(self.client, container)
+ for container in self.client.containers(
+ all=stopped,
+ filters={'label': self.labels(one_off=one_off, legacy=True)})])
+ ))
def containers(self, service_names=None, stopped=False, one_off=OneOffFilter.exclude):
if service_names:
@@ -592,7 +860,7 @@ class Project(object):
def find_orphan_containers(self, remove_orphans):
def _find():
- containers = self._labeled_containers()
+ containers = set(self._labeled_containers() + self._labeled_containers(stopped=True))
for ctnr in containers:
service_name = ctnr.labels.get(LABEL_SERVICE)
if service_name not in self.service_names:
@@ -602,12 +870,15 @@ class Project(object):
return
if remove_orphans:
for ctnr in orphans:
- log.info('Removing orphan container "{0}"'.format(ctnr.name))
- ctnr.kill()
+ log.info('Removing orphan container "{}"'.format(ctnr.name))
+ try:
+ ctnr.kill()
+ except APIError:
+ pass
ctnr.remove(force=True)
else:
log.warning(
- 'Found orphan containers ({0}) for this project. If '
+ 'Found orphan containers ({}) for this project. If '
'you removed or renamed this service in your compose '
'file, you can run this command with the '
'--remove-orphans flag to clean it up.'.format(
@@ -615,14 +886,26 @@ class Project(object):
)
)
- def _inject_deps(self, acc, service):
+ def _inject_deps(self, acc, service, enabled_profiles):
dep_names = service.get_dependency_names()
if len(dep_names) > 0:
dep_services = self.get_services(
service_names=list(set(dep_names)),
- include_deps=True
+ include_deps=True,
+ auto_enable_profiles=False
)
+
+ for dep in dep_services:
+ if not dep.enabled_for_profiles(enabled_profiles):
+ raise ConfigurationError(
+ 'Service "{dep_name}" was pulled in as a dependency of '
+ 'service "{service_name}" but is not enabled by the '
+ 'active profiles. '
+ 'You may fix this by adding a common profile to '
+ '"{dep_name}" and "{service_name}".'
+ .format(dep_name=dep.name, service_name=service.name)
+ )
else:
dep_services = []
@@ -631,13 +914,89 @@ class Project(object):
def build_container_operation_with_timeout_func(self, operation, options):
def container_operation_with_timeout(container):
- if options.get('timeout') is None:
+ _options = options.copy()
+ if _options.get('timeout') is None:
service = self.get_service(container.service)
- options['timeout'] = service.stop_timeout(None)
- return getattr(container, operation)(**options)
+ _options['timeout'] = service.stop_timeout(None)
+ return getattr(container, operation)(**_options)
return container_operation_with_timeout
+def translate_credential_spec_to_security_opt(service_dict):
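+ # credential_spec has no direct container equivalent; translate it into
+ # the corresponding `credentialspec=...` security_opt entry instead.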
+ result = []
+
+ if 'credential_spec' in service_dict:
+ spec = convert_credential_spec_to_security_opt(service_dict['credential_spec'])
+ result.append('credentialspec={spec}'.format(spec=spec))
+
+ if result:
+ service_dict['security_opt'] = result
+
+ return service_dict
+
+
+def translate_resource_keys_to_container_config(resources_dict, service_dict):
+ if 'limits' in resources_dict:
+ service_dict['mem_limit'] = resources_dict['limits'].get('memory')
+ if 'cpus' in resources_dict['limits']:
+ service_dict['cpus'] = float(resources_dict['limits']['cpus'])
+ if 'reservations' in resources_dict:
+ service_dict['mem_reservation'] = resources_dict['reservations'].get('memory')
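+ # Reserved CPUs have no per-container equivalent, so report the key
+ # back to the caller as ignored.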
+ if 'cpus' in resources_dict['reservations']:
+ return ['resources.reservations.cpus']
+ return []
+
+
+def convert_restart_policy(name):
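+ # Map deploy.restart_policy conditions (Swarm vocabulary) onto the
+ # engine's container restart policy names.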
+ try:
+ return {
+ 'any': 'always',
+ 'none': 'no',
+ 'on-failure': 'on-failure'
+ }[name]
+ except KeyError:
+ raise ConfigurationError('Invalid restart policy "{}"'.format(name))
+
+
+def convert_credential_spec_to_security_opt(credential_spec):
+ if 'file' in credential_spec:
+ return 'file://{file}'.format(file=credential_spec['file'])
+ return 'registry://{registry}'.format(registry=credential_spec['registry'])
+
+
+def translate_deploy_keys_to_container_config(service_dict):
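+ # Apply the subset of `deploy` options that makes sense for plain
+ # containers, and return the keys that had to be ignored so the caller
+ # can warn about them.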
+ if 'credential_spec' in service_dict:
+ del service_dict['credential_spec']
+ if 'configs' in service_dict:
+ del service_dict['configs']
+
+ if 'deploy' not in service_dict:
+ return service_dict, []
+
+ deploy_dict = service_dict['deploy']
+ ignored_keys = [
+ k for k in ['endpoint_mode', 'labels', 'update_config', 'rollback_config']
+ if k in deploy_dict
+ ]
+
+ if 'restart_policy' in deploy_dict:
+ service_dict['restart'] = {
+ 'Name': convert_restart_policy(deploy_dict['restart_policy'].get('condition', 'any')),
+ 'MaximumRetryCount': deploy_dict['restart_policy'].get('max_attempts', 0)
+ }
+ for k in deploy_dict['restart_policy'].keys():
+ if k != 'condition' and k != 'max_attempts':
+ ignored_keys.append('restart_policy.{}'.format(k))
+
+ ignored_keys.extend(
+ translate_resource_keys_to_container_config(
+ deploy_dict.get('resources', {}), service_dict
+ )
+ )
+ del service_dict['deploy']
+ return service_dict, ignored_keys
+
+
def get_volumes_from(project, service_dict):
volumes_from = service_dict.pop('volumes_from', None)
if not volumes_from:
@@ -677,28 +1036,121 @@ def get_secrets(service, service_secrets, secret_defs):
.format(service=service, secret=secret.source))
if secret_def.get('external'):
- log.warn("Service \"{service}\" uses secret \"{secret}\" which is external. "
- "External secrets are not available to containers created by "
- "docker-compose.".format(service=service, secret=secret.source))
+ log.warning('Service "{service}" uses secret "{secret}" which is external. '
+ 'External secrets are not available to containers created by '
+ 'docker-compose.'.format(service=service, secret=secret.source))
continue
if secret.uid or secret.gid or secret.mode:
- log.warn(
- "Service \"{service}\" uses secret \"{secret}\" with uid, "
- "gid, or mode. These fields are not supported by this "
- "implementation of the Compose file".format(
+ log.warning(
+ 'Service "{service}" uses secret "{secret}" with uid, '
+ 'gid, or mode. These fields are not supported by this '
+ 'implementation of the Compose file'.format(
service=service, secret=secret.source
)
)
- secrets.append({'secret': secret, 'file': secret_def.get('file')})
+ secret_file = secret_def.get('file')
+ if not path.isfile(str(secret_file)):
+ log.warning(
+ 'Service "{service}" uses an undefined secret file "{secret_file}", '
+ 'the following file should be created "{secret_file}"'.format(
+ service=service, secret_file=secret_file
+ )
+ )
+ secrets.append({'secret': secret, 'file': secret_file})
return secrets
+def get_image_digests(project):
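+ # Resolve a pinned digest for every service image, collecting services
+ # that still need a push or a pull so a single MissingDigests error can
+ # be raised with the full picture.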
+ digests = {}
+ needs_push = set()
+ needs_pull = set()
+
+ for service in project.services:
+ try:
+ digests[service.name] = get_image_digest(service)
+ except NeedsPush as e:
+ needs_push.add(e.image_name)
+ except NeedsPull as e:
+ needs_pull.add(e.service_name)
+
+ if needs_push or needs_pull:
+ raise MissingDigests(needs_push, needs_pull)
+
+ return digests
+
+
+def get_image_digest(service):
+ if 'image' not in service.options:
+ raise UserError(
+ "Service '{s.name}' doesn't define an image tag. An image name is "
+ "required to generate a proper image digest. Specify an image repo "
+ "and tag with the 'image' option.".format(s=service))
+
+ _, _, separator = parse_repository_tag(service.options['image'])
+ # Compose file already uses a digest, no lookup required
+ if separator == '@':
+ return service.options['image']
+
+ digest = get_digest(service)
+
+ if digest:
+ return digest
+
+ if 'build' not in service.options:
+ raise NeedsPull(service.image_name, service.name)
+
+ raise NeedsPush(service.image_name)
+
+
+def get_digest(service):
+ digest = None
+ try:
+ image = service.image()
+ # TODO: pick a digest based on the image tag if there are multiple
+ # digests
+ if image['RepoDigests']:
+ digest = image['RepoDigests'][0]
+ except NoSuchImageError:
+ try:
+ # Fetch the image digest from the registry
+ distribution = service.get_image_registry_data()
+
+ if distribution['Descriptor']['digest']:
+ digest = '{image_name}@{digest}'.format(
+ image_name=service.image_name,
+ digest=distribution['Descriptor']['digest']
+ )
+ except NoSuchImageError:
+ raise UserError(
+ "Digest not found for service '{service}'. "
+ "Repository does not exist or may require 'docker login'"
+ .format(service=service.name))
+ return digest
+
+
+class MissingDigests(Exception):
+ def __init__(self, needs_push, needs_pull):
+ self.needs_push = needs_push
+ self.needs_pull = needs_pull
+
+
+class NeedsPush(Exception):
+ def __init__(self, image_name):
+ self.image_name = image_name
+
+
+class NeedsPull(Exception):
+ def __init__(self, image_name, service_name):
+ self.image_name = image_name
+ self.service_name = service_name
+
+
class NoSuchService(Exception):
def __init__(self, name):
- if isinstance(name, six.binary_type):
+ if isinstance(name, bytes):
name = name.decode('utf-8')
self.name = name
self.msg = "No such service: %s" % self.name
diff --git a/compose/service.py b/compose/service.py
index bb9e26ba..17505fb1 100644
--- a/compose/service.py
+++ b/compose/service.py
@@ -1,16 +1,15 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
+import enum
+import itertools
import logging
import os
import re
+import subprocess
import sys
+import tempfile
from collections import namedtuple
from collections import OrderedDict
from operator import attrgetter
-import enum
-import six
from docker.errors import APIError
from docker.errors import ImageNotFound
from docker.errors import NotFound
@@ -26,6 +25,7 @@ from . import __version__
from . import const
from . import progress_stream
from .config import DOCKER_CONFIG_KEYS
+from .config import is_url
from .config import merge_environment
from .config import merge_labels
from .config.errors import DependencyError
@@ -39,23 +39,29 @@ from .const import LABEL_CONTAINER_NUMBER
from .const import LABEL_ONE_OFF
from .const import LABEL_PROJECT
from .const import LABEL_SERVICE
+from .const import LABEL_SLUG
from .const import LABEL_VERSION
from .const import NANOCPUS_SCALE
+from .const import WINDOWS_LONGPATH_PREFIX
from .container import Container
+from .errors import CompletedUnsuccessfully
from .errors import HealthCheckFailed
from .errors import NoHealthCheckConfigured
from .errors import OperationFailedError
from .parallel import parallel_execute
from .progress_stream import stream_output
from .progress_stream import StreamOutputError
+from .utils import generate_random_id
from .utils import json_hash
from .utils import parse_bytes
from .utils import parse_seconds_float
+from .utils import truncate_id
+from .utils import unique_everseen
+from compose.cli.utils import binarystr_to_unicode
log = logging.getLogger(__name__)
-
HOST_CONFIG_KEYS = [
'cap_add',
'cap_drop',
@@ -71,6 +77,7 @@ HOST_CONFIG_KEYS = [
'cpuset',
'device_cgroup_rules',
'devices',
+ 'device_requests',
'dns',
'dns_search',
'dns_opt',
@@ -79,6 +86,7 @@ HOST_CONFIG_KEYS = [
'group_add',
'init',
'ipc',
+ 'isolation',
'read_only',
'log_driver',
'log_opt',
@@ -104,6 +112,7 @@ HOST_CONFIG_KEYS = [
CONDITION_STARTED = 'service_started'
CONDITION_HEALTHY = 'service_healthy'
+CONDITION_COMPLETED_SUCCESSFULLY = 'service_completed_successfully'
class BuildError(Exception):
@@ -123,7 +132,6 @@ class NoSuchImageError(Exception):
ServiceName = namedtuple('ServiceName', 'project service number')
-
ConvergencePlan = namedtuple('ConvergencePlan', 'action containers')
@@ -157,21 +165,24 @@ class BuildAction(enum.Enum):
skip = 2
-class Service(object):
+class Service:
def __init__(
- self,
- name,
- client=None,
- project='default',
- use_networking=False,
- links=None,
- volumes_from=None,
- network_mode=None,
- networks=None,
- secrets=None,
- scale=None,
- pid_mode=None,
- **options
+ self,
+ name,
+ client=None,
+ project='default',
+ use_networking=False,
+ links=None,
+ volumes_from=None,
+ network_mode=None,
+ networks=None,
+ secrets=None,
+ scale=1,
+ ipc_mode=None,
+ pid_mode=None,
+ default_platform=None,
+ extra_labels=None,
+ **options
):
self.name = name
self.client = client
@@ -179,34 +190,52 @@ class Service(object):
self.use_networking = use_networking
self.links = links or []
self.volumes_from = volumes_from or []
+ self.ipc_mode = ipc_mode or IpcMode(None)
self.network_mode = network_mode or NetworkMode(None)
self.pid_mode = pid_mode or PidMode(None)
self.networks = networks or {}
self.secrets = secrets or []
- self.scale_num = scale or 1
+ self.scale_num = scale
+ self.default_platform = default_platform
self.options = options
+ self.extra_labels = extra_labels or []
def __repr__(self):
return '<Service: {}>'.format(self.name)
- def containers(self, stopped=False, one_off=False, filters={}):
- filters.update({'label': self.labels(one_off=one_off)})
+ def containers(self, stopped=False, one_off=False, filters=None, labels=None):
+ if filters is None:
+ filters = {}
+ filters.update({'label': self.labels(one_off=one_off) + (labels or [])})
- return list(filter(None, [
+ result = list(filter(None, [
Container.from_ps(self.client, container)
for container in self.client.containers(
all=stopped,
- filters=filters)]))
+ filters=filters)])
+ )
+ if result:
+ return result
+
+ filters.update({'label': self.labels(one_off=one_off, legacy=True) + (labels or [])})
+ return list(
+ filter(
+ lambda c: c.has_legacy_proj_name(self.project), filter(None, [
+ Container.from_ps(self.client, container)
+ for container in self.client.containers(
+ all=stopped,
+ filters=filters)])
+ )
+ )
def get_container(self, number=1):
"""Return a :class:`compose.container.Container` for this service. The
container must be active, and match `number`.
"""
- labels = self.labels() + ['{0}={1}'.format(LABEL_CONTAINER_NUMBER, number)]
- for container in self.client.containers(filters={'label': labels}):
- return Container.from_ps(self.client, container)
+ for container in self.containers(labels=['{}={}'.format(LABEL_CONTAINER_NUMBER, number)]):
+ return container
- raise ValueError("No container found for %s_%s" % (self.name, number))
+ raise ValueError("No container found for {}_{}".format(self.name, number))
def start(self, **options):
containers = self.containers(stopped=True)
@@ -216,15 +245,15 @@ class Service(object):
def show_scale_warnings(self, desired_num):
if self.custom_container_name and desired_num > 1:
- log.warn('The "%s" service is using the custom container name "%s". '
- 'Docker requires each container to have a unique name. '
- 'Remove the custom name to scale the service.'
- % (self.name, self.custom_container_name))
+ log.warning('The "%s" service is using the custom container name "%s". '
+ 'Docker requires each container to have a unique name. '
+ 'Remove the custom name to scale the service.'
+ % (self.name, self.custom_container_name))
if self.specifies_host_port() and desired_num > 1:
- log.warn('The "%s" service specifies a port on the host. If multiple containers '
- 'for this service are created on a single host, the port will clash.'
- % self.name)
+ log.warning('The "%s" service specifies a port on the host. If multiple containers '
+ 'for this service are created on a single host, the port will clash.'
+ % self.name)
def scale(self, desired_num, timeout=None):
"""
@@ -241,6 +270,11 @@ class Service(object):
running_containers = self.containers(stopped=False)
num_running = len(running_containers)
+ for c in running_containers:
+ if not c.has_legacy_proj_name(self.project):
+ continue
+ log.info('Recreating container with legacy name %s' % c.name)
+ self.recreate_container(c, timeout, start_new_container=False)
if desired_num == num_running:
# do nothing as we already have the desired number
@@ -261,7 +295,7 @@ class Service(object):
c for c in stopped_containers if self._containers_have_diverged([c])
]
for c in divergent_containers:
- c.remove()
+ c.remove()
all_containers = list(set(all_containers) - set(divergent_containers))
@@ -307,11 +341,11 @@ class Service(object):
return Container.create(self.client, **container_options)
except APIError as ex:
raise OperationFailedError("Cannot create container for service %s: %s" %
- (self.name, ex.explanation))
+ (self.name, binarystr_to_unicode(ex.explanation)))
- def ensure_image_exists(self, do_build=BuildAction.none, silent=False):
+ def ensure_image_exists(self, do_build=BuildAction.none, silent=False, cli=False):
if self.can_be_built() and do_build == BuildAction.force:
- self.build()
+ self.build(cli=cli)
return
try:
@@ -327,12 +361,18 @@ class Service(object):
if do_build == BuildAction.skip:
raise NeedsBuildError(self)
- self.build()
- log.warn(
+ self.build(cli=cli)
+ log.warning(
"Image for service {} was built because it did not already exist. To "
"rebuild this image you must use `docker-compose build` or "
"`docker-compose up --build`.".format(self.name))
+ def get_image_registry_data(self):
+ try:
+ return self.client.inspect_distribution(self.image_name)
+ except APIError:
+ raise NoSuchImageError("Image '{}' not found".format(self.image_name))
+
def image(self):
try:
return self.client.inspect_image(self.image_name)
@@ -341,11 +381,23 @@ class Service(object):
@property
def image_name(self):
- return self.options.get('image', '{s.project}_{s.name}'.format(s=self))
+ return self.options.get('image', '{project}_{s.name}'.format(
+ s=self, project=self.project.lstrip('_-')
+ ))
- def convergence_plan(self, strategy=ConvergenceStrategy.changed):
+ @property
+ def platform(self):
+ platform = self.options.get('platform')
+ if not platform and version_gte(self.client.api_version, '1.35'):
+ platform = self.default_platform
+ return platform
+
+ def convergence_plan(self, strategy=ConvergenceStrategy.changed, one_off=False):
containers = self.containers(stopped=True)
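+ # A one-off run never converges existing containers; it always gets a
+ # fresh container of its own.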
+ if one_off:
+ return ConvergencePlan('one_off', [])
+
if not containers:
return ConvergencePlan('create', [])
@@ -353,15 +405,15 @@ class Service(object):
return ConvergencePlan('start', containers)
if (
- strategy is ConvergenceStrategy.always or
- self._containers_have_diverged(containers)
+ strategy is ConvergenceStrategy.always or
+ self._containers_have_diverged(containers)
):
return ConvergencePlan('recreate', containers)
stopped = [c for c in containers if not c.is_running]
if stopped:
- return ConvergencePlan('start', stopped)
+ return ConvergencePlan('start', containers)
return ConvergencePlan('noop', containers)
@@ -373,13 +425,17 @@ class Service(object):
except NoSuchImageError as e:
log.debug(
'Service %s has diverged: %s',
- self.name, six.text_type(e),
+ self.name, str(e),
)
return True
has_diverged = False
for c in containers:
+ if c.has_legacy_proj_name(self.project):
+ log.debug('%s has diverged: Legacy project name' % c.name)
+ has_diverged = True
+ continue
container_config_hash = c.labels.get(LABEL_CONFIG_HASH, None)
if container_config_hash != config_hash:
log.debug(
@@ -390,74 +446,92 @@ class Service(object):
return has_diverged
- def _execute_convergence_create(self, scale, detached, start, project_services=None):
- i = self._next_container_number()
+ def _execute_convergence_create(self, scale, detached, start, one_off=False, override_options=None):
+
+ i = self._next_container_number()
- def create_and_start(service, n):
+ def create_and_start(service, n):
+ if one_off:
+ container = service.create_container(one_off=True, quiet=True, **override_options)
+ else:
container = service.create_container(number=n, quiet=True)
- if not detached:
- container.attach_log_stream()
- if start:
- self.start_container(container)
- return container
-
- containers, errors = parallel_execute(
- [ServiceName(self.project, self.name, index) for index in range(i, i + scale)],
- lambda service_name: create_and_start(self, service_name.number),
- lambda service_name: self.get_container_name(service_name.service, service_name.number),
- "Creating"
- )
- for error in errors.values():
- raise OperationFailedError(error)
+ if not detached:
+ container.attach_log_stream()
+ if start and not one_off:
+ self.start_container(container)
+ return container
+
+ def get_name(service_name):
+ if one_off:
+ return "_".join([
+ service_name.project,
+ service_name.service,
+ "run",
+ ])
+ return self.get_container_name(service_name.service, service_name.number)
+
+ containers, errors = parallel_execute(
+ [
+ ServiceName(self.project, self.name, index)
+ for index in range(i, i + scale)
+ ],
+ lambda service_name: create_and_start(self, service_name.number),
+ get_name,
+ "Creating"
+ )
+ for error in errors.values():
+ raise OperationFailedError(error)
- return containers
+ return containers
def _execute_convergence_recreate(self, containers, scale, timeout, detached, start,
renew_anonymous_volumes):
- if scale is not None and len(containers) > scale:
- self._downscale(containers[scale:], timeout)
- containers = containers[:scale]
-
- def recreate(container):
- return self.recreate_container(
- container, timeout=timeout, attach_logs=not detached,
- start_new_container=start, renew_anonymous_volumes=renew_anonymous_volumes
- )
- containers, errors = parallel_execute(
- containers,
- recreate,
- lambda c: c.name,
- "Recreating",
+ if scale is not None and len(containers) > scale:
+ self._downscale(containers[scale:], timeout)
+ containers = containers[:scale]
+
+ def recreate(container):
+ return self.recreate_container(
+ container, timeout=timeout, attach_logs=not detached,
+ start_new_container=start, renew_anonymous_volumes=renew_anonymous_volumes
)
- for error in errors.values():
- raise OperationFailedError(error)
- if scale is not None and len(containers) < scale:
- containers.extend(self._execute_convergence_create(
- scale - len(containers), detached, start
- ))
- return containers
+ containers, errors = parallel_execute(
+ containers,
+ recreate,
+ lambda c: c.name,
+ "Recreating",
+ )
+ for error in errors.values():
+ raise OperationFailedError(error)
+
+ if scale is not None and len(containers) < scale:
+ containers.extend(self._execute_convergence_create(
+ scale - len(containers), detached, start
+ ))
+ return containers
def _execute_convergence_start(self, containers, scale, timeout, detached, start):
- if scale is not None and len(containers) > scale:
- self._downscale(containers[scale:], timeout)
- containers = containers[:scale]
- if start:
- _, errors = parallel_execute(
- containers,
- lambda c: self.start_container_if_stopped(c, attach_logs=not detached, quiet=True),
- lambda c: c.name,
- "Starting",
- )
+ if scale is not None and len(containers) > scale:
+ self._downscale(containers[scale:], timeout)
+ containers = containers[:scale]
+ if start:
+ stopped = [c for c in containers if not c.is_running]
+ _, errors = parallel_execute(
+ stopped,
+ lambda c: self.start_container_if_stopped(c, attach_logs=not detached, quiet=True),
+ lambda c: c.name,
+ "Starting",
+ )
- for error in errors.values():
- raise OperationFailedError(error)
+ for error in errors.values():
+ raise OperationFailedError(error)
- if scale is not None and len(containers) < scale:
- containers.extend(self._execute_convergence_create(
- scale - len(containers), detached, start
- ))
- return containers
+ if scale is not None and len(containers) < scale:
+ containers.extend(self._execute_convergence_create(
+ scale - len(containers), detached, start
+ ))
+ return containers
def _downscale(self, containers, timeout=None):
def stop_and_remove(container):
@@ -473,17 +547,21 @@ class Service(object):
def execute_convergence_plan(self, plan, timeout=None, detached=False,
start=True, scale_override=None,
- rescale=True, project_services=None,
- reset_container_image=False, renew_anonymous_volumes=False):
+ rescale=True, reset_container_image=False,
+ renew_anonymous_volumes=False, override_options=None):
(action, containers) = plan
scale = scale_override if scale_override is not None else self.scale_num
containers = sorted(containers, key=attrgetter('number'))
self.show_scale_warnings(scale)
- if action == 'create':
+ if action in ['create', 'one_off']:
return self._execute_convergence_create(
- scale, detached, start, project_services
+ scale,
+ detached,
+ start,
+ one_off=(action == 'one_off'),
+ override_options=override_options
)
# The create action always needs an initial scale, but otherwise,
@@ -533,7 +611,7 @@ class Service(object):
container.rename_to_tmp_name()
new_container = self.create_container(
previous_container=container if not renew_anonymous_volumes else None,
- number=container.labels.get(LABEL_CONTAINER_NUMBER),
+ number=container.number,
quiet=True,
)
if attach_logs:
@@ -564,7 +642,10 @@ class Service(object):
try:
container.start()
except APIError as ex:
- raise OperationFailedError("Cannot start service %s: %s" % (self.name, ex.explanation))
+ expl = binarystr_to_unicode(ex.explanation)
+ if "driver failed programming external connectivity" in expl:
+ log.warn("Host is already in use by another container")
+ raise OperationFailedError("Cannot start service {}: {}".format(self.name, expl))
return container
@property
@@ -621,47 +702,61 @@ class Service(object):
return json_hash(self.config_dict())
def config_dict(self):
+ def image_id():
+ try:
+ return self.image()['Id']
+ except NoSuchImageError:
+ return None
+
return {
'options': self.options,
- 'image_id': self.image()['Id'],
+ 'image_id': image_id(),
'links': self.get_link_names(),
'net': self.network_mode.id,
+ 'ipc_mode': self.ipc_mode.mode,
'networks': self.networks,
+ 'secrets': self.secrets,
'volumes_from': [
(v.source.name, v.mode)
for v in self.volumes_from if isinstance(v.source, Service)
- ],
+ ]
}
def get_dependency_names(self):
net_name = self.network_mode.service_name
pid_namespace = self.pid_mode.service_name
+ ipc_namespace = self.ipc_mode.service_name
return (
- self.get_linked_service_names() +
- self.get_volumes_from_names() +
- ([net_name] if net_name else []) +
- ([pid_namespace] if pid_namespace else []) +
- list(self.options.get('depends_on', {}).keys())
+ self.get_linked_service_names() +
+ self.get_volumes_from_names() +
+ ([net_name] if net_name else []) +
+ ([pid_namespace] if pid_namespace else []) +
+ ([ipc_namespace] if ipc_namespace else []) +
+ list(self.options.get('depends_on', {}).keys())
)
def get_dependency_configs(self):
net_name = self.network_mode.service_name
pid_namespace = self.pid_mode.service_name
+ ipc_namespace = self.ipc_mode.service_name
- configs = dict(
- [(name, None) for name in self.get_linked_service_names()]
+ configs = {
+ name: None for name in self.get_linked_service_names()
+ }
+ configs.update(
+ (name, None) for name in self.get_volumes_from_names()
)
- configs.update(dict(
- [(name, None) for name in self.get_volumes_from_names()]
- ))
configs.update({net_name: None} if net_name else {})
configs.update({pid_namespace: None} if pid_namespace else {})
+ configs.update({ipc_namespace: None} if ipc_namespace else {})
configs.update(self.options.get('depends_on', {}))
for svc, config in self.options.get('depends_on', {}).items():
if config['condition'] == CONDITION_STARTED:
configs[svc] = lambda s: True
elif config['condition'] == CONDITION_HEALTHY:
configs[svc] = lambda s: s.is_healthy()
+ elif config['condition'] == CONDITION_COMPLETED_SUCCESSFULLY:
+ configs[svc] = lambda s: s.is_completed_successfully()
else:
# The config schema already prevents this, but it might be
# bypassed if Compose is called programmatically.
@@ -682,14 +777,19 @@ class Service(object):
def get_volumes_from_names(self):
return [s.source.name for s in self.volumes_from if isinstance(s.source, Service)]
- # TODO: this would benefit from github.com/docker/docker/pull/14699
- # to remove the need to inspect every container
def _next_container_number(self, one_off=False):
- containers = self._fetch_containers(
- all=True,
- filters={'label': self.labels(one_off=one_off)}
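+ # One-off containers are named with a random slug rather than a
+ # sequential number, so there is no next number to allocate.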
+ if one_off:
+ return None
+ containers = itertools.chain(
+ self._fetch_containers(
+ all=True,
+ filters={'label': self.labels(one_off=False)}
+ ), self._fetch_containers(
+ all=True,
+ filters={'label': self.labels(one_off=False, legacy=True)}
+ )
)
- numbers = [c.number for c in containers]
+ numbers = [c.number for c in containers if c.number is not None]
return 1 if not numbers else max(numbers) + 1
def _fetch_containers(self, **fetch_options):
@@ -767,15 +867,16 @@ class Service(object):
one_off=False,
previous_container=None):
add_config_hash = (not one_off and not override_options)
+ slug = generate_random_id() if one_off else None
- container_options = dict(
- (k, self.options[k])
- for k in DOCKER_CONFIG_KEYS if k in self.options)
+ container_options = {
+ k: self.options[k]
+ for k in DOCKER_CONFIG_KEYS if k in self.options}
override_volumes = override_options.pop('volumes', [])
container_options.update(override_options)
if not container_options.get('name'):
- container_options['name'] = self.get_container_name(self.name, number, one_off)
+ container_options['name'] = self.get_container_name(self.name, number, slug)
container_options.setdefault('detach', True)
@@ -825,9 +926,11 @@ class Service(object):
container_options['labels'] = build_container_labels(
container_options.get('labels', {}),
- self.labels(one_off=one_off),
+ self.labels(one_off=one_off) + self.extra_labels,
number,
- self.config_hash if add_config_hash else None)
+ self.config_hash if add_config_hash else None,
+ slug
+ )
# Delete options which are only used in HostConfig
for key in HOST_CONFIG_KEYS:
@@ -858,10 +961,9 @@ class Service(object):
container_volumes, self.options.get('tmpfs') or [], previous_container,
container_mounts
)
- override_options['binds'] = binds
container_options['environment'].update(affinity)
- container_options['volumes'] = dict((v.internal, {}) for v in container_volumes or {})
+ container_options['volumes'] = {v.internal: {} for v in container_volumes or {}}
if version_gte(self.client.api_version, '1.30'):
override_options['mounts'] = [build_mount(v) for v in container_mounts] or None
else:
@@ -871,13 +973,13 @@ class Service(object):
if m.is_tmpfs:
override_options['tmpfs'].append(m.target)
else:
- override_options['binds'].append(m.legacy_repr())
+ binds.append(m.legacy_repr())
container_options['volumes'][m.target] = {}
secret_volumes = self.get_secret_volumes()
if secret_volumes:
if version_lt(self.client.api_version, '1.30'):
- override_options['binds'].extend(v.legacy_repr() for v in secret_volumes)
+ binds.extend(v.legacy_repr() for v in secret_volumes)
container_options['volumes'].update(
(v.target, {}) for v in secret_volumes
)
@@ -885,6 +987,9 @@ class Service(object):
override_options['mounts'] = override_options.get('mounts') or []
override_options['mounts'].extend([build_mount(v) for v in secret_volumes])
+ # Remove possible duplicates (see e.g. https://github.com/docker/compose/issues/5885).
+ # unique_everseen preserves order. (see https://github.com/docker/compose/issues/6091).
+ override_options['binds'] = list(unique_everseen(binds))
return container_options, override_options
def _get_container_host_config(self, override_options, one_off=False):
@@ -894,7 +999,7 @@ class Service(object):
blkio_config = convert_blkio_config(options.get('blkio_config', None))
log_config = get_log_config(logging_dict)
init_path = None
- if isinstance(options.get('init'), six.string_types):
+ if isinstance(options.get('init'), str):
init_path = options.get('init')
options['init'] = True
@@ -916,6 +1021,7 @@ class Service(object):
privileged=options.get('privileged', False),
network_mode=self.network_mode.mode,
devices=options.get('devices'),
+ device_requests=options.get('device_requests'),
dns=options.get('dns'),
dns_opt=options.get('dns_opt'),
dns_search=options.get('dns_search'),
@@ -932,7 +1038,7 @@ class Service(object):
read_only=options.get('read_only'),
pid_mode=self.pid_mode.mode,
security_opt=security_opt,
- ipc_mode=options.get('ipc'),
+ ipc_mode=self.ipc_mode.mode,
cgroup_parent=options.get('cgroup_parent'),
cpu_quota=options.get('cpu_quota'),
shm_size=options.get('shm_size'),
@@ -980,8 +1086,11 @@ class Service(object):
return [build_spec(secret) for secret in self.secrets]
def build(self, no_cache=False, pull=False, force_rm=False, memory=None, build_args_override=None,
- gzip=False):
- log.info('Building %s' % self.name)
+ gzip=False, rm=True, silent=False, cli=False, progress=None):
+ output_stream = open(os.devnull, 'w')
+ if not silent:
+ output_stream = sys.stdout
+ log.info('Building %s' % self.name)
build_opts = self.options.get('build', {})
@@ -992,27 +1101,23 @@ class Service(object):
for k, v in self._parse_proxy_config().items():
build_args.setdefault(k, v)
- # python2 os.stat() doesn't support unicode on some UNIX, so we
- # encode it to a bytestring to be safe
- path = build_opts.get('context')
- if not six.PY3 and not IS_WINDOWS_PLATFORM:
- path = path.encode('utf8')
-
- platform = self.options.get('platform')
- if platform and version_lt(self.client.api_version, '1.35'):
+ path = rewrite_build_path(build_opts.get('context'))
+ if self.platform and version_lt(self.client.api_version, '1.35'):
raise OperationFailedError(
'Impossible to perform platform-targeted builds for API version < 1.35'
)
- build_output = self.client.build(
+ builder = _ClientBuilder(self.client) if not cli else _CLIBuilder(progress)
+ return builder.build(
+ service=self,
path=path,
tag=self.image_name,
- rm=True,
+ rm=rm,
forcerm=force_rm,
pull=pull,
nocache=no_cache,
dockerfile=build_opts.get('dockerfile', None),
- cache_from=build_opts.get('cache_from', None),
+ cache_from=self.get_cache_from(build_opts),
labels=build_opts.get('labels', None),
buildargs=build_args,
network_mode=build_opts.get('network', None),
@@ -1024,57 +1129,41 @@ class Service(object):
},
gzip=gzip,
isolation=build_opts.get('isolation', self.options.get('isolation', None)),
- platform=platform,
- )
+ platform=self.platform,
+ output_stream=output_stream)
- try:
- all_events = stream_output(build_output, sys.stdout)
- except StreamOutputError as e:
- raise BuildError(self, six.text_type(e))
-
- # Ensure the HTTP connection is not reused for another
- # streaming command, as the Docker daemon can sometimes
- # complain about it
- self.client.close()
-
- image_id = None
-
- for event in all_events:
- if 'stream' in event:
- match = re.search(r'Successfully built ([0-9a-f]+)', event.get('stream', ''))
- if match:
- image_id = match.group(1)
-
- if image_id is None:
- raise BuildError(self, event if all_events else 'Unknown')
-
- return image_id
+ def get_cache_from(self, build_opts):
+ cache_from = build_opts.get('cache_from', None)
+ if cache_from is not None:
+ cache_from = [tag for tag in cache_from if tag]
+ return cache_from
def can_be_built(self):
return 'build' in self.options
- def labels(self, one_off=False):
+ def labels(self, one_off=False, legacy=False):
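+ # legacy=True reproduces the label set written by older Compose
+ # versions, which stripped '-' and '_' from the project name.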
+ proj_name = self.project if not legacy else re.sub(r'[_-]', '', self.project)
return [
- '{0}={1}'.format(LABEL_PROJECT, self.project),
- '{0}={1}'.format(LABEL_SERVICE, self.name),
- '{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False")
+ '{}={}'.format(LABEL_PROJECT, proj_name),
+ '{}={}'.format(LABEL_SERVICE, self.name),
+ '{}={}'.format(LABEL_ONE_OFF, "True" if one_off else "False"),
]
@property
def custom_container_name(self):
return self.options.get('container_name')
- def get_container_name(self, service_name, number, one_off=False):
- if self.custom_container_name and not one_off:
+ def get_container_name(self, service_name, number, slug=None):
+ if self.custom_container_name and slug is None:
return self.custom_container_name
container_name = build_container_name(
- self.project, service_name, number, one_off,
+ self.project, service_name, number, slug,
)
- ext_links_origins = [l.split(':')[0] for l in self.options.get('external_links', [])]
+ ext_links_origins = [link.split(':')[0] for link in self.options.get('external_links', [])]
if container_name in ext_links_origins:
raise DependencyError(
- 'Service {0} has a self-referential external link: {1}'.format(
+ 'Service {} has a self-referential external link: {}'.format(
self.name, container_name
)
)
@@ -1090,6 +1179,9 @@ class Service(object):
try:
self.client.remove_image(self.image_name)
return True
+ except ImageNotFound:
+ log.warning("Image %s not found.", self.image_name)
+ return False
except APIError as e:
log.error("Failed to remove image for service %s: %s", self.name, e)
return False
@@ -1121,7 +1213,21 @@ class Service(object):
return any(has_host_port(binding) for binding in self.options.get('ports', []))
- def pull(self, ignore_pull_failures=False, silent=False):
+ def _do_pull(self, repo, pull_kwargs, silent, ignore_pull_failures):
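+ # Generator wrapper around client.pull() so callers can either consume
+ # the raw event stream or reduce it to an image digest.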
+ try:
+ output = self.client.pull(repo, **pull_kwargs)
+ if silent:
+ with open(os.devnull, 'w') as devnull:
+ yield from stream_output(output, devnull)
+ else:
+ yield from stream_output(output, sys.stdout)
+ except (StreamOutputError, NotFound) as e:
+ if not ignore_pull_failures:
+ raise
+ else:
+ log.error(str(e))
+
+ def pull(self, ignore_pull_failures=False, silent=False, stream=False):
if 'image' not in self.options:
return
@@ -1129,29 +1235,20 @@ class Service(object):
kwargs = {
'tag': tag or 'latest',
'stream': True,
- 'platform': self.options.get('platform'),
+ 'platform': self.platform,
}
if not silent:
- log.info('Pulling %s (%s%s%s)...' % (self.name, repo, separator, tag))
+ log.info('Pulling {} ({}{}{})...'.format(self.name, repo, separator, tag))
if kwargs['platform'] and version_lt(self.client.api_version, '1.35'):
raise OperationFailedError(
- 'Impossible to perform platform-targeted builds for API version < 1.35'
+ 'Impossible to perform platform-targeted pulls for API version < 1.35'
)
- try:
- output = self.client.pull(repo, **kwargs)
- if silent:
- with open(os.devnull, 'w') as devnull:
- return progress_stream.get_digest_from_pull(
- stream_output(output, devnull))
- else:
- return progress_stream.get_digest_from_pull(
- stream_output(output, sys.stdout))
- except (StreamOutputError, NotFound) as e:
- if not ignore_pull_failures:
- raise
- else:
- log.error(six.text_type(e))
+
+ event_stream = self._do_pull(repo, kwargs, silent, ignore_pull_failures)
+ if stream:
+ return event_stream
+ return progress_stream.get_digest_from_pull(event_stream)
def push(self, ignore_push_failures=False):
if 'image' not in self.options or 'build' not in self.options:
@@ -1159,7 +1256,7 @@ class Service(object):
repo, tag, separator = parse_repository_tag(self.options['image'])
tag = tag or 'latest'
- log.info('Pushing %s (%s%s%s)...' % (self.name, repo, separator, tag))
+ log.info('Pushing {} ({}{}{})...'.format(self.name, repo, separator, tag))
output = self.client.push(repo, tag=tag, stream=True)
try:
@@ -1169,7 +1266,7 @@ class Service(object):
if not ignore_push_failures:
raise
else:
- log.error(six.text_type(e))
+ log.error(str(e))
def is_healthy(self):
""" Check that all containers for this service report healthy.
@@ -1189,6 +1286,21 @@ class Service(object):
raise HealthCheckFailed(ctnr.short_id)
return result
+ def is_completed_successfully(self):
+ """ Check that all containers for this service has completed successfully
+ Returns false if at least one container does not exited and
+ raises CompletedUnsuccessfully exception if at least one container
+ exited with non-zero exit code.
+ """
+ result = True
+ for ctnr in self.containers(stopped=True):
+ ctnr.inspect()
+ if ctnr.get('State.Status') != 'exited':
+ result = False
+ elif ctnr.exit_code != 0:
+ raise CompletedUnsuccessfully(ctnr.short_id, ctnr.exit_code)
+ return result
+
def _parse_proxy_config(self):
client = self.client
if 'proxies' not in client._general_configs:
@@ -1214,6 +1326,24 @@ class Service(object):
return result
+ def get_profiles(self):
+ if 'profiles' not in self.options:
+ return []
+
+ return self.options.get('profiles')
+
+ def enabled_for_profiles(self, enabled_profiles):
+ # if service has no profiles specified it is always enabled
+ if 'profiles' not in self.options:
+ return True
+
+ service_profiles = self.options.get('profiles')
+ for profile in enabled_profiles:
+ if profile in service_profiles:
+ return True
+
+ return False
+
def short_id_alias_exists(container, network):
aliases = container.get(
@@ -1221,7 +1351,47 @@ def short_id_alias_exists(container, network):
return container.short_id in aliases
-class PidMode(object):
+class IpcMode:
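+ # Mirrors PidMode/NetworkMode: wraps the raw `ipc` option value, with
+ # service- and container-backed variants below resolving to
+ # 'container:<id>'.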
+ def __init__(self, mode):
+ self._mode = mode
+
+ @property
+ def mode(self):
+ return self._mode
+
+ @property
+ def service_name(self):
+ return None
+
+
+class ServiceIpcMode(IpcMode):
+ def __init__(self, service):
+ self.service = service
+
+ @property
+ def service_name(self):
+ return self.service.name
+
+ @property
+ def mode(self):
+ containers = self.service.containers()
+ if containers:
+ return 'container:' + containers[0].id
+
+ log.warning(
+ "Service %s is trying to use reuse the IPC namespace "
+ "of another service that is not running." % (self.service_name)
+ )
+ return None
+
+
+class ContainerIpcMode(IpcMode):
+ def __init__(self, container):
+ self.container = container
+ self._mode = 'container:{}'.format(container.id)
+
+
+class PidMode:
def __init__(self, mode):
self._mode = mode
@@ -1248,7 +1418,7 @@ class ServicePidMode(PidMode):
if containers:
return 'container:' + containers[0].id
- log.warn(
- "Service %s is trying to use reuse the PID namespace "
+ log.warning(
+ "Service %s is trying to reuse the PID namespace "
"of another service that is not running." % (self.service_name)
)
@@ -1261,7 +1431,7 @@ class ContainerPidMode(PidMode):
self._mode = 'container:{}'.format(container.id)
-class NetworkMode(object):
+class NetworkMode:
"""A `standard` network mode (ex: host, bridge)"""
service_name = None
@@ -1276,7 +1446,7 @@ class NetworkMode(object):
mode = id
-class ContainerNetworkMode(object):
+class ContainerNetworkMode:
"""A network mode that uses a container's network stack."""
service_name = None
@@ -1293,7 +1463,7 @@ class ContainerNetworkMode(object):
return 'container:' + self.container.id
-class ServiceNetworkMode(object):
+class ServiceNetworkMode:
"""A network mode that uses a service's network stack."""
def __init__(self, service):
@@ -1311,19 +1481,21 @@ class ServiceNetworkMode(object):
if containers:
return 'container:' + containers[0].id
- log.warn("Service %s is trying to use reuse the network stack "
- "of another service that is not running." % (self.id))
+ log.warning("Service %s is trying to use reuse the network stack "
+ "of another service that is not running." % (self.id))
return None
# Names
-def build_container_name(project, service, number, one_off=False):
- bits = [project, service]
- if one_off:
- bits.append('run')
- return '_'.join(bits + [str(number)])
+def build_container_name(project, service, number, slug=None):
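+ # One-off containers are named "<project>_<service>_run_<slug>", while
+ # regular containers keep the "<project>_<service>_<number>" scheme.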
+ bits = [project.lstrip('-_'), service]
+ if slug:
+ bits.extend(['run', truncate_id(slug)])
+ else:
+ bits.append(str(number))
+ return '_'.join(bits)
# Images
@@ -1366,7 +1538,7 @@ def merge_volume_bindings(volumes, tmpfs, previous_container, mounts):
"""
affinity = {}
- volume_bindings = dict(
+ volume_bindings = OrderedDict(
build_volume_binding(volume)
for volume in volumes
if volume.external
@@ -1396,10 +1568,10 @@ def get_container_data_volumes(container, volumes_option, tmpfs_option, mounts_o
volumes = []
volumes_option = volumes_option or []
- container_mounts = dict(
- (mount['Destination'], mount)
+ container_mounts = {
+ mount['Destination']: mount
for mount in container.get('Mounts') or {}
- )
+ }
image_volumes = [
VolumeSpec.parse(volume)
@@ -1426,6 +1598,11 @@ def get_container_data_volumes(container, volumes_option, tmpfs_option, mounts_o
if not mount.get('Name'):
continue
+ # Volume (probably an image volume) is overridden by a mount in the service's config
+ # and would cause a duplicate mountpoint error
+ if volume.internal in [m.target for m in mounts_option]:
+ continue
+
# Copy existing volume from old container
volume = volume._replace(external=mount['Name'])
volumes.append(volume)
@@ -1446,17 +1623,17 @@ def get_container_data_volumes(container, volumes_option, tmpfs_option, mounts_o
def warn_on_masked_volume(volumes_option, container_volumes, service):
- container_volumes = dict(
- (volume.internal, volume.external)
- for volume in container_volumes)
+ container_volumes = {
+ volume.internal: volume.external
+ for volume in container_volumes}
for volume in volumes_option:
if (
- volume.external and
- volume.internal in container_volumes and
- container_volumes.get(volume.internal) != volume.external
+ volume.external and
+ volume.internal in container_volumes and
+ container_volumes.get(volume.internal) != volume.external
):
- log.warn((
+ log.warning((
"Service \"{service}\" is using volume \"{volume}\" from the "
"previous container. Host mapping \"{host_path}\" has no effect. "
"Remove the existing containers (with `docker-compose rm {service}`) "
@@ -1501,13 +1678,17 @@ def build_mount(mount_spec):
read_only=mount_spec.read_only, consistency=mount_spec.consistency, **kwargs
)
+
# Labels
-def build_container_labels(label_options, service_labels, number, config_hash):
+def build_container_labels(label_options, service_labels, number, config_hash, slug):
labels = dict(label_options or {})
labels.update(label.split('=', 1) for label in service_labels)
- labels[LABEL_CONTAINER_NUMBER] = str(number)
+ if number is not None:
+ labels[LABEL_CONTAINER_NUMBER] = str(number)
+ if slug is not None:
+ labels[LABEL_SLUG] = slug
labels[LABEL_VERSION] = __version__
if config_hash:
@@ -1524,8 +1705,8 @@ def build_ulimits(ulimit_config):
if not ulimit_config:
return None
ulimits = []
- for limit_name, soft_hard_values in six.iteritems(ulimit_config):
- if isinstance(soft_hard_values, six.integer_types):
+ for limit_name, soft_hard_values in ulimit_config.items():
+ if isinstance(soft_hard_values, int):
ulimits.append({'name': limit_name, 'soft': soft_hard_values, 'hard': soft_hard_values})
elif isinstance(soft_hard_values, dict):
ulimit_dict = {'name': limit_name}
@@ -1549,9 +1730,10 @@ def format_environment(environment):
def format_env(key, value):
if value is None:
return key
- if isinstance(value, six.binary_type):
+ if isinstance(value, bytes):
value = value.decode('utf-8')
return '{key}={value}'.format(key=key, value=value)
+
return [format_env(*item) for item in environment.items()]
@@ -1593,6 +1775,205 @@ def convert_blkio_config(blkio_config):
continue
arr = []
for item in blkio_config[field]:
- arr.append(dict([(k.capitalize(), v) for k, v in item.items()]))
+ arr.append({k.capitalize(): v for k, v in item.items()})
result[field] = arr
return result
+
+
+def rewrite_build_path(path):
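+ # On Windows, prefix local build context paths with the long-path marker
+ # so contexts deeper than MAX_PATH are accepted; URLs are left untouched.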
+ if IS_WINDOWS_PLATFORM and not is_url(path) and not path.startswith(WINDOWS_LONGPATH_PREFIX):
+ path = WINDOWS_LONGPATH_PREFIX + os.path.normpath(path)
+
+ return path
+
+
+class _ClientBuilder:
+ def __init__(self, client):
+ self.client = client
+
+ def build(self, service, path, tag=None, quiet=False, fileobj=None,
+ nocache=False, rm=False, timeout=None,
+ custom_context=False, encoding=None, pull=False,
+ forcerm=False, dockerfile=None, container_limits=None,
+ decode=False, buildargs=None, gzip=False, shmsize=None,
+ labels=None, cache_from=None, target=None, network_mode=None,
+ squash=None, extra_hosts=None, platform=None, isolation=None,
+ use_config_proxy=True, output_stream=sys.stdout):
+ build_output = self.client.build(
+ path=path,
+ tag=tag,
+ nocache=nocache,
+ rm=rm,
+ pull=pull,
+ forcerm=forcerm,
+ dockerfile=dockerfile,
+ labels=labels,
+ cache_from=cache_from,
+ buildargs=buildargs,
+ network_mode=network_mode,
+ target=target,
+ shmsize=shmsize,
+ extra_hosts=extra_hosts,
+ container_limits=container_limits,
+ gzip=gzip,
+ isolation=isolation,
+ platform=platform)
+
+ try:
+ all_events = list(stream_output(build_output, output_stream))
+ except StreamOutputError as e:
+ raise BuildError(service, str(e))
+
+ # Ensure the HTTP connection is not reused for another
+ # streaming command, as the Docker daemon can sometimes
+ # complain about it
+ self.client.close()
+
+ image_id = None
+
+ for event in all_events:
+ if 'stream' in event:
+ match = re.search(r'Successfully built ([0-9a-f]+)', event.get('stream', ''))
+ if match:
+ image_id = match.group(1)
+
+ if image_id is None:
+ raise BuildError(service, event if all_events else 'Unknown')
+
+ return image_id
+
+
+class _CLIBuilder:
+ def __init__(self, progress):
+ self._progress = progress
+
+ def build(self, service, path, tag=None, quiet=False, fileobj=None,
+ nocache=False, rm=False, timeout=None,
+ custom_context=False, encoding=None, pull=False,
+ forcerm=False, dockerfile=None, container_limits=None,
+ decode=False, buildargs=None, gzip=False, shmsize=None,
+ labels=None, cache_from=None, target=None, network_mode=None,
+ squash=None, extra_hosts=None, platform=None, isolation=None,
+ use_config_proxy=True, output_stream=sys.stdout):
+ """
+ Args:
+ service (Service): Service being built
+ path (str): Path to the directory containing the Dockerfile
+ buildargs (dict): A dictionary of build arguments
+ cache_from (:py:class:`list`): A list of images used for build
+ cache resolution
+ container_limits (dict): A dictionary of limits applied to each
+ container created by the build process. Valid keys:
+ - memory (int): set memory limit for build
+ - memswap (int): Total memory (memory + swap), -1 to disable
+ swap
+ - cpushares (int): CPU shares (relative weight)
+ - cpusetcpus (str): CPUs in which to allow execution, e.g.,
+ ``"0-3"``, ``"0,1"``
+ custom_context (bool): Optional if using ``fileobj``
+ decode (bool): If set to ``True``, the returned stream will be
+ decoded into dicts on the fly. Default ``False``
+ dockerfile (str): path within the build context to the Dockerfile
+ encoding (str): The encoding for a stream. Set to ``gzip`` for
+ compressing
+ extra_hosts (dict): Extra hosts to add to /etc/hosts in building
+ containers, as a mapping of hostname to IP address.
+ fileobj: A file object to use as the Dockerfile. (Or a file-like
+ object)
+ forcerm (bool): Always remove intermediate containers, even after
+ unsuccessful builds
+ isolation (str): Isolation technology used during build.
+ Default: `None`.
+ labels (dict): A dictionary of labels to set on the image
+ network_mode (str): networking mode for the run commands during
+ build
+ nocache (bool): Don't use the cache when set to ``True``
+ platform (str): Platform in the format ``os[/arch[/variant]]``
+ pull (bool): Downloads any updates to the FROM image in Dockerfiles
+ quiet (bool): Whether to return the status
+ rm (bool): Remove intermediate containers. The ``docker build``
+ command now defaults to ``--rm=true``, but we have kept the old
+ default of `False` to preserve backward compatibility
+ shmsize (int): Size of `/dev/shm` in bytes. The size must be
+ greater than 0. If omitted the system uses 64MB
+ squash (bool): Squash the resulting images layers into a
+ single layer.
+ tag (str): A tag to add to the final image
+ target (str): Name of the build-stage to build in a multi-stage
+ Dockerfile
+ timeout (int): HTTP timeout
+ use_config_proxy (bool): If ``True``, and if the docker client
+ configuration file (``~/.docker/config.json`` by default)
+ contains a proxy configuration, the corresponding environment
+ variables will be set in the container being built.
+ output_stream (writer): stream to use for build logs
+ Returns:
+ A generator for the build output.
+ """
+ if dockerfile and os.path.isdir(path):
+ dockerfile = os.path.join(path, dockerfile)
+ iidfile = tempfile.mktemp()
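+ # `docker build --iidfile` writes the final image ID to this file so it
+ # can be read back after the subprocess exits.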
+
+ command_builder = _CommandBuilder()
+ command_builder.add_params("--build-arg", buildargs)
+ command_builder.add_list("--cache-from", cache_from)
+ command_builder.add_arg("--file", dockerfile)
+ command_builder.add_flag("--force-rm", forcerm)
+ command_builder.add_params("--label", labels)
+ command_builder.add_arg("--memory", container_limits.get("memory"))
+ command_builder.add_arg("--network", network_mode)
+ command_builder.add_flag("--no-cache", nocache)
+ command_builder.add_arg("--progress", self._progress)
+ command_builder.add_flag("--pull", pull)
+ command_builder.add_arg("--tag", tag)
+ command_builder.add_arg("--target", target)
+ command_builder.add_arg("--iidfile", iidfile)
+ command_builder.add_arg("--platform", platform)
+ command_builder.add_arg("--isolation", isolation)
+
+ if extra_hosts:
+ if isinstance(extra_hosts, dict):
+ extra_hosts = ["{}:{}".format(host, ip) for host, ip in extra_hosts.items()]
+ for host in extra_hosts:
+ command_builder.add_arg("--add-host", "{}".format(host))
+
+ args = command_builder.build([path])
+
+ with subprocess.Popen(args, stdout=output_stream, stderr=sys.stderr,
+ universal_newlines=True) as p:
+ p.communicate()
+ if p.returncode != 0:
+ raise BuildError(service, "Build failed")
+
+ with open(iidfile) as f:
+ line = f.readline()
+ image_id = line.split(":")[1].strip()
+ os.remove(iidfile)
+
+ return image_id
+
+
+class _CommandBuilder:
+ def __init__(self):
+ self._args = ["docker", "build"]
+
+ def add_arg(self, name, value):
+ if value:
+ self._args.extend([name, str(value)])
+
+ def add_flag(self, name, flag):
+ if flag:
+ self._args.extend([name])
+
+ def add_params(self, name, params):
+ if params:
+ for key, val in params.items():
+ self._args.extend([name, "{}={}".format(key, val)])
+
+ def add_list(self, name, values):
+ if values:
+ for val in values:
+ self._args.extend([name, val])
+
+ def build(self, args):
+ return self._args + args
diff --git a/compose/timeparse.py b/compose/timeparse.py
index 16ef8a6d..47744562 100644
--- a/compose/timeparse.py
+++ b/compose/timeparse.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
'''
timeparse.py
(c) Will Roberts <wildwilhelm@gmail.com> 1 February, 2014
@@ -31,9 +30,6 @@ https://golang.org/pkg/time/#ParseDuration
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import re
HOURS = r'(?P<hours>[\d.]+)h'
@@ -57,14 +53,14 @@ TIMEFORMAT = r'{HOURS}{MINS}{SECS}{MILLI}{MICRO}{NANO}'.format(
NANO=opt(NANO),
)
-MULTIPLIERS = dict([
- ('hours', 60 * 60),
- ('mins', 60),
- ('secs', 1),
- ('milli', 1.0 / 1000),
- ('micro', 1.0 / 1000.0 / 1000),
- ('nano', 1.0 / 1000.0 / 1000.0 / 1000.0),
-])
+MULTIPLIERS = {
+ 'hours': 60 * 60,
+ 'mins': 60,
+ 'secs': 1,
+ 'milli': 1.0 / 1000,
+ 'micro': 1.0 / 1000.0 / 1000,
+ 'nano': 1.0 / 1000.0 / 1000.0 / 1000.0,
+}
def timeparse(sval):
@@ -93,4 +89,4 @@ def timeparse(sval):
def cast(value):
- return int(value, 10) if value.isdigit() else float(value)
+ return int(value) if value.isdigit() else float(value)
diff --git a/compose/utils.py b/compose/utils.py
index 956673b4..86af8f88 100644
--- a/compose/utils.py
+++ b/compose/utils.py
@@ -1,14 +1,9 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import codecs
import hashlib
-import json
import json.decoder
import logging
import ntpath
+import random
-import six
from docker.errors import DockerException
from docker.utils import parse_bytes as sdk_parse_bytes
@@ -21,12 +16,6 @@ json_decoder = json.JSONDecoder()
log = logging.getLogger(__name__)
-def get_output_stream(stream):
- if six.PY3:
- return stream
- return codecs.getwriter('utf-8')(stream)
-
-
def stream_as_text(stream):
"""Given a stream of bytes or text, if any of the items in the stream
are bytes convert them to text.
@@ -35,13 +24,13 @@ def stream_as_text(stream):
of byte streams.
"""
for data in stream:
- if not isinstance(data, six.text_type):
+ if not isinstance(data, str):
data = data.decode('utf-8', 'replace')
yield data
-def line_splitter(buffer, separator=u'\n'):
- index = buffer.find(six.text_type(separator))
+def line_splitter(buffer, separator='\n'):
+ index = buffer.find(str(separator))
if index == -1:
return None
return buffer[:index + 1], buffer[index + 1:]
@@ -56,7 +45,7 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
of the input.
"""
splitter = splitter or line_splitter
- buffered = six.text_type('')
+ buffered = ''
for data in stream_as_text(stream):
buffered += data
@@ -127,7 +116,7 @@ def parse_nanoseconds_int(value):
def build_string_dict(source_dict):
- return dict((k, str(v if v is not None else '')) for k, v in source_dict.items())
+ return {k: str(v if v is not None else '') for k, v in source_dict.items()}
def splitdrive(path):
@@ -151,3 +140,52 @@ def unquote_path(s):
if s[0] == '"' and s[-1] == '"':
return s[1:-1]
return s
+
+
+def generate_random_id():
+ while True:
+ val = hex(random.getrandbits(32 * 8))[2:-1]
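+ # Keep drawing until the truncated id is not purely numeric, so the
+ # short form can never be mistaken for a number.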
+ try:
+ int(truncate_id(val))
+ continue
+ except ValueError:
+ return val
+
+
+def truncate_id(value):
+ if ':' in value:
+ value = value[value.index(':') + 1:]
+ if len(value) > 12:
+ return value[:12]
+ return value
+
+
+def unique_everseen(iterable, key=lambda x: x):
+ "List unique elements, preserving order. Remember all elements ever seen."
+ seen = set()
+ for element in iterable:
+ unique_key = key(element)
+ if unique_key not in seen:
+ seen.add(unique_key)
+ yield element
+
+
+def truncate_string(s, max_chars=35):
+ if len(s) > max_chars:
+ return s[:max_chars - 2] + '...'
+ return s
+
+
+def filter_attached_for_up(items, service_names, attach_dependencies=False,
+ item_to_service_name=lambda x: x):
+ """This function contains the logic of choosing which services to
+ attach when doing docker-compose up. It may be used both with containers
+ and services, and any other entities that map to service names -
+ this mapping is provided by item_to_service_name."""
+ if attach_dependencies or not service_names:
+ return items
+
+ return [
+ item
+ for item in items if item_to_service_name(item) in service_names
+ ]
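A sketch of the new helpers in action (the digest and service names below
are hypothetical):

    truncate_id('sha256:4e38e38c8ce0b8d9041a')   # -> '4e38e38c8ce0'
    # ('sha256:' prefix dropped, result capped at the usual 12 characters)

    items = ['web', 'db', 'cache']
    filter_attached_for_up(items, ['web'])       # -> ['web']
    filter_attached_for_up(items, ['web'],
                           attach_dependencies=True)  # -> all three items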
diff --git a/compose/version.py b/compose/version.py
index 0532e16c..c039263a 100644
--- a/compose/version.py
+++ b/compose/version.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from distutils.version import LooseVersion
diff --git a/compose/volume.py b/compose/volume.py
index 6bf18404..5f36e432 100644
--- a/compose/volume.py
+++ b/compose/volume.py
@@ -1,20 +1,22 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import logging
+import re
+from itertools import chain
from docker.errors import NotFound
from docker.utils import version_lt
+from . import __version__
from .config import ConfigurationError
from .config.types import VolumeSpec
from .const import LABEL_PROJECT
+from .const import LABEL_VERSION
from .const import LABEL_VOLUME
+
log = logging.getLogger(__name__)
-class Volume(object):
+class Volume:
def __init__(self, client, project, name, driver=None, driver_opts=None,
external=False, labels=None, custom_name=False):
self.client = client
@@ -25,6 +27,7 @@ class Volume(object):
self.external = external
self.labels = labels
self.custom_name = custom_name
+ self.legacy = None
def create(self):
return self.client.create_volume(
@@ -33,17 +36,20 @@ class Volume(object):
def remove(self):
if self.external:
- log.info("Volume %s is external, skipping", self.full_name)
+ log.info("Volume %s is external, skipping", self.true_name)
return
- log.info("Removing volume %s", self.full_name)
- return self.client.remove_volume(self.full_name)
+ log.info("Removing volume %s", self.true_name)
+ return self.client.remove_volume(self.true_name)
- def inspect(self):
+ def inspect(self, legacy=None):
+ if legacy:
+ return self.client.inspect_volume(self.legacy_full_name)
return self.client.inspect_volume(self.full_name)
def exists(self):
+ self._set_legacy_flag()
try:
- self.inspect()
+ self.inspect(legacy=self.legacy)
except NotFound:
return False
return True
@@ -52,7 +58,22 @@ class Volume(object):
def full_name(self):
if self.custom_name:
return self.name
- return '{0}_{1}'.format(self.project, self.name)
+ return '{}_{}'.format(self.project.lstrip('-_'), self.name)
+
+ @property
+ def legacy_full_name(self):
+ if self.custom_name:
+ return self.name
+ return '{}_{}'.format(
+ re.sub(r'[_-]', '', self.project), self.name
+ )
+
+ @property
+ def true_name(self):
+ self._set_legacy_flag()
+ if self.legacy:
+ return self.legacy_full_name
+ return self.full_name
@property
def _labels(self):
@@ -62,11 +83,21 @@ class Volume(object):
labels.update({
LABEL_PROJECT: self.project,
LABEL_VOLUME: self.name,
+ LABEL_VERSION: __version__,
})
return labels
+ def _set_legacy_flag(self):
+ if self.legacy is not None:
+ return
+ try:
+ data = self.inspect(legacy=True)
+ self.legacy = data is not None
+ except NotFound:
+ self.legacy = False
+
-class ProjectVolumes(object):
+class ProjectVolumes:
def __init__(self, volumes):
self.volumes = volumes
@@ -94,7 +125,7 @@ class ProjectVolumes(object):
try:
volume.remove()
except NotFound:
- log.warn("Volume %s not found.", volume.full_name)
+ log.warning("Volume %s not found.", volume.true_name)
def initialize(self):
try:
@@ -102,7 +133,7 @@ class ProjectVolumes(object):
volume_exists = volume.exists()
if volume.external:
log.debug(
- 'Volume {0} declared as external. No new '
+ 'Volume {} declared as external. No new '
'volume will be created.'.format(volume.name)
)
if not volume_exists:
@@ -118,16 +149,16 @@ class ProjectVolumes(object):
if not volume_exists:
log.info(
- 'Creating volume "{0}" with {1} driver'.format(
+ 'Creating volume "{}" with {} driver'.format(
volume.full_name, volume.driver or 'default'
)
)
volume.create()
else:
- check_remote_volume_config(volume.inspect(), volume)
+ check_remote_volume_config(volume.inspect(legacy=volume.legacy), volume)
except NotFound:
raise ConfigurationError(
- 'Volume %s specifies nonexistent driver %s' % (volume.name, volume.driver)
+ 'Volume {} specifies nonexistent driver {}'.format(volume.name, volume.driver)
)
def namespace_spec(self, volume_spec):
@@ -136,15 +167,15 @@ class ProjectVolumes(object):
if isinstance(volume_spec, VolumeSpec):
volume = self.volumes[volume_spec.external]
- return volume_spec._replace(external=volume.full_name)
+ return volume_spec._replace(external=volume.true_name)
else:
- volume_spec.source = self.volumes[volume_spec.source].full_name
+ volume_spec.source = self.volumes[volume_spec.source].true_name
return volume_spec
class VolumeConfigChangedError(ConfigurationError):
def __init__(self, local, property_name, local_value, remote_value):
- super(VolumeConfigChangedError, self).__init__(
+ super().__init__(
'Configuration for volume {vol_name} specifies {property_name} '
'{local_value}, but a volume with the same name uses a different '
'{property_name} ({remote_value}). If you wish to use the new '
@@ -152,7 +183,7 @@ class VolumeConfigChangedError(ConfigurationError):
'first:\n$ docker volume rm {full_name}'.format(
vol_name=local.name, property_name=property_name,
local_value=local_value, remote_value=remote_value,
- full_name=local.full_name
+ full_name=local.true_name
)
)
@@ -162,7 +193,7 @@ def check_remote_volume_config(remote, local):
raise VolumeConfigChangedError(local, 'driver', local.driver, remote.get('Driver'))
local_opts = local.driver_opts or {}
remote_opts = remote.get('Options') or {}
- for k in set.union(set(remote_opts.keys()), set(local_opts.keys())):
+ for k in set(chain(remote_opts, local_opts)):
if k.startswith('com.docker.'): # These options are set internally
continue
if remote_opts.get(k) != local_opts.get(k):
@@ -172,11 +203,11 @@ def check_remote_volume_config(remote, local):
local_labels = local.labels or {}
remote_labels = remote.get('Labels') or {}
- for k in set.union(set(remote_labels.keys()), set(local_labels.keys())):
+ for k in set(chain(remote_labels, local_labels)):
if k.startswith('com.docker.'): # We are only interested in user-specified labels
continue
if remote_labels.get(k) != local_labels.get(k):
- log.warn(
+ log.warning(
'Volume {}: label "{}" has changed. It may need to be'
' recreated.'.format(local.name, k)
)
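The name resolution added here can be summarised as follows (a sketch; the
project name 'my-project' is hypothetical):

    v = Volume(client=None, project='my-project', name='data')
    v.full_name          # 'my-project_data' (separators kept, leading ones stripped)
    v.legacy_full_name   # 'myproject_data'  ('_' and '-' removed, matching how
                         #  older releases named volumes)
    # true_name probes the engine once via _set_legacy_flag() and then
    # resolves to whichever of the two names actually exists.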
diff --git a/contrib/completion/bash/docker-compose b/contrib/completion/bash/docker-compose
index 90c9ce5f..c3b6157d 100644
--- a/contrib/completion/bash/docker-compose
+++ b/contrib/completion/bash/docker-compose
@@ -81,41 +81,24 @@ __docker_compose_nospace() {
type compopt &>/dev/null && compopt -o nospace
}
-# Extracts all service names from the compose file.
-___docker_compose_all_services_in_compose_file() {
- __docker_compose_q config --services
-}
-
-# All services, even those without an existing container
-__docker_compose_services_all() {
- COMPREPLY=( $(compgen -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") )
-}
-# All services that are defined by a Dockerfile reference
-__docker_compose_services_from_build() {
- COMPREPLY=( $(compgen -W "$(__docker_compose_q ps --services --filter "source=build")" -- "$cur") )
+# Outputs a list of all defined services, regardless of their running state.
+# Arguments for `docker-compose ps` may be passed in order to filter the service list,
+# e.g. `status=running`.
+__docker_compose_services() {
+ __docker_compose_q ps --services "$@"
}
-# All services that are defined by an image
-__docker_compose_services_from_image() {
- COMPREPLY=( $(compgen -W "$(__docker_compose_q ps --services --filter "source=image")" -- "$cur") )
-}
-
-# The services for which at least one paused container exists
-__docker_compose_services_paused() {
- names=$(__docker_compose_q ps --services --filter "status=paused")
- COMPREPLY=( $(compgen -W "$names" -- "$cur") )
+# Applies completion of services based on the current value of `$cur`.
+# Arguments for `docker-compose ps` may be passed in order to filter the service list,
+# see `__docker_compose_services`.
+__docker_compose_complete_services() {
+ COMPREPLY=( $(compgen -W "$(__docker_compose_services "$@")" -- "$cur") )
}
# The services for which at least one running container exists
-__docker_compose_services_running() {
- names=$(__docker_compose_q ps --services --filter "status=running")
- COMPREPLY=( $(compgen -W "$names" -- "$cur") )
-}
-
-# The services for which at least one stopped container exists
-__docker_compose_services_stopped() {
- names=$(__docker_compose_q ps --services --filter "status=stopped")
+__docker_compose_complete_running_services() {
+ local names=$(__docker_compose_services --filter status=running)
COMPREPLY=( $(compgen -W "$names" -- "$cur") )
}
@@ -127,33 +110,35 @@ _docker_compose_build() {
__docker_compose_nospace
return
;;
+ --memory|-m)
+ return
+ ;;
esac
case "$cur" in
-*)
- COMPREPLY=( $( compgen -W "--build-arg --force-rm --help --memory --no-cache --pull" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "--build-arg --compress --force-rm --help --memory -m --no-cache --no-rm --pull --parallel -q --quiet" -- "$cur" ) )
;;
*)
- __docker_compose_services_from_build
+ __docker_compose_complete_services --filter source=build
;;
esac
}
-_docker_compose_bundle() {
+_docker_compose_config() {
case "$prev" in
- --output|-o)
- _filedir
+ --hash)
+ if [[ $cur == \\* ]] ; then
+ COMPREPLY=( '\*' )
+ else
+ COMPREPLY=( $(compgen -W "$(__docker_compose_services) \\\* " -- "$cur") )
+ fi
return
;;
esac
- COMPREPLY=( $( compgen -W "--push-images --help --output -o" -- "$cur" ) )
-}
-
-
-_docker_compose_config() {
- COMPREPLY=( $( compgen -W "--help --quiet -q --resolve-image-digests --services --volumes" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "--hash --help --no-interpolate --profiles --quiet -q --resolve-image-digests --services --volumes" -- "$cur" ) )
}
@@ -163,7 +148,7 @@ _docker_compose_create() {
COMPREPLY=( $( compgen -W "--build --force-recreate --help --no-build --no-recreate" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -179,14 +164,26 @@ _docker_compose_docker_compose() {
_filedir "y?(a)ml"
return
;;
+ --ansi)
+ COMPREPLY=( $( compgen -W "never always auto" -- "$cur" ) )
+ return
+ ;;
--log-level)
COMPREPLY=( $( compgen -W "debug info warning error critical" -- "$cur" ) )
return
;;
+ --profile)
+ COMPREPLY=( $( compgen -W "$(__docker_compose_q config --profiles)" -- "$cur" ) )
+ return
+ ;;
--project-directory)
_filedir -d
return
;;
+ --env-file)
+ _filedir
+ return
+ ;;
$(__docker_compose_to_extglob "$daemon_options_with_args") )
return
;;
@@ -234,7 +231,7 @@ _docker_compose_events() {
COMPREPLY=( $( compgen -W "--help --json" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -242,17 +239,17 @@ _docker_compose_events() {
_docker_compose_exec() {
case "$prev" in
- --index|--user|-u)
+ --index|--user|-u|--workdir|-w)
return
;;
esac
case "$cur" in
-*)
- COMPREPLY=( $( compgen -W "-d --detach --help --index --privileged -T --user -u" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "-d --detach --help --index --privileged -T --user -u --workdir -w" -- "$cur" ) )
;;
*)
- __docker_compose_services_running
+ __docker_compose_complete_running_services
;;
esac
}
@@ -268,7 +265,7 @@ _docker_compose_images() {
COMPREPLY=( $( compgen -W "--help --quiet -q" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -286,7 +283,7 @@ _docker_compose_kill() {
COMPREPLY=( $( compgen -W "--help -s" -- "$cur" ) )
;;
*)
- __docker_compose_services_running
+ __docker_compose_complete_running_services
;;
esac
}
@@ -301,10 +298,10 @@ _docker_compose_logs() {
case "$cur" in
-*)
- COMPREPLY=( $( compgen -W "--follow -f --help --no-color --tail --timestamps -t" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "--follow -f --help --no-color --no-log-prefix --tail --timestamps -t" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -316,7 +313,7 @@ _docker_compose_pause() {
COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
;;
*)
- __docker_compose_services_running
+ __docker_compose_complete_running_services
;;
esac
}
@@ -338,7 +335,7 @@ _docker_compose_port() {
COMPREPLY=( $( compgen -W "--help --index --protocol" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -367,10 +364,10 @@ _docker_compose_ps() {
case "$cur" in
-*)
- COMPREPLY=( $( compgen -W "--help --quiet -q --services --filter" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "--all -a --filter --help --quiet -q --services" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -379,10 +376,10 @@ _docker_compose_ps() {
_docker_compose_pull() {
case "$cur" in
-*)
- COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --parallel --quiet -q" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --no-parallel --quiet -q" -- "$cur" ) )
;;
*)
- __docker_compose_services_from_image
+ __docker_compose_complete_services --filter source=image
;;
esac
}
@@ -394,7 +391,7 @@ _docker_compose_push() {
COMPREPLY=( $( compgen -W "--help --ignore-push-failures" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -412,7 +409,7 @@ _docker_compose_restart() {
COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
;;
*)
- __docker_compose_services_running
+ __docker_compose_complete_running_services
;;
esac
}
@@ -425,9 +422,9 @@ _docker_compose_rm() {
;;
*)
if __docker_compose_has_option "--stop|-s" ; then
- __docker_compose_services_all
+ __docker_compose_complete_services
else
- __docker_compose_services_stopped
+ __docker_compose_complete_services --filter status=stopped
fi
;;
esac
@@ -451,7 +448,7 @@ _docker_compose_run() {
COMPREPLY=( $( compgen -W "--detach -d --entrypoint -e --help --label -l --name --no-deps --publish -p --rm --service-ports -T --use-aliases --user -u --volume -v --workdir -w" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -473,7 +470,7 @@ _docker_compose_scale() {
COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
;;
*)
- COMPREPLY=( $(compgen -S "=" -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") )
+ COMPREPLY=( $(compgen -S "=" -W "$(__docker_compose_services)" -- "$cur") )
__docker_compose_nospace
;;
esac
@@ -486,7 +483,7 @@ _docker_compose_start() {
COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
;;
*)
- __docker_compose_services_stopped
+ __docker_compose_complete_services --filter status=stopped
;;
esac
}
@@ -504,7 +501,7 @@ _docker_compose_stop() {
COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
;;
*)
- __docker_compose_services_running
+ __docker_compose_complete_running_services
;;
esac
}
@@ -516,7 +513,7 @@ _docker_compose_top() {
COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
;;
*)
- __docker_compose_services_running
+ __docker_compose_complete_running_services
;;
esac
}
@@ -528,7 +525,7 @@ _docker_compose_unpause() {
COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
;;
*)
- __docker_compose_services_paused
+ __docker_compose_complete_services --filter status=paused
;;
esac
}
@@ -541,11 +538,11 @@ _docker_compose_up() {
return
;;
--exit-code-from)
- __docker_compose_services_all
+ __docker_compose_complete_services
return
;;
--scale)
- COMPREPLY=( $(compgen -S "=" -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") )
+ COMPREPLY=( $(compgen -S "=" -W "$(__docker_compose_services)" -- "$cur") )
__docker_compose_nospace
return
;;
@@ -556,10 +553,10 @@ _docker_compose_up() {
case "$cur" in
-*)
- COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --attach-dependencies --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-log-prefix --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) )
;;
*)
- __docker_compose_services_all
+ __docker_compose_complete_services
;;
esac
}
@@ -580,7 +577,6 @@ _docker_compose() {
local commands=(
build
- bundle
config
create
down
@@ -615,6 +611,8 @@ _docker_compose() {
--tlsverify
"
local daemon_options_with_args="
+ --context -c
+ --env-file
--file -f
--host -H
--project-directory
@@ -624,9 +622,11 @@ _docker_compose() {
--tlskey
"
- # These options are require special treatment when searching the command.
+ # These options require special treatment when searching the command.
local top_level_options_with_args="
+ --ansi
--log-level
+ --profile
"
COMPREPLY=()
diff --git a/contrib/completion/fish/docker-compose.fish b/contrib/completion/fish/docker-compose.fish
index 69ecc505..9183a098 100644
--- a/contrib/completion/fish/docker-compose.fish
+++ b/contrib/completion/fish/docker-compose.fish
@@ -12,6 +12,7 @@ end
complete -c docker-compose -s f -l file -r -d 'Specify an alternate compose file'
complete -c docker-compose -s p -l project-name -x -d 'Specify an alternate project name'
+complete -c docker-compose -l env-file -r -d 'Specify an alternate environment file (default: .env)'
complete -c docker-compose -l verbose -d 'Show more output'
complete -c docker-compose -s H -l host -x -d 'Daemon socket to connect to'
complete -c docker-compose -l tls -d 'Use TLS; implied by --tlsverify'
@@ -20,5 +21,7 @@ complete -c docker-compose -l tlscert -r -d 'Path to TLS certif
complete -c docker-compose -l tlskey -r -d 'Path to TLS key file'
complete -c docker-compose -l tlsverify -d 'Use TLS and verify the remote'
complete -c docker-compose -l skip-hostname-check -d "Don't check the daemon's hostname against the name specified in the client certificate (for example if your docker host is an IP address)"
+complete -c docker-compose -l no-ansi -d 'Do not print ANSI control characters'
+complete -c docker-compose -l ansi -a 'never always auto' -d 'Control when to print ANSI control characters'
complete -c docker-compose -s h -l help -d 'Print usage'
complete -c docker-compose -s v -l version -d 'Print version and exit'
diff --git a/contrib/completion/zsh/_docker-compose b/contrib/completion/zsh/_docker-compose
index aba36770..c6b73350 100644..100755
--- a/contrib/completion/zsh/_docker-compose
+++ b/contrib/completion/zsh/_docker-compose
@@ -23,7 +23,7 @@ __docker-compose_all_services_in_compose_file() {
local already_selected
local -a services
already_selected=$(echo $words | tr " " "|")
- __docker-compose_q config --services \
+ __docker-compose_q ps --services "$@" \
| grep -Ev "^(${already_selected})$"
}
@@ -31,125 +31,42 @@ __docker-compose_all_services_in_compose_file() {
__docker-compose_services_all() {
[[ $PREFIX = -* ]] && return 1
integer ret=1
- services=$(__docker-compose_all_services_in_compose_file)
+ services=$(__docker-compose_all_services_in_compose_file "$@")
_alternative "args:services:($services)" && ret=0
return ret
}
-# All services that have an entry with the given key in their docker-compose.yml section
-__docker-compose_services_with_key() {
- local already_selected
- local -a buildable
- already_selected=$(echo $words | tr " " "|")
- # flatten sections to one line, then filter lines containing the key and return section name.
- __docker-compose_q config \
- | sed -n -e '/^services:/,/^[^ ]/p' \
- | sed -n 's/^ //p' \
- | awk '/^[a-zA-Z0-9]/{printf "\n"};{printf $0;next;}' \
- | grep " \+$1:" \
- | cut -d: -f1 \
- | grep -Ev "^(${already_selected})$"
-}
-
# All services that are defined by a Dockerfile reference
__docker-compose_services_from_build() {
[[ $PREFIX = -* ]] && return 1
- integer ret=1
- buildable=$(__docker-compose_services_with_key build)
- _alternative "args:buildable services:($buildable)" && ret=0
-
- return ret
+ __docker-compose_services_all --filter source=build
}
# All services that are defined by an image
__docker-compose_services_from_image() {
[[ $PREFIX = -* ]] && return 1
- integer ret=1
- pullable=$(__docker-compose_services_with_key image)
- _alternative "args:pullable services:($pullable)" && ret=0
-
- return ret
-}
-
-__docker-compose_get_services() {
- [[ $PREFIX = -* ]] && return 1
- integer ret=1
- local kind
- declare -a running paused stopped lines args services
-
- docker_status=$(docker ps > /dev/null 2>&1)
- if [ $? -ne 0 ]; then
- _message "Error! Docker is not running."
- return 1
- fi
-
- kind=$1
- shift
- [[ $kind =~ (stopped|all) ]] && args=($args -a)
-
- lines=(${(f)"$(_call_program commands docker $docker_options ps --format 'table' $args)"})
- services=(${(f)"$(_call_program commands docker-compose 2>/dev/null $compose_options ps -q)"})
-
- # Parse header line to find columns
- local i=1 j=1 k header=${lines[1]}
- declare -A begin end
- while (( j < ${#header} - 1 )); do
- i=$(( j + ${${header[$j,-1]}[(i)[^ ]]} - 1 ))
- j=$(( i + ${${header[$i,-1]}[(i) ]} - 1 ))
- k=$(( j + ${${header[$j,-1]}[(i)[^ ]]} - 2 ))
- begin[${header[$i,$((j-1))]}]=$i
- end[${header[$i,$((j-1))]}]=$k
- done
- lines=(${lines[2,-1]})
-
- # Container ID
- local line s name
- local -a names
- for line in $lines; do
- if [[ ${services[@]} == *"${line[${begin[CONTAINER ID]},${end[CONTAINER ID]}]%% ##}"* ]]; then
- names=(${(ps:,:)${${line[${begin[NAMES]},-1]}%% *}})
- for name in $names; do
- s="${${name%_*}#*_}:${(l:15:: :::)${${line[${begin[CREATED]},${end[CREATED]}]/ ago/}%% ##}}"
- s="$s, ${line[${begin[CONTAINER ID]},${end[CONTAINER ID]}]%% ##}"
- s="$s, ${${${line[${begin[IMAGE]},${end[IMAGE]}]}/:/\\:}%% ##}"
- if [[ ${line[${begin[STATUS]},${end[STATUS]}]} = Exit* ]]; then
- stopped=($stopped $s)
- else
- if [[ ${line[${begin[STATUS]},${end[STATUS]}]} = *\(Paused\)* ]]; then
- paused=($paused $s)
- fi
- running=($running $s)
- fi
- done
- fi
- done
-
- [[ $kind =~ (running|all) ]] && _describe -t services-running "running services" running "$@" && ret=0
- [[ $kind =~ (paused|all) ]] && _describe -t services-paused "paused services" paused "$@" && ret=0
- [[ $kind =~ (stopped|all) ]] && _describe -t services-stopped "stopped services" stopped "$@" && ret=0
-
- return ret
+ __docker-compose_services_all --filter source=image
}
__docker-compose_pausedservices() {
[[ $PREFIX = -* ]] && return 1
- __docker-compose_get_services paused "$@"
+ __docker-compose_services_all --filter status=paused
}
__docker-compose_stoppedservices() {
[[ $PREFIX = -* ]] && return 1
- __docker-compose_get_services stopped "$@"
+ __docker-compose_services_all --filter status=stopped
}
__docker-compose_runningservices() {
[[ $PREFIX = -* ]] && return 1
- __docker-compose_get_services running "$@"
+ __docker-compose_services_all --filter status=running
}
__docker-compose_services() {
[[ $PREFIX = -* ]] && return 1
- __docker-compose_get_services all "$@"
+ __docker-compose_services_all
}
__docker-compose_caching_policy() {
@@ -196,24 +113,22 @@ __docker-compose_subcommand() {
$opts_help \
"*--build-arg=[Set build-time variables for one service.]:<varname>=<value>: " \
'--force-rm[Always remove intermediate containers.]' \
- '--memory[Memory limit for the build container.]' \
+ '(--quiet -q)'{--quiet,-q}'[Curb build output]' \
+ '(--memory -m)'{--memory,-m}'[Memory limit for the build container.]' \
'--no-cache[Do not use cache when building the image.]' \
'--pull[Always attempt to pull a newer version of the image.]' \
+ '--compress[Compress the build context using gzip.]' \
+ '--parallel[Build images in parallel.]' \
'*:services:__docker-compose_services_from_build' && ret=0
;;
- (bundle)
- _arguments \
- $opts_help \
- '--push-images[Automatically push images for any services which have a `build` option specified.]' \
- '(--output -o)'{--output,-o}'[Path to write the bundle file to. Defaults to "<project name>.dab".]:file:_files' && ret=0
- ;;
(config)
_arguments \
$opts_help \
'(--quiet -q)'{--quiet,-q}"[Only validate the configuration, don't print anything.]" \
'--resolve-image-digests[Pin image tags to digests.]' \
'--services[Print the service names, one per line.]' \
- '--volumes[Print the volume names, one per line.]' && ret=0
+ '--volumes[Print the volume names, one per line.]' \
+ '--hash[Print the service config hash, one per line. Set "service1,service2" for a list of specified services.]' && ret=0
;;
(create)
_arguments \
@@ -222,11 +137,12 @@ __docker-compose_subcommand() {
$opts_no_recreate \
$opts_no_build \
"(--no-build)--build[Build images before creating containers.]" \
- '*:services:__docker-compose_services_all' && ret=0
+ '*:services:__docker-compose_services' && ret=0
;;
(down)
_arguments \
$opts_help \
+ $opts_timeout \
"--rmi[Remove images. Type must be one of: 'all': Remove all images used by any service. 'local': Remove only images that don't have a custom tag set by the \`image\` field.]:type:(all local)" \
'(-v --volumes)'{-v,--volumes}"[Remove named volumes declared in the \`volumes\` section of the Compose file and anonymous volumes attached to containers.]" \
$opts_remove_orphans && ret=0
@@ -235,16 +151,18 @@ __docker-compose_subcommand() {
_arguments \
$opts_help \
'--json[Output events as a stream of json objects]' \
- '*:services:__docker-compose_services_all' && ret=0
+ '*:services:__docker-compose_services' && ret=0
;;
(exec)
_arguments \
$opts_help \
'-d[Detached mode: Run command in the background.]' \
'--privileged[Give extended privileges to the process.]' \
- '(-u --user)'{-u,--user=}'[Run the command as this user.]:username:_users' \
+ '(-u --user)'{-u,--user=}'[Run the command as this user.]:username:_users' \
'-T[Disable pseudo-tty allocation. By default `docker-compose exec` allocates a TTY.]' \
'--index=[Index of the container if there are multiple instances of a service \[default: 1\]]:index: ' \
+ '*'{-e,--env}'[KEY=VAL Set an environment variable (can be used multiple times)]:environment variable KEY=VAL: ' \
+ '(-w --workdir)'{-w,--workdir=}'[Working directory inside the container]:workdir: ' \
'(-):running services:__docker-compose_runningservices' \
'(-):command: _command_names -e' \
'*::arguments: _normal' && ret=0
@@ -252,12 +170,12 @@ __docker-compose_subcommand() {
(help)
_arguments ':subcommand:__docker-compose_commands' && ret=0
;;
- (images)
- _arguments \
- $opts_help \
- '-q[Only display IDs]' \
- '*:services:__docker-compose_services_all' && ret=0
- ;;
+ (images)
+ _arguments \
+ $opts_help \
+ '-q[Only display IDs]' \
+ '*:services:__docker-compose_services' && ret=0
+ ;;
(kill)
_arguments \
$opts_help \
@@ -271,7 +189,7 @@ __docker-compose_subcommand() {
$opts_no_color \
'--tail=[Number of lines to show from the end of the logs for each container.]:number of lines: ' \
'(-t --timestamps)'{-t,--timestamps}'[Show timestamps]' \
- '*:services:__docker-compose_services_all' && ret=0
+ '*:services:__docker-compose_services' && ret=0
;;
(pause)
_arguments \
@@ -290,12 +208,16 @@ __docker-compose_subcommand() {
_arguments \
$opts_help \
'-q[Only display IDs]' \
- '*:services:__docker-compose_services_all' && ret=0
+ '--filter KEY=VAL[Filter services by a property]:<filtername>=<value>:' \
+ '*:services:__docker-compose_services' && ret=0
;;
(pull)
_arguments \
$opts_help \
'--ignore-pull-failures[Pull what it can and ignores images with pull failures.]' \
+ '--no-parallel[Disable parallel pulling]' \
+ '(-q --quiet)'{-q,--quiet}'[Pull without printing progress information]' \
+ '--include-deps[Also pull services declared as dependencies]' \
'*:services:__docker-compose_services_from_image' && ret=0
;;
(push)
@@ -317,6 +239,7 @@ __docker-compose_subcommand() {
$opts_no_deps \
'-d[Detached mode: Run container in the background, print new container name.]' \
'*-e[KEY=VAL Set an environment variable (can be used multiple times)]:environment variable KEY=VAL: ' \
+ '*'{-l,--label}'[KEY=VAL Add or override a label (can be used multiple times)]:label KEY=VAL: ' \
'--entrypoint[Overwrite the entrypoint of the image.]:entry point: ' \
'--name=[Assign a name to the container]:name: ' \
'(-p --publish)'{-p,--publish=}"[Publish a container's port(s) to the host]" \
@@ -326,6 +249,7 @@ __docker-compose_subcommand() {
'(-u --user)'{-u,--user=}'[Run as specified username or uid]:username or uid:_users' \
'(-v --volume)*'{-v,--volume=}'[Bind mount a volume]:volume: ' \
'(-w --workdir)'{-w,--workdir=}'[Working directory inside the container]:workdir: ' \
+ "--use-aliases[Use the services network aliases in the network(s) the container connects to]" \
'(-):services:__docker-compose_services' \
'(-):command: _command_names -e' \
'*::arguments: _normal' && ret=0
@@ -360,7 +284,7 @@ __docker-compose_subcommand() {
(up)
_arguments \
$opts_help \
- '(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit.]' \
+ '(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit and --attach-dependencies.]' \
$opts_no_color \
$opts_no_deps \
$opts_force_recreate \
@@ -368,9 +292,12 @@ __docker-compose_subcommand() {
$opts_no_build \
"(--no-build)--build[Build images before starting containers.]" \
"(-d)--abort-on-container-exit[Stops all containers if any container was stopped. Incompatible with -d.]" \
+ "(-d)--attach-dependencies[Attach to dependent containers. Incompatible with -d.]" \
'(-t --timeout)'{-t,--timeout}"[Use this timeout in seconds for container shutdown when attached or when containers are already running. (default: 10)]:seconds: " \
+ '--scale[SERVICE=NUM Scale SERVICE to NUM instances. Overrides the `scale` setting in the Compose file if present.]:service scale SERVICE=NUM: ' \
+ '--exit-code-from=[Return the exit code of the selected service container. Implies --abort-on-container-exit]:service:__docker-compose_services' \
$opts_remove_orphans \
- '*:services:__docker-compose_services_all' && ret=0
+ '*:services:__docker-compose_services' && ret=0
;;
(version)
_arguments \
@@ -409,8 +336,13 @@ _docker-compose() {
'(- :)'{-h,--help}'[Get help]' \
'*'{-f,--file}"[${file_description}]:file:_files -g '*.yml'" \
'(-p --project-name)'{-p,--project-name}'[Specify an alternate project name (default: directory name)]:project name:' \
- '--verbose[Show more output]' \
+ '--env-file[Specify an alternate environment file (default: .env)]:env-file:_files' \
+ "--compatibility[If set, Compose will attempt to convert keys in v3 files to their non-Swarm equivalent]" \
'(- :)'{-v,--version}'[Print version and exit]' \
+ '--verbose[Show more output]' \
+ '--log-level=[Set log level]:level:(DEBUG INFO WARNING ERROR CRITICAL)' \
+ '--no-ansi[Do not print ANSI control characters]' \
+ '--ansi=[Control when to print ANSI control characters]:when:(never always auto)' \
'(-H --host)'{-H,--host}'[Daemon socket to connect to]:host:' \
'--tls[Use TLS; implied by --tlsverify]' \
'--tlscacert=[Trust certs signed only by this CA]:ca path:' \
@@ -421,9 +353,10 @@ _docker-compose() {
'(-): :->command' \
'(-)*:: :->option-or-argument' && ret=0
- local -a relevant_compose_flags relevant_docker_flags compose_options docker_options
+ local -a relevant_compose_flags relevant_compose_repeatable_flags relevant_docker_flags compose_options docker_options
relevant_compose_flags=(
+ "--env-file"
"--file" "-f"
"--host" "-H"
"--project-name" "-p"
@@ -435,6 +368,10 @@ _docker-compose() {
"--skip-hostname-check"
)
+ relevant_compose_repeatable_flags=(
+ "--file" "-f"
+ )
+
relevant_docker_flags=(
"--host" "-H"
"--tls"
@@ -452,9 +389,18 @@ _docker-compose() {
fi
fi
if [[ -n "${relevant_compose_flags[(r)$k]}" ]]; then
- compose_options+=$k
- if [[ -n "$opt_args[$k]" ]]; then
- compose_options+=$opt_args[$k]
+ if [[ -n "${relevant_compose_repeatable_flags[(r)$k]}" ]]; then
+ values=("${(@s/:/)opt_args[$k]}")
+ for value in $values
+ do
+ compose_options+=$k
+ compose_options+=$value
+ done
+ else
+ compose_options+=$k
+ if [[ -n "$opt_args[$k]" ]]; then
+ compose_options+=$opt_args[$k]
+ fi
fi
fi
done
diff --git a/contrib/migration/migrate-compose-file-v1-to-v2.py b/contrib/migration/migrate-compose-file-v1-to-v2.py
index c1785b0d..26511206 100755
--- a/contrib/migration/migrate-compose-file-v1-to-v2.py
+++ b/contrib/migration/migrate-compose-file-v1-to-v2.py
@@ -3,9 +3,6 @@
Migrate a Compose file from the V1 format in Compose 1.5 to the V2 format
supported by Compose 1.6+
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import argparse
import logging
import sys
@@ -44,7 +41,7 @@ def warn_for_links(name, service):
links = service.get('links')
if links:
example_service = links[0].partition(':')[0]
- log.warn(
+ log.warning(
"Service {name} has links, which no longer create environment "
"variables such as {example_service_upper}_PORT. "
"If you are using those in your application code, you should "
@@ -57,7 +54,7 @@ def warn_for_links(name, service):
def warn_for_external_links(name, service):
external_links = service.get('external_links')
if external_links:
- log.warn(
+ log.warning(
"Service {name} has external_links: {ext}, which now work "
"slightly differently. In particular, two containers must be "
"connected to at least one network in common in order to "
@@ -107,7 +104,7 @@ def rewrite_volumes_from(service, service_names):
def create_volumes_section(data):
named_volumes = get_named_volumes(data['services'])
if named_volumes:
- log.warn(
+ log.warning(
"Named volumes ({names}) must be explicitly declared. Creating a "
"'volumes' section with declarations.\n\n"
"For backwards-compatibility, they've been declared as external. "
@@ -159,7 +156,7 @@ def main(args):
opts = parse_opts(args)
- with open(opts.filename, 'r') as fh:
+ with open(opts.filename) as fh:
new_format = migrate(fh.read())
if opts.in_place:
diff --git a/debian/changelog b/debian/changelog
index 753a47a5..70155e1d 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,86 @@
+docker-compose (1.29.2-6) unstable; urgency=medium
+
+ * Source-only rebuild.
+
+ -- Andrej Shadura <andrewsh@debian.org> Fri, 11 Aug 2023 17:05:52 +0200
+
+docker-compose (1.29.2-5) unstable; urgency=medium
+
+ * No change rebuild.
+
+ -- Andrej Shadura <andrewsh@debian.org> Tue, 08 Aug 2023 19:21:28 +0200
+
+docker-compose (1.29.2-4) unstable; urgency=medium
+
+ * Move the command-line tool into a separate package.
+
+ -- Andrej Shadura <andrewsh@debian.org> Tue, 21 Feb 2023 16:36:44 +0100
+
+docker-compose (1.29.2-3) unstable; urgency=medium
+
+ * Remove Felipe Sateler from uploaders.
+ Thank you for your work, Felipe.
+ * Provide python3-compose.
+
+ -- Andrej Shadura <andrewsh@debian.org> Thu, 16 Feb 2023 18:14:55 +0100
+
+docker-compose (1.29.2-2) unstable; urgency=medium
+
+ * Remove dependency on nose as tests are disabled anyway.
+
+ -- Andrej Shadura <andrewsh@debian.org> Sun, 21 Aug 2022 22:38:50 +0200
+
+docker-compose (1.29.2-1) unstable; urgency=medium
+
+ * New upstream release.
+ * Bump python3-docker dependency.
+ * Use dh-sequence-python3.
+ * Drop dependency on the ancient Python 3.6 version.
+ * Install completions for fish.
+
+ -- Andrej Shadura <andrewsh@debian.org> Tue, 25 Jan 2022 10:08:41 +0100
+
+docker-compose (1.27.4-3) unstable; urgency=medium
+
+ * Bump debhelper from old 12 to 13.
+ * Update standards version to 4.6.0, no changes needed.
+
+ -- Andrej Shadura <andrewsh@debian.org> Fri, 31 Dec 2021 21:29:16 +0100
+
+docker-compose (1.27.4-2) unstable; urgency=medium
+
+ * Limit debian/watch to v1 docker-compose only (v2 is a completely
+ different piece of software).
+
+ -- Andrej Shadura <andrewsh@debian.org> Tue, 14 Dec 2021 23:46:33 +0100
+
+docker-compose (1.27.4-1) unstable; urgency=medium
+
+ * New upstream release.
+ * Update the build dependencies.
+ * Set upstream metadata fields: Bug-Database, Bug-Submit, Repository,
+ Repository-Browse.
+ * Use canonical URL in Vcs-Git.
+ * Set Rules-Requires-Root: no.
+ * Bump Standards-Version to 4.5.1.
+ * Make the synopsis read less like advertising.
+
+ -- Andrej Shadura <andrewsh@debian.org> Mon, 16 Aug 2021 20:42:18 +0100
+
+docker-compose (1.25.0-1) unstable; urgency=medium
+
+ * New upstream version
+ - Drop Relax-Dependencies.patch - now dependencies are less strict
+ * Drop pydist-overrides and update debian/control versions.
+ Since docker-compose is python3 it was not used.
+ * Trim trailing whitespace.
+ * Use secure copyright file specification URI.
+ * Use secure URI in Homepage field.
+ * Bump debhelper from old 10 to 12.
+ * Set upstream metadata fields: Repository.
+
+ -- Felipe Sateler <fsateler@debian.org> Fri, 22 Nov 2019 21:32:27 -0300
+
docker-compose (1.21.0-3) unstable; urgency=medium
* Add dependency on python3-distutils.
@@ -97,7 +180,7 @@ docker-compose (1.3.3-1) unstable; urgency=medium
- Bump python-docker dependency
* Add manpage for docker-compose. Thanks Dariusz Dwornikowski.
Closes: #792518
-
+
-- Felipe Sateler <fsateler@debian.org> Sun, 19 Jul 2015 15:32:46 -0300
docker-compose (1.3.1-1) unstable; urgency=medium
diff --git a/debian/compat b/debian/compat
deleted file mode 100644
index f599e28b..00000000
--- a/debian/compat
+++ /dev/null
@@ -1 +0,0 @@
-10
diff --git a/debian/control b/debian/control
index 279e71ab..3b3a9953 100644
--- a/debian/control
+++ b/debian/control
@@ -1,37 +1,59 @@
Source: docker-compose
Maintainer: Docker Compose Team <team+docker-compose@tracker.debian.org>
-Uploaders: Jason Pleau <jason@jpleau.ca>,
- Felipe Sateler <fsateler@debian.org>
+Uploaders:
+ Jason Pleau <jason@jpleau.ca>,
+ Andrej Shadura <andrewsh@debian.org>
Section: admin
Priority: optional
Build-Depends:
- debhelper (>= 10),
- python3 (>= 3.6),
- dh-python,
+ debhelper-compat (= 13),
+ dh-sequence-python3,
+ python3-all,
python3-cached-property (>= 1.2.0),
python3-setuptools (>= 0.6b3),
- python3-docker (>= 3.2.1),
+ python3-distro (>= 1.5.0),
+ python3-docker (>= 5),
python3-dockerpty (>= 0.4.1),
python3-docopt (>= 0.6.1),
+ python3-dotenv (>= 0.13.0),
python3-yaml (>= 3.10),
- python3-requests (>= 2.6.1),
- python3-six (>= 1.7.3),
+ python3-requests (>= 2.20.0),
python3-texttable (>= 0.9.0),
python3-websocket (>= 0.32.0),
python3-mock (>= 1.0.1),
- python3-nose,
python3-flake8,
-Standards-Version: 4.1.4
-X-Python3-Version: >= 3.6
-Homepage: http://docs.docker.com/compose/
-Vcs-Git: https://salsa.debian.org/docker-compose-team/docker-compose
+Standards-Version: 4.6.0
+Rules-Requires-Root: no
+Homepage: https://docs.docker.com/compose/
+Vcs-Git: https://salsa.debian.org/docker-compose-team/docker-compose.git
Vcs-Browser: https://salsa.debian.org/docker-compose-team/docker-compose
Package: docker-compose
Architecture: all
-Depends: ${misc:Depends}, ${python3:Depends}, python3-distutils
+Depends:
+ ${misc:Depends},
+ ${python3:Depends},
+ python3-distutils,
+ python3-compose (= ${binary:Version}),
Recommends: docker.io (>= 1.9.0)
-Description: Punctual, lightweight development environments using Docker
+Description: define and run multi-container Docker applications with YAML
docker-compose is a service management software built on top of docker. Define
your services and their relationships in a simple YAML file, and let compose
handle the rest.
+ .
+ This package includes the command-line tool.
+
+Package: python3-compose
+Architecture: all
+Depends:
+ ${misc:Depends},
+ ${python3:Depends},
+ python3-distutils,
+Replaces: docker-compose (<< 1.29.2-4~)
+Breaks: docker-compose (<< 1.29.2-4~)
+Description: Python implementation of docker-compose file specification
+ docker-compose is a service management software built on top of docker. Define
+ your services and their relationships in a simple YAML file, and let compose
+ handle the rest.
+ .
+ This package includes a Python implementation of docker-compose.
diff --git a/debian/copyright b/debian/copyright
index 22026149..1f4b6bef 100644
--- a/debian/copyright
+++ b/debian/copyright
@@ -1,4 +1,4 @@
-Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: docker-compose
Source: https://github.com/docker/compose/
diff --git a/debian/docker-compose.install b/debian/docker-compose.install
new file mode 100644
index 00000000..f2cdf4f8
--- /dev/null
+++ b/debian/docker-compose.install
@@ -0,0 +1,3 @@
+contrib/completion/zsh/* /usr/share/zsh/vendor-completions
+contrib/completion/bash/* /usr/share/bash-completion/completions
+contrib/completion/fish/* /usr/share/fish/completions
diff --git a/debian/docs b/debian/docs
index d74eeb3e..ca016202 100644
--- a/debian/docs
+++ b/debian/docs
@@ -1,3 +1,2 @@
README.md
SWARM.md
-docs/*.md
diff --git a/debian/patches/Relax-dependencies.patch b/debian/patches/Relax-dependencies.patch
deleted file mode 100644
index 7ec67baf..00000000
--- a/debian/patches/Relax-dependencies.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-From: Felipe Sateler <fsateler@gmail.com>
-Date: Mon, 17 Oct 2016 10:35:03 -0300
-Subject: Relax dependencies
-
-docker-py, dockerpty and requests are too restrictive
----
-diff --git a/setup.py b/setup.py
-index a7a33363..a847c61d 100644
---- a/setup.py
-+++ b/setup.py
-@@ -33,11 +33,11 @@ install_requires = [
- 'cached-property >= 1.2.0, < 2',
- 'docopt >= 0.6.1, < 0.7',
- 'PyYAML >= 3.10, < 4',
-- 'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.19',
-- 'texttable >= 0.9.0, < 0.10',
-+ 'requests >= 2.6.1',
-+ 'texttable >= 0.9.0',
- 'websocket-client >= 0.32.0, < 1.0',
-- 'docker >= 3.2.1, < 4.0',
-- 'dockerpty >= 0.4.1, < 0.5',
-+ 'docker >= 3.2.1',
-+ 'dockerpty >= 0.4.1',
- 'six >= 1.3.0, < 2',
- 'jsonschema >= 2.5.1, < 3',
- ]
diff --git a/debian/patches/series b/debian/patches/series
deleted file mode 100644
index cb797975..00000000
--- a/debian/patches/series
+++ /dev/null
@@ -1 +0,0 @@
-Relax-dependencies.patch
diff --git a/debian/pydist-overrides b/debian/pydist-overrides
deleted file mode 100644
index 488a749b..00000000
--- a/debian/pydist-overrides
+++ /dev/null
@@ -1,9 +0,0 @@
-requests python-requests (>= 2.6.1)
-docker python-docker (>= 2.4.0)
-dockerpty python-dockerpty (>= 0.4.1)
-texttable python-texttable (>= 0.9)
-websocket-client python-websocket (>= 0.32.0)
-PyYAML python-yaml (>= 3.10)
-jsonschema python-jsonschema (>= 2.5.1)
-docopt python-docopt (>= 0.6.1)
-cached-property python-cached-property (>= 1.2.0)
diff --git a/debian/rules b/debian/rules
index b9ebff91..e6efab9a 100755
--- a/debian/rules
+++ b/debian/rules
@@ -1,15 +1,12 @@
#!/usr/bin/make -f
-export PYBUILD_NAME=docker-compose
+export PYBUILD_NAME=compose
%:
- dh $@ --with python3 --buildsystem=pybuild
+ dh $@ --buildsystem=pybuild
-DESTDIR = $(CURDIR)/debian/docker-compose
-override_dh_auto_install:
- dh_auto_install --buildsystem=pybuild
- mkdir -p $(DESTDIR)/usr/share/zsh/vendor-completions $(DESTDIR)/usr/share/bash-completion/completions
- install -m644 contrib/completion/zsh/_docker-compose $(DESTDIR)/usr/share/zsh/vendor-completions
- install -m644 contrib/completion/bash/docker-compose $(DESTDIR)/usr/share/bash-completion/completions
+execute_after_dh_auto_install:
+ mkdir -p debian/docker-compose/usr/bin
+ mv debian/python3-$(PYBUILD_NAME)/usr/bin/* debian/docker-compose/usr/bin
override_dh_auto_test:
# tests disabled because they require networking
diff --git a/debian/upstream/metadata b/debian/upstream/metadata
new file mode 100644
index 00000000..8e74fba9
--- /dev/null
+++ b/debian/upstream/metadata
@@ -0,0 +1,4 @@
+Bug-Database: https://github.com/docker/compose/issues
+Bug-Submit: https://github.com/docker/compose/issues/new
+Repository: https://github.com/docker/compose.git
+Repository-Browse: https://github.com/docker/compose
diff --git a/debian/watch b/debian/watch
index 56bc4a87..a315448f 100644
--- a/debian/watch
+++ b/debian/watch
@@ -7,5 +7,5 @@
version=3
opts="filenamemangle=s/.+\/v?(\d\S*)\.tar\.gz/docker-compose-$1\.tar\.gz/,uversionmangle=s/-?rc/~rc/" \
- https://github.com/docker/compose/tags .*/v?(\d\S*)\.tar\.gz
+ https://github.com/docker/compose/tags .*/v?(1\S*)\.tar\.gz
diff --git a/docker-compose-entrypoint.sh b/docker-compose-entrypoint.sh
new file mode 100755
index 00000000..84436fa0
--- /dev/null
+++ b/docker-compose-entrypoint.sh
@@ -0,0 +1,20 @@
+#!/bin/sh
+set -e
+
+# first arg is `-f` or `--some-option`
+if [ "${1#-}" != "$1" ]; then
+ set -- docker-compose "$@"
+fi
+
+# if our command is a valid Docker subcommand, let's invoke it through Docker instead
+# (this allows for "docker run docker ps", etc)
+if docker-compose help "$1" > /dev/null 2>&1; then
+ set -- docker-compose "$@"
+fi
+
+# if we have "--link some-docker:docker" and not DOCKER_HOST, let's set DOCKER_HOST automatically
+if [ -z "$DOCKER_HOST" -a "$DOCKER_PORT_2375_TCP" ]; then
+ export DOCKER_HOST='tcp://docker:2375'
+fi
+
+exec "$@"
diff --git a/docker-compose.spec b/docker-compose.spec
index b8c3a419..0c2fa3de 100644
--- a/docker-compose.spec
+++ b/docker-compose.spec
@@ -23,63 +23,8 @@ exe = EXE(pyz,
'DATA'
),
(
- 'compose/config/config_schema_v2.0.json',
- 'compose/config/config_schema_v2.0.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v2.1.json',
- 'compose/config/config_schema_v2.1.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v2.2.json',
- 'compose/config/config_schema_v2.2.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v2.3.json',
- 'compose/config/config_schema_v2.3.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v2.4.json',
- 'compose/config/config_schema_v2.4.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v3.0.json',
- 'compose/config/config_schema_v3.0.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v3.1.json',
- 'compose/config/config_schema_v3.1.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v3.2.json',
- 'compose/config/config_schema_v3.2.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v3.3.json',
- 'compose/config/config_schema_v3.3.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v3.4.json',
- 'compose/config/config_schema_v3.4.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v3.5.json',
- 'compose/config/config_schema_v3.5.json',
- 'DATA'
- ),
- (
- 'compose/config/config_schema_v3.6.json',
- 'compose/config/config_schema_v3.6.json',
+ 'compose/config/compose_spec.json',
+ 'compose/config/compose_spec.json',
'DATA'
),
(
@@ -93,4 +38,5 @@ exe = EXE(pyz,
debug=False,
strip=None,
upx=True,
- console=True)
+ console=True,
+ bootloader_ignore_signals=True)
diff --git a/docker-compose_darwin.spec b/docker-compose_darwin.spec
new file mode 100644
index 00000000..24889475
--- /dev/null
+++ b/docker-compose_darwin.spec
@@ -0,0 +1,48 @@
+# -*- mode: python -*-
+
+block_cipher = None
+
+a = Analysis(['bin/docker-compose'],
+ pathex=['.'],
+ hiddenimports=[],
+ hookspath=[],
+ runtime_hooks=[],
+ cipher=block_cipher)
+
+pyz = PYZ(a.pure, a.zipped_data,
+ cipher=block_cipher)
+
+exe = EXE(pyz,
+ a.scripts,
+ exclude_binaries=True,
+ name='docker-compose',
+ debug=False,
+ strip=False,
+ upx=True,
+ console=True,
+ bootloader_ignore_signals=True)
+coll = COLLECT(exe,
+ a.binaries,
+ a.zipfiles,
+ a.datas,
+ [
+ (
+ 'compose/config/config_schema_v1.json',
+ 'compose/config/config_schema_v1.json',
+ 'DATA'
+ ),
+ (
+ 'compose/config/compose_spec.json',
+ 'compose/config/compose_spec.json',
+ 'DATA'
+ ),
+ (
+ 'compose/GITSHA',
+ 'compose/GITSHA',
+ 'DATA'
+ )
+ ],
+ strip=False,
+ upx=True,
+ upx_exclude=[],
+ name='docker-compose-Darwin-x86_64')
diff --git a/docs/README.md b/docs/README.md
index 50c91d20..accc7c23 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -6,11 +6,9 @@ The documentation for Compose has been merged into
The docs for Compose are now here:
https://github.com/docker/docker.github.io/tree/master/compose
-Please submit pull requests for unpublished features on the `vnext-compose` branch (https://github.com/docker/docker.github.io/tree/vnext-compose).
+Please submit pull requests for unreleased features/changes on the `master` branch (https://github.com/docker/docker.github.io/tree/master), and prefix the PR title with `[WIP]` to indicate that it relates to an unreleased change.
-If you submit a PR to this codebase that has a docs impact, create a second docs PR on `docker.github.io`. Use the docs PR template provided (coming soon - watch this space).
-
-PRs for typos, additional information, etc. for already-published features should be labeled as `okay-to-publish` (we are still settling on a naming convention, will provide a label soon). You can submit these PRs either to `vnext-compose` or directly to `master` on `docker.github.io`
+If you submit a PR to this codebase that has a docs impact, create a second docs PR on `docker.github.io`. Use the docs PR template provided.
As always, the docs remain open-source and we appreciate your feedback and
pull requests!
diff --git a/project/RELEASE-PROCESS.md b/project/RELEASE-PROCESS.md
index d4afb87b..c8457671 100644..120000
--- a/project/RELEASE-PROCESS.md
+++ b/project/RELEASE-PROCESS.md
@@ -1,148 +1 @@
-Building a Compose release
-==========================
-
-## Prerequisites
-
-The release scripts require the following tools installed on the host:
-
-* https://hub.github.com/
-* https://stedolan.github.io/jq/
-* http://pandoc.org/
-
-## To get started with a new release
-
-Create a branch, update version, and add release notes by running `make-branch`
-
- ./script/release/make-branch $VERSION [$BASE_VERSION]
-
-`$BASE_VERSION` will default to master. Use the last version tag for a bug fix
-release.
-
-As part of this script you'll be asked to:
-
-1. Update the version in `compose/__init__.py` and `script/run/run.sh`.
-
- If the next release will be an RC, append `-rcN`, e.g. `1.4.0-rc1`.
-
-2. Write release notes in `CHANGELOG.md`.
-
- Almost every feature enhancement should be mentioned, with the most
- visible/exciting ones first. Use descriptive sentences and give context
- where appropriate.
-
- Bug fixes are worth mentioning if it's likely that they've affected lots
- of people, or if they were regressions in the previous version.
-
- Improvements to the code are not worth mentioning.
-
-3. Create a new repository on [bintray](https://bintray.com/docker-compose).
- The name has to match the name of the branch (e.g. `bump-1.9.0`) and the
- type should be "Generic". Other fields can be left blank.
-
-4. Check that the `vnext-compose` branch on
- [the docs repo](https://github.com/docker/docker.github.io/) has
- documentation for all the new additions in the upcoming release, and create
- a PR there for what needs to be amended.
-
-
-## When a PR is merged into master that we want in the release
-
-1. Check out the bump branch and run the cherry pick script
-
- git checkout bump-$VERSION
- ./script/release/cherry-pick-pr $PR_NUMBER
-
-2. When you are done cherry-picking branches move the bump version commit to HEAD
-
- ./script/release/rebase-bump-commit
- git push --force $USERNAME bump-$VERSION
-
-
-## To release a version (whether RC or stable)
-
-Check out the bump branch and run the `build-binaries` script
-
- git checkout bump-$VERSION
- ./script/release/build-binaries
-
-When prompted build the non-linux binaries and test them.
-
-1. Download the different platform binaries by running the following script:
-
- `./script/release/download-binaries $VERSION`
-
- The binaries for Linux, OSX and Windows will be downloaded in the `binaries-$VERSION` folder.
-
-3. Draft a release from the tag on GitHub (the `build-binaries` script will open the window for
- you)
-
- The tag will only be present on Github when you run the `push-release`
- script in step 7, but you can pre-fill it at that point.
-
-4. Paste in installation instructions and release notes. Here's an example -
- change the Compose version and Docker version as appropriate:
-
- If you're a Mac or Windows user, the best way to install Compose and keep it up-to-date is **[Docker for Mac and Windows](https://www.docker.com/products/docker)**.
-
- Docker for Mac and Windows will automatically install the latest version of Docker Engine for you.
-
- Alternatively, you can use the usual commands to install or upgrade Compose:
-
- ```
- curl -L https://github.com/docker/compose/releases/download/1.16.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
- chmod +x /usr/local/bin/docker-compose
- ```
-
- See the [install docs](https://docs.docker.com/compose/install/) for more install options and instructions.
-
- ## Compose file format compatibility matrix
-
- | Compose file format | Docker Engine |
- | --- | --- |
- | 3.3 | 17.06.0+ |
- | 3.0 &ndash; 3.2 | 1.13.0+ |
- | 2.3| 17.06.0+ |
- | 2.2 | 1.13.0+ |
- | 2.1 | 1.12.0+ |
- | 2.0 | 1.10.0+ |
- | 1.0 | 1.9.1+ |
-
- ## Changes
-
- ...release notes go here...
-
-5. Attach the binaries and `script/run/run.sh`
-
-6. Add "Thanks" with a list of contributors. The contributor list can be generated
- by running `./script/release/contributors`.
-
-7. If everything looks good, it's time to push the release.
-
-
- ./script/release/push-release
-
-
-8. Merge the bump PR.
-
-8. Publish the release on GitHub.
-
-9. Check that all the binaries download (following the install instructions) and run.
-
-10. Announce the release on the appropriate Slack channel(s).
-
-## If it’s a stable release (not an RC)
-
-1. Close the release’s milestone.
-
-## If it’s a minor release (1.x.0), rather than a patch release (1.x.y)
-
-1. Open a PR against `master` to:
-
- - update `CHANGELOG.md` to bring it in line with `release`
- - bump the version in `compose/__init__.py` to the *next* minor version number with `dev` appended. For example, if you just released `1.4.0`, update it to `1.5.0dev`.
-
-2. Get the PR merged.
-
-## Finally
-
-1. Celebrate, however you’d like.
+../script/release/README.md
\ No newline at end of file
diff --git a/pyinstaller/ldd b/pyinstaller/ldd
new file mode 100755
index 00000000..3f10ad27
--- /dev/null
+++ b/pyinstaller/ldd
@@ -0,0 +1,13 @@
+#!/bin/sh
+
+# From http://wiki.musl-libc.org/wiki/FAQ#Q:_where_is_ldd_.3F
+#
+# Musl's dynlinker comes with ldd functionality built in. just create a
+# symlink from ld-musl-$ARCH.so to /bin/ldd. If the dynlinker was started
+# as "ldd", it will detect that and print the appropriate DSO information.
+#
+# This wrapper instead post-processes the real ldd output, rewriting entries
+# of the form "lib.so => ldd" to "lib.so => /lib/lib.so" so that pyinstaller
+# can find the actual lib.
+exec /usr/bin/ldd "$@" | \
+ sed -r 's/([^[:space:]]+) => ldd/\1 => \/lib\/\1/g' | \
+ sed -r 's/ldd \(.*\)//g'
diff --git a/requirements-build.txt b/requirements-build.txt
index e5a77e79..9ca8d666 100644
--- a/requirements-build.txt
+++ b/requirements-build.txt
@@ -1 +1 @@
-pyinstaller==3.3.1
+pyinstaller==4.1
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 32c5c23a..34a53f86 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,9 @@
-coverage==4.4.2
-flake8==3.5.0
-mock>=1.0.1
-pytest==2.9.2
-pytest-cov==2.5.1
+Click==7.1.2
+coverage==5.5
+ddt==1.4.1
+flake8==3.8.3
+gitpython==3.1.11
+mock==3.0.5
+pytest==6.0.1; python_version >= '3.5'
+pytest==4.6.5; python_version < '3.5'
+pytest-cov==2.10.1
diff --git a/requirements-indirect.txt b/requirements-indirect.txt
new file mode 100644
index 00000000..5c6b789b
--- /dev/null
+++ b/requirements-indirect.txt
@@ -0,0 +1,28 @@
+altgraph==0.17
+appdirs==1.4.4
+attrs==20.3.0
+bcrypt==3.2.0
+cffi==1.14.4
+cryptography==3.3.2
+distlib==0.3.1
+entrypoints==0.3
+filelock==3.0.12
+gitdb2==4.0.2
+mccabe==0.6.1
+more-itertools==8.6.0; python_version >= '3.5'
+more-itertools==5.0.0; python_version < '3.5'
+packaging==20.9
+pluggy==0.13.1
+py==1.10.0
+pycodestyle==2.6.0
+pycparser==2.20
+pyflakes==2.2.0
+PyNaCl==1.4.0
+pyparsing==2.4.7
+pyrsistent==0.16.0
+smmap==3.0.4
+smmap2==3.0.1
+toml==0.10.1
+tox==3.21.2
+virtualenv==20.4.0
+wcwidth==0.2.5
diff --git a/requirements.txt b/requirements.txt
index 7dce4024..f0cc8be7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,23 +1,22 @@
-backports.ssl-match-hostname==3.5.0.1; python_version < '3'
-cached-property==1.3.0
-certifi==2017.4.17
+backports.shutil_get_terminal_size==1.0.0
+cached-property==1.5.1; python_version < '3.8'
+certifi==2020.6.20
chardet==3.0.4
-docker==3.2.1
-docker-pycreds==0.2.1
+colorama==0.4.3; sys_platform == 'win32'
+distro==1.5.0
+docker==5.0.0
+docker-pycreds==0.4.0
dockerpty==0.4.1
docopt==0.6.2
-enum34==1.1.6; python_version < '3.4'
-functools32==3.2.3.post2; python_version < '3.2'
-git+git://github.com/tartley/colorama.git@bd378c725b45eba0b8e5cc091c3ca76a954c92ff; sys_platform == 'win32'
-idna==2.5
-ipaddress==1.0.18
-jsonschema==2.6.0
-pypiwin32==219; sys_platform == 'win32' and python_version < '3.6'
-pypiwin32==220; sys_platform == 'win32' and python_version >= '3.6'
-PySocks==1.6.7
-PyYAML==3.12
-requests==2.18.4
-six==1.10.0
-texttable==0.9.1
-urllib3==1.21.1
-websocket-client==0.32.0
+idna==2.10
+ipaddress==1.0.23
+jsonschema==3.2.0
+paramiko==2.7.1
+PySocks==1.7.1
+python-dotenv==0.17.0
+pywin32==227; sys_platform == 'win32'
+PyYAML==5.4.1
+requests==2.24.0
+texttable==1.6.2
+urllib3==1.25.10; python_version == '3.3'
+websocket-client==0.57.0
diff --git a/script/build/image b/script/build/image
index a3198c99..fb3f856e 100755
--- a/script/build/image
+++ b/script/build/image
@@ -7,11 +7,14 @@ if [ -z "$1" ]; then
exit 1
fi
-TAG=$1
+TAG="$1"
VERSION="$(python setup.py --version)"
-./script/build/write-git-sha
+DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
python setup.py sdist bdist_wheel
-./script/build/linux
-docker build -t docker/compose:$TAG -f Dockerfile.run .
+
+docker build \
+ --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}" \
+ -t "${TAG}" .
diff --git a/script/build/linux b/script/build/linux
index 1a4cd4d9..2e56b625 100755
--- a/script/build/linux
+++ b/script/build/linux
@@ -4,10 +4,13 @@ set -ex
./script/clean
-TAG="docker-compose"
-docker build -t "$TAG" . | tail -n 200
-docker run \
- --rm --entrypoint="script/build/linux-entrypoint" \
- -v $(pwd)/dist:/code/dist \
- -v $(pwd)/.git:/code/.git \
- "$TAG"
+DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+
+docker build . \
+ --target bin \
+ --build-arg DISTRO=debian \
+ --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}" \
+ --output dist/
+ARCH=$(uname -m)
+# Ensure that we output the binary with the same name as we did before
+mv dist/docker-compose-linux-amd64 "dist/docker-compose-Linux-${ARCH}"
diff --git a/script/build/linux-entrypoint b/script/build/linux-entrypoint
index 0e3c7ec1..74f47620 100755
--- a/script/build/linux-entrypoint
+++ b/script/build/linux-entrypoint
@@ -2,14 +2,39 @@
set -ex
-TARGET=dist/docker-compose-$(uname -s)-$(uname -m)
-VENV=/code/.tox/py36
+CODE_PATH=/code
+VENV="${CODE_PATH}"/.tox/py37
-mkdir -p `pwd`/dist
-chmod 777 `pwd`/dist
+cd "${CODE_PATH}"
+mkdir -p dist
+chmod 777 dist
-$VENV/bin/pip install -q -r requirements-build.txt
-./script/build/write-git-sha
-su -c "$VENV/bin/pyinstaller docker-compose.spec" user
-mv dist/docker-compose $TARGET
-$TARGET version
+"${VENV}"/bin/pip3 install -q -r requirements-build.txt
+
+# TODO(ulyssessouza): check whether this is really needed
+if [ -z "${DOCKER_COMPOSE_GITSHA}" ]; then
+ DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+fi
+echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
+
+export PATH="${CODE_PATH}/pyinstaller:${PATH}"
+
+if [ ! -z "${BUILD_BOOTLOADER}" ]; then
+ # Build bootloader for alpine; develop is the main branch
+ git clone --single-branch --branch develop https://github.com/pyinstaller/pyinstaller.git /tmp/pyinstaller
+ cd /tmp/pyinstaller/bootloader
+ # Checkout commit corresponding to version in requirements-build
+ git checkout v4.1
+ "${VENV}"/bin/python3 ./waf configure --no-lsb all
+ "${VENV}"/bin/pip3 install ..
+ cd "${CODE_PATH}"
+ rm -Rf /tmp/pyinstaller
+else
+ echo "NOT compiling bootloader!!!"
+fi
+
+"${VENV}"/bin/pyinstaller --exclude-module pycrypto --exclude-module PyInstaller docker-compose.spec
+ls -la dist/
+ldd dist/docker-compose
+mv dist/docker-compose /usr/local/bin
+docker-compose version
diff --git a/script/build/osx b/script/build/osx
index 0c4b062b..e2d17527 100755
--- a/script/build/osx
+++ b/script/build/osx
@@ -1,15 +1,25 @@
#!/bin/bash
set -ex
-PATH="/usr/local/bin:$PATH"
+TOOLCHAIN_PATH="$(realpath $(dirname $0)/../../build/toolchain)"
rm -rf venv
-virtualenv -p /usr/local/bin/python3 venv
+virtualenv -p "${TOOLCHAIN_PATH}"/bin/python3 venv
+venv/bin/pip install -r requirements-indirect.txt
venv/bin/pip install -r requirements.txt
venv/bin/pip install -r requirements-build.txt
venv/bin/pip install --no-deps .
-./script/build/write-git-sha
+DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
+
+# Build as a folder for macOS Catalina.
+venv/bin/pyinstaller docker-compose_darwin.spec
+dist/docker-compose-Darwin-x86_64/docker-compose version
+(cd dist/docker-compose-Darwin-x86_64/ && tar zcvf ../docker-compose-Darwin-x86_64.tgz .)
+rm -rf dist/docker-compose-Darwin-x86_64
+
+# Build a static binary for legacy systems.
venv/bin/pyinstaller docker-compose.spec
mv dist/docker-compose dist/docker-compose-Darwin-x86_64
dist/docker-compose-Darwin-x86_64 version
diff --git a/script/build/test-image b/script/build/test-image
index a2eb62cd..ddb8057d 100755
--- a/script/build/test-image
+++ b/script/build/test-image
@@ -7,11 +7,12 @@ if [ -z "$1" ]; then
exit 1
fi
-TAG=$1
+TAG="$1"
+IMAGE="docker/compose-tests"
-docker build -t docker-compose-tests:tmp .
-ctnr_id=$(docker create --entrypoint=tox docker-compose-tests:tmp)
-docker commit $ctnr_id docker/compose-tests:latest
-docker tag docker/compose-tests:latest docker/compose-tests:$TAG
-docker rm -f $ctnr_id
-docker rmi -f docker-compose-tests:tmp
+DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+docker build -t "${IMAGE}:${TAG}" . \
+ --target build \
+ --build-arg DISTRO="debian" \
+ --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}"
+docker tag "${IMAGE}":"${TAG}" "${IMAGE}":latest
diff --git a/script/build/windows.ps1 b/script/build/windows.ps1
index 98a74815..147d0f07 100644
--- a/script/build/windows.ps1
+++ b/script/build/windows.ps1
@@ -6,17 +6,17 @@
#
# http://git-scm.com/download/win
#
-# 2. Install Python 3.6.4:
+# 2. Install Python 3.9.x:
#
# https://www.python.org/downloads/
#
-# 3. Append ";C:\Python36;C:\Python36\Scripts" to the "Path" environment variable:
+# 3. Append ";C:\Python39;C:\Python39\Scripts" to the "Path" environment variable:
#
# https://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/sysdm_advancd_environmnt_addchange_variable.mspx?mfr=true
#
# 4. In Powershell, run the following commands:
#
-# $ pip install 'virtualenv>=15.1.0'
+# $ pip install 'virtualenv==20.2.2'
# $ Set-ExecutionPolicy -Scope CurrentUser RemoteSigned
#
# 5. Clone the repository:
@@ -39,21 +39,16 @@ if (Test-Path venv) {
Get-ChildItem -Recurse -Include *.pyc | foreach ($_) { Remove-Item $_.FullName }
# Create virtualenv
-virtualenv .\venv
+virtualenv -p C:\Python39\python.exe .\venv
# pip and pyinstaller generate lots of warnings, so we need to ignore them
$ErrorActionPreference = "Continue"
-# Install dependencies
-# Fix for https://github.com/pypa/pip/issues/3964
-# Remove-Item -Recurse -Force .\venv\Lib\site-packages\pip
-# .\venv\Scripts\easy_install pip==9.0.1
-# .\venv\Scripts\pip install --upgrade pip setuptools
-# End fix
-.\venv\Scripts\pip install pypiwin32==220
+.\venv\Scripts\pip install pypiwin32==223
+.\venv\Scripts\pip install -r requirements-indirect.txt
.\venv\Scripts\pip install -r requirements.txt
.\venv\Scripts\pip install --no-deps .
-.\venv\Scripts\pip install --allow-external pyinstaller -r requirements-build.txt
+.\venv\Scripts\pip install -r requirements-build.txt
git rev-parse --short HEAD | out-file -encoding ASCII compose\GITSHA
diff --git a/script/build/write-git-sha b/script/build/write-git-sha
index d16743c6..cac4b6fd 100755
--- a/script/build/write-git-sha
+++ b/script/build/write-git-sha
@@ -2,6 +2,11 @@
#
# Write the current commit sha to the file GITSHA. This file is included in
# packaging so that `docker-compose version` can include the git sha.
-#
-set -e
-git rev-parse --short HEAD > compose/GITSHA
+# Sets the sha to 'unknown' and echoes a message if the command is not successful.
+
+DOCKER_COMPOSE_GITSHA="$(git rev-parse --short HEAD)"
+if [[ "${?}" != "0" ]]; then
+ echo "Couldn't get revision of the git repository. Setting to 'unknown' instead"
+ DOCKER_COMPOSE_GITSHA="unknown"
+fi
+echo "${DOCKER_COMPOSE_GITSHA}"
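The same fallback logic, sketched in Python for illustration (the function name is hypothetical):

```
import subprocess

def git_sha(default="unknown"):
    # Mirror the shell fallback: short HEAD sha, or a placeholder
    # when the repository state cannot be read.
    try:
        return subprocess.check_output(
            ["git", "rev-parse", "--short", "HEAD"], text=True
        ).strip()
    except (subprocess.CalledProcessError, OSError):
        return default

print(git_sha())
```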
diff --git a/script/circle/bintray-deploy.sh b/script/circle/bintray-deploy.sh
index 8c8871aa..a7cce726 100755
--- a/script/circle/bintray-deploy.sh
+++ b/script/circle/bintray-deploy.sh
@@ -1,7 +1,5 @@
#!/bin/bash
-set -x
-
curl -f -u$BINTRAY_USERNAME:$BINTRAY_API_KEY -X GET \
https://api.bintray.com/repos/docker-compose/${CIRCLE_BRANCH}
@@ -27,3 +25,11 @@ curl -f -T dist/docker-compose-${OS_NAME}-x86_64 -u$BINTRAY_USERNAME:$BINTRAY_AP
-H "X-Bintray-Package: ${PKG_NAME}" -H "X-Bintray-Version: $CIRCLE_BRANCH" \
-H "X-Bintray-Override: 1" -H "X-Bintray-Publish: 1" -X PUT \
https://api.bintray.com/content/docker-compose/${CIRCLE_BRANCH}/docker-compose-${OS_NAME}-x86_64 || exit 1
+
+# Upload the folder format of docker-compose for macOS in addition to the binary.
+if [ "${OS_NAME}" == "Darwin" ]; then
+ curl -f -T dist/docker-compose-${OS_NAME}-x86_64.tgz -u$BINTRAY_USERNAME:$BINTRAY_API_KEY \
+ -H "X-Bintray-Package: ${PKG_NAME}" -H "X-Bintray-Version: $CIRCLE_BRANCH" \
+ -H "X-Bintray-Override: 1" -H "X-Bintray-Publish: 1" -X PUT \
+ https://api.bintray.com/content/docker-compose/${CIRCLE_BRANCH}/docker-compose-${OS_NAME}-x86_64.tgz || exit 1
+fi
diff --git a/script/docs/check_help.py b/script/docs/check_help.py
new file mode 100755
index 00000000..0904f00c
--- /dev/null
+++ b/script/docs/check_help.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+import glob
+import os.path
+import re
+import subprocess
+
+USAGE_RE = re.compile(r"```.*?\nUsage:.*?```", re.MULTILINE | re.DOTALL)
+USAGE_IN_CMD_RE = re.compile(r"^Usage:.*", re.MULTILINE | re.DOTALL)
+
+HELP_CMD = "docker run --rm docker/compose:latest %s --help"
+
+for file in glob.glob("compose/reference/*.md"):
+ with open(file) as f:
+ data = f.read()
+ if not USAGE_RE.search(data):
+ print("Not a command:", file)
+ continue
+ subcmd = os.path.basename(file).replace(".md", "")
+ if subcmd == "overview":
+ continue
+ print(f"Found {subcmd}: {file}")
+ help_cmd = HELP_CMD % subcmd
+ help = subprocess.check_output(help_cmd.split())
+ help = help.decode("utf-8")
+ help = USAGE_IN_CMD_RE.findall(help)[0]
+ help = help.strip()
+ data = USAGE_RE.sub(f"```none\n{help}\n```", data)
+ with open(file, "w") as f:
+ f.write(data)
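To see what `USAGE_RE` does, here is a small self-contained demonstration on a made-up doc page (the sample strings are not from the repository):

```
import re

USAGE_RE = re.compile(r"```.*?\nUsage:.*?```", re.MULTILINE | re.DOTALL)

# Hypothetical doc page containing a stale usage block.
doc = "# up\n\n```none\nUsage: up [old options]\n```\n"
fresh_help = "Usage: up [options] [SERVICE...]"

# Replace the fenced usage block with the freshly captured --help text.
print(USAGE_RE.sub("```none\n{}\n```".format(fresh_help), doc))
```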
diff --git a/script/release/README.md b/script/release/README.md
new file mode 100644
index 00000000..b42e4fa1
--- /dev/null
+++ b/script/release/README.md
@@ -0,0 +1,23 @@
+# Release HOWTO
+
+The release process is fully automated by `Release.Jenkinsfile`.
+
+## Usage
+
+1. In the appropriate branch, run `./script/release/release.py tag <version>`
+
+By appropriate, we mean that for a version `1.26.0` or `1.26.0-rc1` you should run the script from the `1.26.x` branch.
+
+The script checks the above, then asks for changelog modifications.
+
+After execution, you should have a commit with the proper version bumps for `docker-compose version` and `run.sh`.
+
+2. Run `git push --tags upstream <version_branch>`
+This should trigger a new CI build for the new tag. Once CI finishes the tests and builds, a new draft release will be available on GitHub's releases page.
+
+3. Check and confirm the release on GitHub's releases page.
+
+4. In case of a GA version, please update `docker-compose`'s release notes and version in the [GitHub documentation repository](https://github.com/docker/docker.github.io):
+ - [Release Notes](https://github.com/docker/docker.github.io/blob/master/compose/release-notes.md)
+ - [Config version](https://github.com/docker/docker.github.io/blob/master/_config.yml)
+ - [Config authoring version](https://github.com/docker/docker.github.io/blob/master/_config_authoring.yml)
diff --git a/script/release/build-binaries b/script/release/build-binaries
deleted file mode 100755
index a39b186d..00000000
--- a/script/release/build-binaries
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-#
-# Build the release binaries
-#
-
-. "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
-
-function usage() {
- >&2 cat << EOM
-Build binaries for the release.
-
-This script requires that 'git config branch.${BRANCH}.release' is set to the
-release version for the release branch.
-
-EOM
- exit 1
-}
-
-BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-VERSION="$(git config "branch.${BRANCH}.release")" || usage
-REPO=docker/compose
-
-# Build the binaries
-script/clean
-script/build/linux
-
-echo "Building the container distribution"
-script/build/image $VERSION
-
-echo "Building the compose-tests image"
-script/build/test-image $VERSION
-
-echo "Create a github release"
-# TODO: script more of this https://developer.github.com/v3/repos/releases/
-browser https://github.com/$REPO/releases/new
-
-echo "Don't forget to download the osx and windows binaries from appveyor/bintray\!"
-echo "https://dl.bintray.com/docker-compose/$BRANCH/"
-echo "https://ci.appveyor.com/project/docker/compose"
-echo
diff --git a/script/release/const.py b/script/release/const.py
new file mode 100644
index 00000000..8c90eebc
--- /dev/null
+++ b/script/release/const.py
@@ -0,0 +1,4 @@
+import os
+
+
+REPO_ROOT = os.path.join(os.path.dirname(__file__), '..', '..')
diff --git a/script/release/contributors b/script/release/contributors
deleted file mode 100755
index 4657dd80..00000000
--- a/script/release/contributors
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-set -e
-
-
-function usage() {
- >&2 cat << EOM
-Print the list of github contributors for the release
-
-Usage:
-
- $0 <previous release tag>
-EOM
- exit 1
-}
-
-[[ -n "$1" ]] || usage
-PREV_RELEASE=$1
-BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-URL="https://api.github.com/repos/docker/compose/compare"
-
-contribs=$(curl -sf "$URL/$PREV_RELEASE...$BRANCH" | \
- jq -r '.commits[].author.login' | \
- sort | \
- uniq -c | \
- sort -nr)
-
-echo "Contributions by user: "
-echo "$contribs"
-echo
-echo "$contribs" | awk '{print "@"$2","}' | xargs
diff --git a/script/release/download-binaries b/script/release/download-binaries
deleted file mode 100755
index 0b187f6c..00000000
--- a/script/release/download-binaries
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-function usage() {
- >&2 cat << EOM
-Download Linux, Mac OS and Windows binaries from remote endpoints
-
-Usage:
-
- $0 <version>
-
-Options:
-
- version version string for the release (ex: 1.6.0)
-
-EOM
- exit 1
-}
-
-
-[ -n "$1" ] || usage
-VERSION=$1
-BASE_BINTRAY_URL=https://dl.bintray.com/docker-compose/bump-$VERSION/
-DESTINATION=binaries-$VERSION
-APPVEYOR_URL=https://ci.appveyor.com/api/projects/docker/compose/\
-artifacts/dist%2Fdocker-compose-Windows-x86_64.exe?branch=bump-$VERSION
-
-mkdir $DESTINATION
-
-
-wget -O $DESTINATION/docker-compose-Darwin-x86_64 $BASE_BINTRAY_URL/docker-compose-Darwin-x86_64
-wget -O $DESTINATION/docker-compose-Linux-x86_64 $BASE_BINTRAY_URL/docker-compose-Linux-x86_64
-wget -O $DESTINATION/docker-compose-Windows-x86_64.exe $APPVEYOR_URL
-
-echo -e "\n\nCopy the following lines into the integrity check table in the release notes:\n\n"
-cd $DESTINATION
-rm -rf *.sha256
-ls | xargs sha256sum | sed 's/ / | /g' | sed -r 's/([^ |]+)/`\1`/g'
-ls | xargs -I@ bash -c "sha256sum @ | cut -d' ' -f1 > @.sha256"
-cd -
diff --git a/script/release/generate_changelog.sh b/script/release/generate_changelog.sh
new file mode 100755
index 00000000..018387de
--- /dev/null
+++ b/script/release/generate_changelog.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+set -e
+set -x
+
+## Usage:
+## generate_changelog.sh PREVIOUS_TAG..HEAD
+
+# configure refs so we get pull-requests metadata
+git config --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pull/*
+git fetch origin
+
+RANGE=${1:-"$(git describe --tags --abbrev=0 HEAD^)..HEAD"}
+echo "Generate changelog for range ${RANGE}"
+echo
+
+pullrequests() {
+ for commit in $(git log ${RANGE} --format='format:%H'); do
+        # Get the oldest remotes/origin/pull/* branch containing this commit, i.e. the one that introduced it
+ git branch -a --sort=committerdate --contains $commit --list 'origin/pull/*' | head -1 | cut -d'/' -f4
+ done
+}
+
+changes=$(pullrequests | uniq)
+
+echo "pull requests merged within range:"
+echo $changes
+
+echo '#Features' > FEATURES.md
+echo '#Bugs' > BUGS.md
+for pr in $changes; do
+ curl -fs -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/docker/compose/pulls/${pr} -o PR.json
+
+ cat PR.json | jq -r ' select( .labels[].name | contains("kind/feature") ) | "- "+.title' >> FEATURES.md
+ cat PR.json | jq -r ' select( .labels[].name | contains("kind/bug") ) | "- "+.title' >> BUGS.md
+done
+
+echo ${TAG_NAME} > CHANGELOG.md
+echo >> CHANGELOG.md
+cat FEATURES.md >> CHANGELOG.md
+echo >> CHANGELOG.md
+cat BUGS.md >> CHANGELOG.md
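A rough Python equivalent of the label-filtering step above, using the GitHub API via `requests` (token handling and error checking are omitted, and the PR number is hypothetical):

```
import os
import requests

pr = 1234  # hypothetical pull request number
resp = requests.get(
    "https://api.github.com/repos/docker/compose/pulls/{}".format(pr),
    headers={"Authorization": "token {}".format(os.environ["GITHUB_TOKEN"])},
)
data = resp.json()

# Classify the PR title by its kind/* labels, as the jq filters do.
labels = {label["name"] for label in data["labels"]}
if any("kind/feature" in name for name in labels):
    print("- " + data["title"])  # goes to FEATURES.md
if any("kind/bug" in name for name in labels):
    print("- " + data["title"])  # goes to BUGS.md
```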
diff --git a/script/release/make-branch b/script/release/make-branch
deleted file mode 100755
index b8a0cd31..00000000
--- a/script/release/make-branch
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-#
-# Prepare a new release branch
-#
-
-. "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
-
-function usage() {
- >&2 cat << EOM
-Create a new release branch 'release-<version>'
-
-Usage:
-
- $0 <version> [<base_version>]
-
-Options:
-
- version version string for the release (ex: 1.6.0)
- base_version branch or tag to start from. Defaults to master. For
- bug-fix releases use the previous stage release tag.
-
-EOM
- exit 1
-}
-
-
-[ -n "$1" ] || usage
-VERSION=$1
-BRANCH=bump-$VERSION
-REPO=docker/compose
-GITHUB_REPO=git@github.com:$REPO
-
-if [ -z "$2" ]; then
- BASE_VERSION="master"
-else
- BASE_VERSION=$2
-fi
-
-
-DEFAULT_REMOTE=release
-REMOTE="$(find_remote "$GITHUB_REPO")"
-# If we don't have a docker remote add one
-if [ -z "$REMOTE" ]; then
- echo "Creating $DEFAULT_REMOTE remote"
- git remote add ${DEFAULT_REMOTE} ${GITHUB_REPO}
-fi
-
-# handle the difference between a branch and a tag
-if [ -z "$(git name-rev --tags $BASE_VERSION | grep tags)" ]; then
- BASE_VERSION=$REMOTE/$BASE_VERSION
-fi
-
-echo "Creating a release branch $VERSION from $BASE_VERSION"
-read -n1 -r -p "Continue? (ctrl+c to cancel)"
-git fetch $REMOTE -p
-git checkout -b $BRANCH $BASE_VERSION
-
-echo "Merging remote release branch into new release branch"
-git merge --strategy=ours --no-edit $REMOTE/release
-
-# Store the release version for this branch in git, so that other release
-# scripts can use it
-git config "branch.${BRANCH}.release" $VERSION
-
-
-editor=${EDITOR:-vim}
-
-echo "Update versions in compose/__init__.py, script/run/run.sh"
-$editor compose/__init__.py
-$editor script/run/run.sh
-
-
-echo "Write release notes in CHANGELOG.md"
-browser "https://github.com/docker/compose/issues?q=milestone%3A$VERSION+is%3Aclosed"
-$editor CHANGELOG.md
-
-
-git diff
-echo "Verify changes before commit. Exit the shell to commit changes"
-$SHELL || true
-git commit -a -m "Bump $VERSION" --signoff --no-verify
-
-
-echo "Push branch to docker remote"
-git push $REMOTE
-browser https://github.com/$REPO/compare/docker:release...$BRANCH?expand=1
diff --git a/script/release/push-release b/script/release/push-release
deleted file mode 100755
index 0578aaff..00000000
--- a/script/release/push-release
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/bin/bash
-#
-# Create the official release
-#
-
-. "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
-
-function usage() {
- >&2 cat << EOM
-Publish a release by building all artifacts and pushing them.
-
-This script requires that 'git config branch.${BRANCH}.release' is set to the
-release version for the release branch.
-
-EOM
- exit 1
-}
-
-BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-VERSION="$(git config "branch.${BRANCH}.release")" || usage
-
-if [ -z "$(command -v jq 2> /dev/null)" ]; then
- >&2 echo "$0 requires https://stedolan.github.io/jq/"
- >&2 echo "Please install it and make sure it is available on your \$PATH."
- exit 2
-fi
-
-
-if [ -z "$(command -v pandoc 2> /dev/null)" ]; then
- >&2 echo "$0 requires http://pandoc.org/"
- >&2 echo "Please install it and make sure it is available on your \$PATH."
- exit 2
-fi
-
-API=https://api.github.com/repos
-REPO=docker/compose
-GITHUB_REPO=git@github.com:$REPO
-
-# Check the build status is green
-sha=$(git rev-parse HEAD)
-url=$API/$REPO/statuses/$sha
-build_status=$(curl -s $url | jq -r '.[0].state')
-if [ -n "$SKIP_BUILD_CHECK" ]; then
- echo "Skipping build status check..."
-elif [[ "$build_status" != "success" ]]; then
- >&2 echo "Build status is $build_status, but it should be success."
- exit -1
-fi
-
-echo "Tagging the release as $VERSION"
-git tag $VERSION
-git push $GITHUB_REPO $VERSION
-
-echo "Uploading the docker image"
-docker push docker/compose:$VERSION
-
-echo "Uploading the compose-tests image"
-docker push docker/compose-tests:latest
-docker push docker/compose-tests:$VERSION
-
-echo "Uploading package to PyPI"
-pandoc -f markdown -t rst README.md -o README.rst
-sed -i -e 's/logo.png?raw=true/https:\/\/github.com\/docker\/compose\/raw\/master\/logo.png?raw=true/' README.rst
-./script/build/write-git-sha
-python setup.py sdist bdist_wheel
-if [ "$(command -v twine 2> /dev/null)" ]; then
- twine upload ./dist/docker-compose-${VERSION/-/}.tar.gz ./dist/docker_compose-${VERSION/-/}-py2.py3-none-any.whl
-else
- python setup.py upload
-fi
-
-echo "Testing pip package"
-deactivate || true
-virtualenv venv-test
-source venv-test/bin/activate
-pip install docker-compose==$VERSION
-docker-compose version
-deactivate
-rm -rf venv-test
-
-echo "Now publish the github release, and test the downloads."
-echo "Email maintainers@dockerproject.org and engineering@docker.com about the new release."
diff --git a/script/release/rebase-bump-commit b/script/release/rebase-bump-commit
deleted file mode 100755
index 3c2ae72b..00000000
--- a/script/release/rebase-bump-commit
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-#
-# Move the "bump to <version>" commit to the HEAD of the branch
-#
-
-. "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
-
-function usage() {
- >&2 cat << EOM
-Move the "bump to <version>" commit to the HEAD of the branch
-
-This script requires that 'git config branch.${BRANCH}.release' is set to the
-release version for the release branch.
-
-EOM
- exit 1
-}
-
-
-BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-VERSION="$(git config "branch.${BRANCH}.release")" || usage
-
-
-COMMIT_MSG="Bump $VERSION"
-sha="$(git log --grep "$COMMIT_MSG\$" --format="%H")"
-if [ -z "$sha" ]; then
- >&2 echo "No commit with message \"$COMMIT_MSG\""
- exit 2
-fi
-if [[ "$sha" == "$(git rev-parse HEAD)" ]]; then
- >&2 echo "Bump commit already at HEAD"
- exit 0
-fi
-
-commits=$(git log --format="%H" "$sha..HEAD" | wc -l | xargs echo)
-
-git rebase --onto $sha~1 HEAD~$commits $BRANCH
-git cherry-pick $sha
diff --git a/script/release/release.md.tmpl b/script/release/release.md.tmpl
new file mode 100644
index 00000000..4d0ebe92
--- /dev/null
+++ b/script/release/release.md.tmpl
@@ -0,0 +1,34 @@
+If you're a Mac or Windows user, the best way to install Compose and keep it up-to-date is **[Docker Desktop for Mac and Windows](https://www.docker.com/products/docker-desktop)**.
+
+Docker Desktop will automatically install the latest version of Docker Engine for you.
+
+Alternatively, you can use the usual commands to install or upgrade Compose:
+
+```
+curl -L https://github.com/docker/compose/releases/download/{{version}}/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
+chmod +x /usr/local/bin/docker-compose
+```
+
+See the [install docs](https://docs.docker.com/compose/install/) for more install options and instructions.
+
+## Compose file format compatibility matrix
+
+| Compose file format | Docker Engine |
+| --- | --- |
+{% for engine, formats in compat_matrix.items() -%}
+| {% for format in formats %}{{format}}{% if not loop.last %}, {% endif %}{% endfor %} | {{engine}}+ |
+{% endfor -%}
+
+## Changes
+
+{{changelog}}
+
+Thanks to {% for name in contributors %}@{{name}}{% if not loop.last %}, {% endif %}{% endfor %} for contributing to this release!
+
+## Integrity check
+
+| Binary name | SHA-256 sum |
+| --- | --- |
+{% for filename, sha in integrity.items() -%}
+| `{{filename}}` | `{{sha[1]}}` |
+{% endfor -%}
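For illustration, the template can be rendered with Jinja2 roughly like this; the context values below are invented placeholders, and the `(path, sha)` shape of the `integrity` values is an assumption inferred from the `sha[1]` lookup above:

```
from jinja2 import Template

with open("script/release/release.md.tmpl") as f:
    template = Template(f.read())

print(template.render(
    version="1.29.2",                                    # placeholder
    compat_matrix={"19.03.0": ["3.8"], "17.06.0": ["3.3", "2.3"]},
    changelog="- Example change",                        # placeholder
    contributors=["alice", "bob"],                       # placeholders
    integrity={"docker-compose-Linux-x86_64": ("dist/...", "abc123")},
))
```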
diff --git a/script/release/release.py b/script/release/release.py
new file mode 100755
index 00000000..c8e5e7f7
--- /dev/null
+++ b/script/release/release.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+import re
+
+import click
+from git import Repo
+from utils import update_init_py_version
+from utils import update_run_sh_version
+from utils import yesno
+
+VALID_VERSION_PATTERN = re.compile(r"^\d+\.\d+\.\d+(-rc\d+)?$")
+
+
+class Version(str):
+ def matching_groups(self):
+ match = VALID_VERSION_PATTERN.match(self)
+ if not match:
+            return ()
+
+ return match.groups()
+
+ def is_ga_version(self):
+ groups = self.matching_groups()
+ if not groups:
+ return False
+
+        rc_suffix = groups[0]
+ return not rc_suffix
+
+ def validate(self):
+ return len(self.matching_groups()) > 0
+
+ def branch_name(self):
+ if not self.validate():
+ return None
+
+ rc_part = self.matching_groups()[0]
+ ver = self
+ if rc_part:
+ ver = ver[:-len(rc_part)]
+
+ tokens = ver.split(".")
+ tokens[-1] = 'x'
+
+ return ".".join(tokens)
+
+
+def create_bump_commit(repository, version):
+ print('Creating bump commit...')
+    repository.commit('-a', '-s', '-m', 'Bump {}'.format(version), '--no-verify')
+
+
+def validate_environment(version, repository):
+ if not version.validate():
+ print('Version "{}" has an invalid format. This should follow D+.D+.D+(-rcD+). '
+ 'Like: 1.26.0 or 1.26.0-rc1'.format(version))
+ return False
+
+ expected_branch = version.branch_name()
+ if str(repository.active_branch) != expected_branch:
+ print('Cannot tag in this branch with version "{}". '
+ 'Please checkout "{}" to tag'.format(version, version.branch_name()))
+ return False
+ return True
+
+
+@click.group()
+def cli():
+ pass
+
+
+@cli.command()
+@click.argument('version')
+def tag(version):
+ """
+ Updates the version related files and tag
+ """
+ repo = Repo(".")
+ version = Version(version)
+ if not validate_environment(version, repo):
+ return
+
+ update_init_py_version(version)
+ update_run_sh_version(version)
+
+ input('Please add the release notes to the CHANGELOG.md file, then press Enter to continue.')
+ proceed = False
+ while not proceed:
+ print(repo.git.diff())
+ proceed = yesno('Are these changes ok? y/N ', default=False)
+
+ if repo.git.diff():
+ create_bump_commit(repo.git, version)
+ else:
+ print('No changes to commit. Exiting...')
+ return
+
+ repo.create_tag(version)
+
+    print('Please check the changes. If everything is OK, you just need to push with:\n'
+ '$ git push --tags upstream {}'.format(version.branch_name()))
+
+
+@cli.command()
+@click.argument('version')
+def push_latest(version):
+ """
+ TODO Pushes the latest tag pointing to a certain GA version
+ """
+ raise NotImplementedError
+
+
+@cli.command()
+@click.argument('version')
+def ghtemplate(version):
+ """
+ TODO Generates the github release page content
+ """
+ version = Version(version)
+ raise NotImplementedError
+
+
+if __name__ == '__main__':
+ cli()
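A quick sketch of how the `Version` helper maps a tag to its release branch (the import path is hypothetical; it assumes you run from `script/release/` with the script's dependencies installed):

```
from release import Version  # hypothetical import path

v = Version("1.26.0-rc1")
print(v.validate())       # True: matches D+.D+.D+(-rcD+)
print(v.is_ga_version())  # False: the -rc1 suffix marks a release candidate
print(v.branch_name())    # "1.26.x": patch component replaced by "x"

print(Version("1.26.0").is_ga_version())  # True
```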
diff --git a/script/release/utils.py b/script/release/utils.py
new file mode 100644
index 00000000..5ed53ec8
--- /dev/null
+++ b/script/release/utils.py
@@ -0,0 +1,44 @@
+import os
+import re
+
+from const import REPO_ROOT
+
+
+def update_init_py_version(version):
+ path = os.path.join(REPO_ROOT, 'compose', '__init__.py')
+ with open(path) as f:
+ contents = f.read()
+ contents = re.sub(r"__version__ = '[0-9a-z.-]+'", "__version__ = '{}'".format(version), contents)
+ with open(path, 'w') as f:
+ f.write(contents)
+
+
+def update_run_sh_version(version):
+ path = os.path.join(REPO_ROOT, 'script', 'run', 'run.sh')
+ with open(path) as f:
+ contents = f.read()
+ contents = re.sub(r'VERSION="[0-9a-z.-]+"', 'VERSION="{}"'.format(version), contents)
+ with open(path, 'w') as f:
+ f.write(contents)
+
+
+def yesno(prompt, default=None):
+ """
+ Prompt the user for a yes or no.
+
+ Can optionally specify a default value, which will only be
+ used if they enter a blank line.
+
+ Unrecognised input (anything other than "y", "n", "yes",
+ "no" or "") will return None.
+ """
+ answer = input(prompt).strip().lower()
+
+ if answer == "y" or answer == "yes":
+ return True
+ elif answer == "n" or answer == "no":
+ return False
+ elif answer == "":
+ return default
+ else:
+ return None
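The version-bump helpers are plain regex substitutions; a self-contained example of the `__init__.py` rewrite (the sample content is invented):

```
import re

contents = "__version__ = '1.29.0dev'\n"
new = re.sub(
    r"__version__ = '[0-9a-z.-]+'",
    "__version__ = '{}'".format("1.29.2"),
    contents,
)
print(new)  # __version__ = '1.29.2'
```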
diff --git a/script/run/run.sh b/script/run/run.sh
index 1e4bd985..5ed465c6 100755
--- a/script/run/run.sh
+++ b/script/run/run.sh
@@ -15,16 +15,16 @@
set -e
-VERSION="1.21.0"
+VERSION="1.29.2"
IMAGE="docker/compose:$VERSION"
# Setup options for connecting to docker host
if [ -z "$DOCKER_HOST" ]; then
- DOCKER_HOST="/var/run/docker.sock"
+ DOCKER_HOST='unix:///var/run/docker.sock'
fi
-if [ -S "$DOCKER_HOST" ]; then
- DOCKER_ADDR="-v $DOCKER_HOST:$DOCKER_HOST -e DOCKER_HOST"
+if [ -S "${DOCKER_HOST#unix://}" ]; then
+ DOCKER_ADDR="-v ${DOCKER_HOST#unix://}:${DOCKER_HOST#unix://} -e DOCKER_HOST"
else
DOCKER_ADDR="-e DOCKER_HOST -e DOCKER_TLS_VERIFY -e DOCKER_CERT_PATH"
fi
@@ -36,22 +36,56 @@ if [ "$(pwd)" != '/' ]; then
fi
if [ -n "$COMPOSE_FILE" ]; then
COMPOSE_OPTIONS="$COMPOSE_OPTIONS -e COMPOSE_FILE=$COMPOSE_FILE"
- compose_dir=$(realpath $(dirname $COMPOSE_FILE))
+ compose_dir="$(dirname "$COMPOSE_FILE")"
+ # canonicalize dir, do not use realpath or readlink -f
+ # since they are not available in some systems (e.g. macOS).
+ compose_dir="$(cd "$compose_dir" && pwd)"
+fi
+if [ -n "$COMPOSE_PROJECT_NAME" ]; then
+ COMPOSE_OPTIONS="-e COMPOSE_PROJECT_NAME $COMPOSE_OPTIONS"
fi
-# TODO: also check --file argument
if [ -n "$compose_dir" ]; then
VOLUMES="$VOLUMES -v $compose_dir:$compose_dir"
fi
if [ -n "$HOME" ]; then
- VOLUMES="$VOLUMES -v $HOME:$HOME -v $HOME:/root" # mount $HOME in /root to share docker.config
+ VOLUMES="$VOLUMES -v $HOME:$HOME -e HOME" # Pass in HOME to share docker.config and allow ~/-relative paths to work.
fi
+i=$#
+while [ $i -gt 0 ]; do
+ arg=$1
+ i=$((i - 1))
+ shift
+
+ case "$arg" in
+ -f|--file)
+ value=$1
+ i=$((i - 1))
+ shift
+ set -- "$@" "$arg" "$value"
+
+ file_dir=$(realpath "$(dirname "$value")")
+ VOLUMES="$VOLUMES -v $file_dir:$file_dir"
+ ;;
+ *) set -- "$@" "$arg" ;;
+ esac
+done
+
+# Setup environment variables for compose config and context
+ENV_OPTIONS=$(printenv | sed -E "/^PATH=.*/d; s/^/-e /g; s/=.*//g; s/\n/ /g")
# Only allocate tty if we detect one
-if [ -t 1 ]; then
- DOCKER_RUN_OPTIONS="-t"
+if [ -t 0 ] && [ -t 1 ]; then
+ DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS -t"
fi
-if [ -t 0 ]; then
- DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS -i"
+
+# Always set -i to support piped and terminal input in run/exec
+DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS -i"
+
+
+# Handle userns security
+if docker info --format '{{json .SecurityOptions}}' 2>/dev/null | grep -q 'name=userns'; then
+ DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS --userns=host"
fi
-exec docker run --rm $DOCKER_RUN_OPTIONS $DOCKER_ADDR $COMPOSE_OPTIONS $VOLUMES -w "$(pwd)" $IMAGE "$@"
+# shellcheck disable=SC2086
+exec docker run --rm $DOCKER_RUN_OPTIONS $DOCKER_ADDR $COMPOSE_OPTIONS $ENV_OPTIONS $VOLUMES -w "$(pwd)" $IMAGE "$@"
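The `-f`/`--file` rotation loop above preserves `$@` while collecting directories to mount; the same scan looks roughly like this in Python (a sketch of the intent, not the script's actual mechanism):

```
import os
import sys

volumes = []
args = sys.argv[1:]
i = 0
while i < len(args):
    # When a compose file is passed explicitly, mount its directory so
    # the containerized compose can read it.
    if args[i] in ("-f", "--file") and i + 1 < len(args):
        file_dir = os.path.realpath(os.path.dirname(args[i + 1]) or ".")
        volumes.extend(["-v", "{0}:{0}".format(file_dir)])
        i += 2
    else:
        i += 1

print(volumes)
```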
diff --git a/script/setup/osx b/script/setup/osx
index 972e79ef..289155ba 100755
--- a/script/setup/osx
+++ b/script/setup/osx
@@ -1,43 +1,110 @@
-#!/bin/bash
+#!/usr/bin/env bash
set -ex
-python_version() {
- python -V 2>&1
-}
+. $(dirname $0)/osx_helpers.sh
-python3_version() {
- python3 -V 2>&1
-}
-
-openssl_version() {
- python -c "import ssl; print ssl.OPENSSL_VERSION"
-}
+DEPLOYMENT_TARGET=${DEPLOYMENT_TARGET:-"$(macos_version)"}
+SDK_FETCH=
+if ! [ ${DEPLOYMENT_TARGET} == "$(macos_version)" ]; then
+ SDK_FETCH=1
+ # SDK URL from https://github.com/docker/golang-cross/blob/master/osx-cross.sh
+ SDK_URL=https://s3.dockerproject.org/darwin/v2/MacOSX${DEPLOYMENT_TARGET}.sdk.tar.xz
+ SDK_SHA1=dd228a335194e3392f1904ce49aff1b1da26ca62
+fi
-desired_python3_version="3.6.4"
-desired_python3_brew_version="3.6.4_2"
-python3_formula="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e69a9a592232fa5a82741f6acecffc2f1d198d/Formula/python3.rb"
+OPENSSL_VERSION=1.1.1h
+OPENSSL_URL=https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz
+OPENSSL_SHA1=8d0d099e8973ec851368c8c775e05e1eadca1794
-PATH="/usr/local/bin:$PATH"
+PYTHON_VERSION=3.9.0
+PYTHON_URL=https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz
+PYTHON_SHA1=5744a10ba989d2badacbab3c00cdcb83c83106c7
-if !(which brew); then
+#
+# Install prerequisites.
+#
+if ! [ -x "$(command -v brew)" ]; then
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
+if ! [ -x "$(command -v grealpath)" ]; then
+ brew update > /dev/null
+ brew install coreutils
+fi
+if ! [ -x "$(command -v python3)" ]; then
+ brew update > /dev/null
+ brew install python3
+fi
+if ! [ -x "$(command -v virtualenv)" ]; then
+ pip3 install virtualenv==20.2.2
+fi
-brew update > /dev/null
-
-if !(python3_version | grep "$desired_python3_version"); then
- if brew list | grep python3; then
- brew unlink python3
- fi
+#
+# Create toolchain directory.
+#
+BUILD_PATH="$(grealpath $(dirname $0)/../../build)"
+mkdir -p ${BUILD_PATH}
+TOOLCHAIN_PATH="${BUILD_PATH}/toolchain"
+mkdir -p ${TOOLCHAIN_PATH}
- brew install "$python3_formula"
- brew switch python3 "$desired_python3_brew_version"
+#
+# Set macOS SDK.
+#
+if [[ ${SDK_FETCH} && ! -f ${TOOLCHAIN_PATH}/MacOSX${DEPLOYMENT_TARGET}.sdk/SDKSettings.plist ]]; then
+ SDK_PATH=${TOOLCHAIN_PATH}/MacOSX${DEPLOYMENT_TARGET}.sdk
+ fetch_tarball ${SDK_URL} ${SDK_PATH} ${SDK_SHA1}
+else
+ SDK_PATH="$(xcode-select --print-path)/Platforms/MacOSX.platform/Developer/SDKs/MacOSX${DEPLOYMENT_TARGET}.sdk"
fi
-echo "*** Using $(python3_version) ; $(python_version)"
-echo "*** Using $(openssl_version)"
+#
+# Build OpenSSL.
+#
+OPENSSL_SRC_PATH=${TOOLCHAIN_PATH}/openssl-${OPENSSL_VERSION}
+if ! [[ $(${TOOLCHAIN_PATH}/bin/openssl version) == *"${OPENSSL_VERSION}"* ]]; then
+ rm -rf ${OPENSSL_SRC_PATH}
+ fetch_tarball ${OPENSSL_URL} ${OPENSSL_SRC_PATH} ${OPENSSL_SHA1}
+ (
+ cd ${OPENSSL_SRC_PATH}
+ export MACOSX_DEPLOYMENT_TARGET=${DEPLOYMENT_TARGET}
+ export SDKROOT=${SDK_PATH}
+ ./Configure darwin64-x86_64-cc --prefix=${TOOLCHAIN_PATH}
+ make install_sw install_dev
+ )
+fi
-if !(which virtualenv); then
- pip install virtualenv
+#
+# Build Python.
+#
+PYTHON_SRC_PATH=${TOOLCHAIN_PATH}/Python-${PYTHON_VERSION}
+if ! [[ $(${TOOLCHAIN_PATH}/bin/python3 --version) == *"${PYTHON_VERSION}"* ]]; then
+ rm -rf ${PYTHON_SRC_PATH}
+ fetch_tarball ${PYTHON_URL} ${PYTHON_SRC_PATH} ${PYTHON_SHA1}
+ (
+ cd ${PYTHON_SRC_PATH}
+ ./configure --prefix=${TOOLCHAIN_PATH} \
+ --enable-ipv6 --without-ensurepip --with-dtrace --without-gcc \
+ --datarootdir=${TOOLCHAIN_PATH}/share \
+ --datadir=${TOOLCHAIN_PATH}/share \
+ --enable-framework=${TOOLCHAIN_PATH}/Frameworks \
+ --with-openssl=${TOOLCHAIN_PATH} \
+ MACOSX_DEPLOYMENT_TARGET=${DEPLOYMENT_TARGET} \
+ CFLAGS="-isysroot ${SDK_PATH} -I${TOOLCHAIN_PATH}/include" \
+ CPPFLAGS="-I${SDK_PATH}/usr/include -I${TOOLCHAIN_PATH}/include" \
+ LDFLAGS="-isysroot ${SDK_PATH} -L ${TOOLCHAIN_PATH}/lib"
+ make -j 4
+ make install PYTHONAPPSDIR=${TOOLCHAIN_PATH}
+ make frameworkinstallextras PYTHONAPPSDIR=${TOOLCHAIN_PATH}/share
+ )
fi
+
+#
+# Smoke test built Python.
+#
+openssl_version ${TOOLCHAIN_PATH}
+
+echo ""
+echo "*** Targeting macOS: ${DEPLOYMENT_TARGET}"
+echo "*** Using SDK ${SDK_PATH}"
+echo "*** Using $(python3_version ${TOOLCHAIN_PATH})"
+echo "*** Using $(openssl_version ${TOOLCHAIN_PATH})"
diff --git a/script/setup/osx_helpers.sh b/script/setup/osx_helpers.sh
new file mode 100644
index 00000000..d60a30b6
--- /dev/null
+++ b/script/setup/osx_helpers.sh
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+
+# Check file's ($1) SHA1 ($2).
+check_sha1() {
+ echo -n "$2 *$1" | shasum -c -
+}
+
+# Download URL ($1) to path ($2).
+download() {
+ curl -L $1 -o $2
+}
+
+# Extract tarball ($1) in folder ($2).
+extract() {
+ tar xf $1 -C $2
+}
+
+# Download URL ($1), check its SHA1 ($3), and extract it alongside path ($2).
+fetch_tarball() {
+ url=$1
+ tarball=$2.tarball
+ sha1=$3
+ download $url $tarball
+ check_sha1 $tarball $sha1
+ extract $tarball $(dirname $tarball)
+}
+
+# Version of Python at toolchain path ($1).
+python3_version() {
+ $1/bin/python3 -V 2>&1
+}
+
+# Version of OpenSSL used by toolchain ($1) Python.
+openssl_version() {
+ $1/bin/python3 -c "import ssl; print(ssl.OPENSSL_VERSION)"
+}
+
+# System macOS version.
+macos_version() {
+ sw_vers -productVersion | cut -f1,2 -d'.'
+}
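For comparison, a fetch-verify-extract helper in Python under the same contract (URL, destination path, expected SHA1); a sketch, not a drop-in replacement:

```
import hashlib
import os
import tarfile
import urllib.request

def fetch_tarball(url, dest, sha1):
    tarball = dest + ".tarball"
    urllib.request.urlretrieve(url, tarball)
    # Verify the download before trusting it, as check_sha1 does.
    digest = hashlib.sha1(open(tarball, "rb").read()).hexdigest()
    if digest != sha1:
        raise ValueError("SHA1 mismatch: {} vs {}".format(digest, sha1))
    # Extract next to the tarball, as the shell helper does.
    with tarfile.open(tarball) as tar:
        tar.extractall(path=os.path.dirname(tarball))
```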
diff --git a/script/test/acceptance b/script/test/acceptance
new file mode 100755
index 00000000..92710a76
--- /dev/null
+++ b/script/test/acceptance
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+pytest --conformity --binary ${1:-docker-compose} tests/acceptance/
diff --git a/script/test/all b/script/test/all
index e48f73bb..63fc0255 100755
--- a/script/test/all
+++ b/script/test/all
@@ -8,11 +8,10 @@ set -e
docker run --rm \
--tty \
${GIT_VOLUME} \
- --entrypoint="tox" \
- "$TAG" -e pre-commit
+ "$TAG" tox -e pre-commit
get_versions="docker run --rm
- --entrypoint=/code/.tox/py27/bin/python
+ --entrypoint=/code/.tox/py37/bin/python
$TAG
/code/script/test/versions.py docker/docker-ce,moby/moby"
@@ -22,9 +21,8 @@ elif [ "$DOCKER_VERSIONS" == "all" ]; then
DOCKER_VERSIONS=$($get_versions -n 2 recent)
fi
-
BUILD_NUMBER=${BUILD_NUMBER-$USER}
-PY_TEST_VERSIONS=${PY_TEST_VERSIONS:-py27,py36}
+PY_TEST_VERSIONS=${PY_TEST_VERSIONS:-py37}
for version in $DOCKER_VERSIONS; do
>&2 echo "Running tests against Docker $version"
@@ -40,17 +38,23 @@ for version in $DOCKER_VERSIONS; do
trap "on_exit" EXIT
- repo="dockerswarm/dind"
-
docker run \
-d \
--name "$daemon_container" \
--privileged \
--volume="/var/lib/docker" \
- "$repo:$version" \
+ -e "DOCKER_TLS_CERTDIR=" \
+ "docker:$version-dind" \
dockerd -H tcp://0.0.0.0:2375 $DOCKER_DAEMON_ARGS \
2>&1 | tail -n 10
+ docker exec "$daemon_container" sh -c "apk add --no-cache git"
+
+ # copy docker config from host for authentication with Docker Hub
+ docker exec "$daemon_container" sh -c "mkdir /root/.docker"
+ docker cp /root/.docker/config.json $daemon_container:/root/.docker/config.json
+ docker exec "$daemon_container" sh -c "chmod 644 /root/.docker/config.json"
+
docker run \
--rm \
--tty \
diff --git a/script/test/ci b/script/test/ci
index 8d3aa56c..bbcedac4 100755
--- a/script/test/ci
+++ b/script/test/ci
@@ -20,6 +20,3 @@ export DOCKER_DAEMON_ARGS="--storage-driver=$STORAGE_DRIVER"
GIT_VOLUME="--volumes-from=$(hostname)"
. script/test/all
-
->&2 echo "Building Linux binary"
-. script/build/linux-entrypoint
diff --git a/script/test/default b/script/test/default
index aabb4e42..4f307f2e 100755
--- a/script/test/default
+++ b/script/test/default
@@ -3,17 +3,18 @@
set -ex
-TAG="docker-compose:$(git rev-parse --short HEAD)"
+TAG="docker-compose:alpine-$(git rev-parse --short HEAD)"
-# By default use the Dockerfile, but can be overriden to use an alternative file
-# e.g DOCKERFILE=Dockerfile.armhf script/test/default
+# By default use the Dockerfile, but can be overridden to use an alternative file
+# e.g DOCKERFILE=Dockerfile.s390x script/test/default
DOCKERFILE="${DOCKERFILE:-Dockerfile}"
+DOCKER_BUILD_TARGET="${DOCKER_BUILD_TARGET:-build}"
rm -rf coverage-html
# Create the host directory so it's owned by $USER
mkdir -p coverage-html
-docker build -f ${DOCKERFILE} -t "$TAG" .
+docker build -f "${DOCKERFILE}" -t "${TAG}" --target "${DOCKER_BUILD_TARGET}" .
GIT_VOLUME="--volume=$(pwd)/.git:/code/.git"
. script/test/all
diff --git a/script/test/versions.py b/script/test/versions.py
index f699f268..1a28dc19 100755
--- a/script/test/versions.py
+++ b/script/test/versions.py
@@ -21,10 +21,6 @@ For example, if the list of versions is:
`default` would return `1.7.1` and
`recent -n 3` would return `1.8.0-rc2 1.7.1 1.6.2`
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
import argparse
import itertools
import operator
@@ -36,23 +32,24 @@ import requests
GITHUB_API = 'https://api.github.com/repos'
+STAGES = ['tp', 'beta', 'rc']
+
-class Version(namedtuple('_Version', 'major minor patch rc edition')):
+class Version(namedtuple('_Version', 'major minor patch stage edition')):
@classmethod
def parse(cls, version):
edition = None
version = version.lstrip('v')
- version, _, rc = version.partition('-')
- if rc:
- if 'rc' not in rc:
- edition = rc
- rc = None
- elif '-' in rc:
- edition, rc = rc.split('-')
-
+ version, _, stage = version.partition('-')
+ if stage:
+ if not any(marker in stage for marker in STAGES):
+ edition = stage
+ stage = None
+ elif '-' in stage:
+ edition, stage = stage.split('-')
major, minor, patch = version.split('.', 3)
- return cls(major, minor, patch, rc, edition)
+ return cls(major, minor, patch, stage, edition)
@property
def major_minor(self):
@@ -63,14 +60,22 @@ class Version(namedtuple('_Version', 'major minor patch rc edition')):
"""Return a representation that allows this object to be sorted
correctly with the default comparator.
"""
- # rc releases should appear before official releases
- rc = (0, self.rc) if self.rc else (1, )
- return (int(self.major), int(self.minor), int(self.patch)) + rc
+ # non-GA releases should appear before GA releases
+ # Order: tp -> beta -> rc -> GA
+ if self.stage:
+ for st in STAGES:
+ if st in self.stage:
+ stage = (STAGES.index(st), self.stage)
+ break
+ else:
+ stage = (len(STAGES),)
+
+ return (int(self.major), int(self.minor), int(self.patch)) + stage
def __str__(self):
- rc = '-{}'.format(self.rc) if self.rc else ''
+ stage = '-{}'.format(self.stage) if self.stage else ''
edition = '-{}'.format(self.edition) if self.edition else ''
- return '.'.join(map(str, self[:3])) + edition + rc
+ return '.'.join(map(str, self[:3])) + edition + stage
BLACKLIST = [ # List of versions known to be broken and should not be used
@@ -113,9 +118,9 @@ def get_latest_versions(versions, num=1):
def get_default(versions):
- """Return a :class:`Version` for the latest non-rc version."""
+ """Return a :class:`Version` for the latest GA version."""
for version in versions:
- if not version.rc:
+ if not version.stage:
return version
@@ -123,8 +128,9 @@ def get_versions(tags):
for tag in tags:
try:
v = Version.parse(tag['name'])
- if v not in BLACKLIST:
- yield v
+ if v in BLACKLIST:
+ continue
+ yield v
except ValueError:
print("Skipping invalid tag: {name}".format(**tag), file=sys.stderr)
diff --git a/setup.py b/setup.py
index a847c61d..aaf33f74 100644
--- a/setup.py
+++ b/setup.py
@@ -1,9 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
import codecs
import os
import re
@@ -30,33 +25,34 @@ def find_version(*file_paths):
install_requires = [
- 'cached-property >= 1.2.0, < 2',
- 'docopt >= 0.6.1, < 0.7',
- 'PyYAML >= 3.10, < 4',
- 'requests >= 2.6.1',
- 'texttable >= 0.9.0',
- 'websocket-client >= 0.32.0, < 1.0',
- 'docker >= 3.2.1',
- 'dockerpty >= 0.4.1',
- 'six >= 1.3.0, < 2',
- 'jsonschema >= 2.5.1, < 3',
+ 'docopt >= 0.6.1, < 1',
+ 'PyYAML >= 3.10, < 6',
+ 'requests >= 2.20.0, < 3',
+ 'texttable >= 0.9.0, < 2',
+ 'websocket-client >= 0.32.0, < 1',
+ 'distro >= 1.5.0, < 2',
+ 'docker[ssh] >= 5',
+ 'dockerpty >= 0.4.1, < 1',
+ 'jsonschema >= 2.5.1, < 4',
+ 'python-dotenv >= 0.13.0, < 1',
]
tests_require = [
- 'pytest',
+ 'ddt >= 1.2.2, < 2',
+ 'pytest < 6',
]
if sys.version_info[:2] < (3, 4):
- tests_require.append('mock >= 1.0.1')
+ tests_require.append('mock >= 1.0.1, < 4')
extras_require = {
- ':python_version < "3.4"': ['enum34 >= 1.0.4, < 2'],
- ':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5'],
- ':python_version < "3.3"': ['ipaddress >= 1.0.16'],
- ':sys_platform == "win32"': ['colorama >= 0.3.9, < 0.4'],
+ ':python_version < "3.5"': ['backports.ssl_match_hostname >= 3.5, < 4'],
+ ':python_version < "3.8"': ['cached-property >= 1.2.0, < 2'],
+ ':sys_platform == "win32"': ['colorama >= 0.4, < 1'],
'socks': ['PySocks >= 1.5.6, != 1.5.7, < 2'],
+ 'tests': tests_require,
}
@@ -77,28 +73,36 @@ setup(
name='docker-compose',
version=find_version("compose", "__init__.py"),
description='Multi-container orchestration for Docker',
+ long_description=read('README.md'),
+ long_description_content_type='text/markdown',
url='https://www.docker.com/',
+ project_urls={
+ 'Documentation': 'https://docs.docker.com/compose/overview',
+ 'Changelog': 'https://github.com/docker/compose/blob/release/CHANGELOG.md',
+ 'Source': 'https://github.com/docker/compose',
+ 'Tracker': 'https://github.com/docker/compose/issues',
+ },
author='Docker, Inc.',
license='Apache License 2.0',
packages=find_packages(exclude=['tests.*', 'tests']),
include_package_data=True,
- test_suite='nose.collector',
install_requires=install_requires,
extras_require=extras_require,
tests_require=tests_require,
- entry_points="""
- [console_scripts]
- docker-compose=compose.cli.main:main
- """,
+ python_requires='>=3.4',
+ entry_points={
+ 'console_scripts': ['docker-compose=compose.cli.main:main'],
+ },
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
],
)
diff --git a/tests/__init__.py b/tests/__init__.py
index 1ac1b21c..9d732490 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,14 +1,2 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import sys
-
-if sys.version_info >= (2, 7):
- import unittest # NOQA
-else:
- import unittest2 as unittest # NOQA
-
-try:
- from unittest import mock
-except ImportError:
- import mock # NOQA
+import unittest # NOQA
+from unittest import mock # NOQA
diff --git a/tests/acceptance/cli_test.py b/tests/acceptance/cli_test.py
index 07570580..ab8d2c9d 100644
--- a/tests/acceptance/cli_test.py
+++ b/tests/acceptance/cli_test.py
@@ -1,10 +1,5 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import datetime
import json
-import os
import os.path
import re
import signal
@@ -12,17 +7,20 @@ import subprocess
import time
from collections import Counter
from collections import namedtuple
+from functools import reduce
from operator import attrgetter
import pytest
-import six
import yaml
from docker import errors
from .. import mock
+from ..helpers import BUSYBOX_IMAGE_WITH_TAG
from ..helpers import create_host_file
from compose.cli.command import get_project
from compose.config.errors import DuplicateOverrideFileFound
+from compose.const import COMPOSE_SPEC as VERSION
+from compose.const import COMPOSEFILE_V1 as V1
from compose.container import Container
from compose.project import OneOffFilter
from compose.utils import nanoseconds_from_time_seconds
@@ -32,30 +30,50 @@ from tests.integration.testcases import is_cluster
from tests.integration.testcases import no_cluster
from tests.integration.testcases import pull_busybox
from tests.integration.testcases import SWARM_SKIP_RM_VOLUMES
-from tests.integration.testcases import v2_1_only
-from tests.integration.testcases import v2_2_only
-from tests.integration.testcases import v2_only
-from tests.integration.testcases import v3_only
+
+DOCKER_COMPOSE_EXECUTABLE = 'docker-compose'
ProcessResult = namedtuple('ProcessResult', 'stdout stderr')
BUILD_CACHE_TEXT = 'Using cache'
-BUILD_PULL_TEXT = 'Status: Image is up to date for busybox:latest'
+BUILD_PULL_TEXT = 'Status: Image is up to date for busybox:1.27.2'
+COMPOSE_COMPATIBILITY_DICT = {
+ 'version': str(VERSION),
+ 'volumes': {'foo': {'driver': 'default'}},
+ 'networks': {'bar': {}},
+ 'services': {
+ 'foo': {
+ 'command': '/bin/true',
+ 'image': 'alpine:3.10.1',
+ 'scale': 3,
+ 'restart': 'always:7',
+ 'mem_limit': '300M',
+ 'mem_reservation': '100M',
+ 'cpus': 0.7,
+ 'volumes': ['foo:/bar:rw'],
+ 'networks': {'bar': None},
+ }
+ },
+}
-def start_process(base_dir, options):
+def start_process(base_dir, options, executable=None, env=None):
+ executable = executable or DOCKER_COMPOSE_EXECUTABLE
proc = subprocess.Popen(
- ['docker-compose'] + options,
+ [executable] + options,
+ stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
- cwd=base_dir)
+ cwd=base_dir,
+ env=env,
+ )
print("Running process: %s" % proc.pid)
return proc
-def wait_on_process(proc, returncode=0):
- stdout, stderr = proc.communicate()
+def wait_on_process(proc, returncode=0, stdin=None):
+ stdout, stderr = proc.communicate(input=stdin)
if proc.returncode != returncode:
print("Stderr: {}".format(stderr))
print("Stdout: {}".format(stdout))
@@ -63,6 +81,13 @@ def wait_on_process(proc, returncode=0):
return ProcessResult(stdout.decode('utf-8'), stderr.decode('utf-8'))
+def dispatch(base_dir, options,
+ project_options=None, returncode=0, stdin=None, executable=None, env=None):
+ project_options = project_options or []
+ proc = start_process(base_dir, project_options + options, executable=executable, env=env)
+ return wait_on_process(proc, returncode=returncode, stdin=stdin)
+
+
def wait_on_condition(condition, delay=0.1, timeout=40):
start_time = time.time()
while not condition():
@@ -77,7 +102,7 @@ def kill_service(service):
container.kill()
-class ContainerCountCondition(object):
+class ContainerCountCondition:
def __init__(self, project, expected):
self.project = project
@@ -90,7 +115,7 @@ class ContainerCountCondition(object):
return "waiting for counter count == %s" % self.expected
-class ContainerStateCondition(object):
+class ContainerStateCondition:
def __init__(self, client, name, status):
self.client = client
@@ -99,7 +124,14 @@ class ContainerStateCondition(object):
def __call__(self):
try:
- container = self.client.inspect_container(self.name)
+ if self.name.endswith('*'):
+ ctnrs = self.client.containers(all=True, filters={'name': self.name[:-1]})
+ if len(ctnrs) > 0:
+ container = self.client.inspect_container(ctnrs[0]['Id'])
+ else:
+ return False
+ else:
+ container = self.client.inspect_container(self.name)
return container['State']['Status'] == self.status
except errors.APIError:
return False
@@ -111,7 +143,7 @@ class ContainerStateCondition(object):
class CLITestCase(DockerClientTestCase):
def setUp(self):
- super(CLITestCase, self).setUp()
+ super().setUp()
self.base_dir = 'tests/fixtures/simple-composefile'
self.override_dir = None
@@ -133,7 +165,7 @@ class CLITestCase(DockerClientTestCase):
if hasattr(self, '_project'):
del self._project
- super(CLITestCase, self).tearDown()
+ super().tearDown()
@property
def project(self):
@@ -142,10 +174,8 @@ class CLITestCase(DockerClientTestCase):
self._project = get_project(self.base_dir, override_dir=self.override_dir)
return self._project
- def dispatch(self, options, project_options=None, returncode=0):
- project_options = project_options or []
- proc = start_process(self.base_dir, project_options + options)
- return wait_on_process(proc, returncode=returncode)
+ def dispatch(self, options, project_options=None, returncode=0, stdin=None):
+ return dispatch(self.base_dir, options, project_options, returncode, stdin)
def execute(self, container, cmd):
# Remove once Hijack and CloseNotifier sign a peace treaty
@@ -160,10 +190,17 @@ class CLITestCase(DockerClientTestCase):
def test_help(self):
self.base_dir = 'tests/fixtures/no-composefile'
result = self.dispatch(['help', 'up'], returncode=0)
- assert 'Usage: up [options] [--scale SERVICE=NUM...] [SERVICE...]' in result.stdout
+ assert 'Usage: up [options] [--scale SERVICE=NUM...] [--] [SERVICE...]' in result.stdout
# Prevent tearDown from trying to create a project
self.base_dir = None
+ def test_quiet_build(self):
+ self.base_dir = 'tests/fixtures/build-args'
+ result = self.dispatch(['build'], None)
+ quietResult = self.dispatch(['build', '-q'], None)
+ assert result.stdout != ""
+ assert quietResult.stdout == ""
+
def test_help_nonexistent(self):
self.base_dir = 'tests/fixtures/no-composefile'
result = self.dispatch(['help', 'foobar'], returncode=1)
@@ -172,14 +209,14 @@ class CLITestCase(DockerClientTestCase):
def test_shorthand_host_opt(self):
self.dispatch(
- ['-H={0}'.format(os.environ.get('DOCKER_HOST', 'unix://')),
+ ['-H={}'.format(os.environ.get('DOCKER_HOST', 'unix://')),
'up', '-d'],
returncode=0
)
def test_shorthand_host_opt_interactive(self):
self.dispatch(
- ['-H={0}'.format(os.environ.get('DOCKER_HOST', 'unix://')),
+ ['-H={}'.format(os.environ.get('DOCKER_HOST', 'unix://')),
'run', 'another', 'ls'],
returncode=0
)
@@ -200,6 +237,11 @@ class CLITestCase(DockerClientTestCase):
result = self.dispatch(['-H=tcp://doesnotexist:8000', 'ps'], returncode=1)
assert "Couldn't connect to Docker daemon" in result.stderr
+ def test_config_list_profiles(self):
+ self.base_dir = 'tests/fixtures/config-profiles'
+ result = self.dispatch(['config', '--profiles'])
+ assert set(result.stdout.rstrip().split('\n')) == {'debug', 'frontend', 'gui'}
+
def test_config_list_services(self):
self.base_dir = 'tests/fixtures/v2-full'
result = self.dispatch(['config', '--services'])
@@ -222,15 +264,36 @@ class CLITestCase(DockerClientTestCase):
self.base_dir = 'tests/fixtures/v2-full'
assert self.dispatch(['config', '--quiet']).stdout == ''
+ def test_config_stdin(self):
+ config = b"""version: "3.7"
+services:
+ web:
+ image: nginx
+ other:
+ image: alpine
+"""
+ result = self.dispatch(['-f', '-', 'config', '--services'], stdin=config)
+ assert set(result.stdout.rstrip().split('\n')) == {'web', 'other'}
+
+ def test_config_with_hash_option(self):
+ self.base_dir = 'tests/fixtures/v2-full'
+ result = self.dispatch(['config', '--hash=*'])
+ for service in self.project.get_services():
+ assert '{} {}\n'.format(service.name, service.config_hash) in result.stdout
+
+ svc = self.project.get_service('other')
+ result = self.dispatch(['config', '--hash=other'])
+ assert result.stdout == '{} {}\n'.format(svc.name, svc.config_hash)
+
def test_config_default(self):
self.base_dir = 'tests/fixtures/v2-full'
result = self.dispatch(['config'])
# assert there are no python objects encoded in the output
assert '!!' not in result.stdout
- output = yaml.load(result.stdout)
+ output = yaml.safe_load(result.stdout)
expected = {
- 'version': '2.0',
+ 'version': '2',
'volumes': {'data': {'driver': 'local'}},
'networks': {'front': {}},
'services': {
@@ -242,7 +305,7 @@ class CLITestCase(DockerClientTestCase):
'volumes_from': ['service:other:rw'],
},
'other': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'volumes': ['/data'],
},
@@ -253,8 +316,8 @@ class CLITestCase(DockerClientTestCase):
def test_config_restart(self):
self.base_dir = 'tests/fixtures/restart'
result = self.dispatch(['config'])
- assert yaml.load(result.stdout) == {
- 'version': '2.0',
+ assert yaml.safe_load(result.stdout) == {
+ 'version': '2',
'services': {
'never': {
'image': 'busybox',
@@ -282,37 +345,84 @@ class CLITestCase(DockerClientTestCase):
def test_config_external_network(self):
self.base_dir = 'tests/fixtures/networks'
result = self.dispatch(['-f', 'external-networks.yml', 'config'])
- json_result = yaml.load(result.stdout)
+ json_result = yaml.safe_load(result.stdout)
assert 'networks' in json_result
assert json_result['networks'] == {
'networks_foo': {
- 'external': True # {'name': 'networks_foo'}
+ 'external': True,
+ 'name': 'networks_foo'
},
'bar': {
- 'external': {'name': 'networks_bar'}
+ 'external': True,
+ 'name': 'networks_bar'
+ }
+ }
+
+ def test_config_with_dot_env(self):
+ self.base_dir = 'tests/fixtures/default-env-file'
+ result = self.dispatch(['config'])
+ json_result = yaml.safe_load(result.stdout)
+ assert json_result == {
+ 'version': '2.4',
+ 'services': {
+ 'web': {
+ 'command': 'true',
+ 'image': 'alpine:latest',
+ 'ports': [{'target': 5643}, {'target': 9999}]
+ }
+ }
+ }
+
+ def test_config_with_env_file(self):
+ self.base_dir = 'tests/fixtures/default-env-file'
+ result = self.dispatch(['--env-file', '.env2', 'config'])
+ json_result = yaml.safe_load(result.stdout)
+ assert json_result == {
+ 'version': '2.4',
+ 'services': {
+ 'web': {
+ 'command': 'false',
+ 'image': 'alpine:latest',
+ 'ports': [{'target': 5644}, {'target': 9998}]
+ }
+ }
+ }
+
+ def test_config_with_dot_env_and_override_dir(self):
+ self.base_dir = 'tests/fixtures/default-env-file'
+ result = self.dispatch(['--project-directory', 'alt/', 'config'])
+ json_result = yaml.safe_load(result.stdout)
+ assert json_result == {
+ 'version': '2.4',
+ 'services': {
+ 'web': {
+ 'command': 'echo uwu',
+ 'image': 'alpine:3.10.1',
+ 'ports': [{'target': 3341}, {'target': 4449}]
+ }
}
}
def test_config_external_volume_v2(self):
self.base_dir = 'tests/fixtures/volumes'
result = self.dispatch(['-f', 'external-volumes-v2.yml', 'config'])
- json_result = yaml.load(result.stdout)
+ json_result = yaml.safe_load(result.stdout)
assert 'volumes' in json_result
assert json_result['volumes'] == {
'foo': {
'external': True,
+ 'name': 'foo',
},
'bar': {
- 'external': {
- 'name': 'some_bar',
- },
+ 'external': True,
+ 'name': 'some_bar',
}
}
def test_config_external_volume_v2_x(self):
self.base_dir = 'tests/fixtures/volumes'
result = self.dispatch(['-f', 'external-volumes-v2-x.yml', 'config'])
- json_result = yaml.load(result.stdout)
+ json_result = yaml.safe_load(result.stdout)
assert 'volumes' in json_result
assert json_result['volumes'] == {
'foo': {
@@ -328,23 +438,23 @@ class CLITestCase(DockerClientTestCase):
def test_config_external_volume_v3_x(self):
self.base_dir = 'tests/fixtures/volumes'
result = self.dispatch(['-f', 'external-volumes-v3-x.yml', 'config'])
- json_result = yaml.load(result.stdout)
+ json_result = yaml.safe_load(result.stdout)
assert 'volumes' in json_result
assert json_result['volumes'] == {
'foo': {
'external': True,
+ 'name': 'foo',
},
'bar': {
- 'external': {
- 'name': 'some_bar',
- },
+ 'external': True,
+ 'name': 'some_bar',
}
}
def test_config_external_volume_v3_4(self):
self.base_dir = 'tests/fixtures/volumes'
result = self.dispatch(['-f', 'external-volumes-v3-4.yml', 'config'])
- json_result = yaml.load(result.stdout)
+ json_result = yaml.safe_load(result.stdout)
assert 'volumes' in json_result
assert json_result['volumes'] == {
'foo': {
@@ -360,7 +470,7 @@ class CLITestCase(DockerClientTestCase):
def test_config_external_network_v3_5(self):
self.base_dir = 'tests/fixtures/networks'
result = self.dispatch(['-f', 'external-networks-v3-5.yml', 'config'])
- json_result = yaml.load(result.stdout)
+ json_result = yaml.safe_load(result.stdout)
assert 'networks' in json_result
assert json_result['networks'] == {
'foo': {
@@ -376,8 +486,8 @@ class CLITestCase(DockerClientTestCase):
def test_config_v1(self):
self.base_dir = 'tests/fixtures/v1-config'
result = self.dispatch(['config'])
- assert yaml.load(result.stdout) == {
- 'version': '2.1',
+ assert yaml.safe_load(result.stdout) == {
+ 'version': str(V1),
'services': {
'net': {
'image': 'busybox',
@@ -396,12 +506,10 @@ class CLITestCase(DockerClientTestCase):
},
}
- @v3_only()
def test_config_v3(self):
self.base_dir = 'tests/fixtures/v3-full'
result = self.dispatch(['config'])
-
- assert yaml.load(result.stdout) == {
+ assert yaml.safe_load(result.stdout) == {
'version': '3.5',
'volumes': {
'foobar': {
@@ -426,11 +534,11 @@ class CLITestCase(DockerClientTestCase):
},
'resources': {
'limits': {
- 'cpus': '0.05',
+ 'cpus': 0.05,
'memory': '50M',
},
'reservations': {
- 'cpus': '0.01',
+ 'cpus': 0.01,
'memory': '20M',
},
},
@@ -474,26 +582,30 @@ class CLITestCase(DockerClientTestCase):
},
}
+ @pytest.mark.skip(reason='deprecated option')
def test_config_compatibility_mode(self):
self.base_dir = 'tests/fixtures/compatibility-mode'
result = self.dispatch(['--compatibility', 'config'])
- assert yaml.load(result.stdout) == {
- 'version': '2.3',
- 'volumes': {'foo': {'driver': 'default'}},
- 'services': {
- 'foo': {
- 'command': '/bin/true',
- 'image': 'alpine:3.7',
- 'scale': 3,
- 'restart': 'always:7',
- 'mem_limit': '300M',
- 'mem_reservation': '100M',
- 'cpus': 0.7,
- 'volumes': ['foo:/bar:rw']
- }
- }
- }
+ assert yaml.safe_load(result.stdout) == COMPOSE_COMPATIBILITY_DICT
+
+ @pytest.mark.skip(reason='deprecated option')
+ @mock.patch.dict(os.environ)
+ def test_config_compatibility_mode_from_env(self):
+ self.base_dir = 'tests/fixtures/compatibility-mode'
+ os.environ['COMPOSE_COMPATIBILITY'] = 'true'
+ result = self.dispatch(['config'])
+
+ assert yaml.safe_load(result.stdout) == COMPOSE_COMPATIBILITY_DICT
+
+ @pytest.mark.skip(reason='deprecated option')
+ @mock.patch.dict(os.environ)
+ def test_config_compatibility_mode_from_env_and_option_precedence(self):
+ self.base_dir = 'tests/fixtures/compatibility-mode'
+ os.environ['COMPOSE_COMPATIBILITY'] = 'false'
+ result = self.dispatch(['--compatibility', 'config'])
+
+ assert yaml.safe_load(result.stdout) == COMPOSE_COMPATIBILITY_DICT
def test_ps(self):
self.project.get_service('simple').create_container()
@@ -550,15 +662,25 @@ class CLITestCase(DockerClientTestCase):
assert 'with_build' in running.stdout
assert 'with_image' in running.stdout
+ def test_ps_all(self):
+ self.project.get_service('simple').create_container(one_off='blahblah')
+ result = self.dispatch(['ps'])
+ assert 'simple-composefile_simple_run_' not in result.stdout
+
+ result2 = self.dispatch(['ps', '--all'])
+ assert 'simple-composefile_simple_run_' in result2.stdout
+
def test_pull(self):
result = self.dispatch(['pull'])
assert 'Pulling simple' in result.stderr
assert 'Pulling another' in result.stderr
+ assert 'done' in result.stderr
+ assert 'failed' not in result.stderr
def test_pull_with_digest(self):
result = self.dispatch(['-f', 'digest.yml', 'pull', '--no-parallel'])
- assert 'Pulling simple (busybox:latest)...' in result.stderr
+ assert 'Pulling simple ({})...'.format(BUSYBOX_IMAGE_WITH_TAG) in result.stderr
assert ('Pulling digest (busybox@'
'sha256:38a203e1986cf79639cfb9b2e1d6e773de84002feea2d4eb006b520'
'04ee8502d)...') in result.stderr
@@ -569,7 +691,7 @@ class CLITestCase(DockerClientTestCase):
'pull', '--ignore-pull-failures', '--no-parallel']
)
- assert 'Pulling simple (busybox:latest)...' in result.stderr
+ assert 'Pulling simple ({})...'.format(BUSYBOX_IMAGE_WITH_TAG) in result.stderr
assert 'Pulling another (nonexisting-image:latest)...' in result.stderr
assert ('repository nonexisting-image not found' in result.stderr or
'image library/nonexisting-image:latest not found' in result.stderr or
@@ -596,19 +718,27 @@ class CLITestCase(DockerClientTestCase):
result.stderr
)
+ def test_pull_can_build(self):
+ result = self.dispatch([
+ '-f', 'can-build-pull-failures.yml', 'pull'],
+ returncode=0
+ )
+ assert 'Some service image(s) must be built from source' in result.stderr
+ assert 'docker-compose build can_build' in result.stderr
+
def test_pull_with_no_deps(self):
self.base_dir = 'tests/fixtures/links-composefile'
result = self.dispatch(['pull', '--no-parallel', 'web'])
assert sorted(result.stderr.split('\n'))[1:] == [
- 'Pulling web (busybox:latest)...',
+ 'Pulling web (busybox:1.27.2)...',
]
def test_pull_with_include_deps(self):
self.base_dir = 'tests/fixtures/links-composefile'
result = self.dispatch(['pull', '--no-parallel', '--include-deps', 'web'])
assert sorted(result.stderr.split('\n'))[1:] == [
- 'Pulling db (busybox:latest)...',
- 'Pulling web (busybox:latest)...',
+ 'Pulling db (busybox:1.27.2)...',
+ 'Pulling web (busybox:1.27.2)...',
]
def test_build_plain(self):
@@ -626,6 +756,20 @@ class CLITestCase(DockerClientTestCase):
assert BUILD_CACHE_TEXT not in result.stdout
assert BUILD_PULL_TEXT not in result.stdout
+ def test_up_ignore_missing_build_directory(self):
+ self.base_dir = 'tests/fixtures/no-build'
+ result = self.dispatch(['up', '--no-build'])
+
+ assert 'alpine exited with code 0' in result.stdout
+ self.base_dir = None
+
+ def test_pull_ignore_missing_build_directory(self):
+ self.base_dir = 'tests/fixtures/no-build'
+ result = self.dispatch(['pull'])
+
+ assert 'Pulling my-alpine' in result.stderr
+ self.base_dir = None
+
def test_build_pull(self):
# Make sure we have the latest busybox already
pull_busybox(self.client)
@@ -648,7 +792,11 @@ class CLITestCase(DockerClientTestCase):
assert BUILD_CACHE_TEXT not in result.stdout
assert BUILD_PULL_TEXT in result.stdout
+ @mock.patch.dict(os.environ)
def test_build_log_level(self):
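+ # Disable BuildKit and the docker CLI builder, presumably so that build
+ # output flows through compose's own logging path, which the stderr
+ # assertion below relies on.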
+ os.environ['COMPOSE_DOCKER_CLI_BUILD'] = '0'
+ os.environ['DOCKER_BUILDKIT'] = '0'
+ self.test_env_file_relative_to_compose_file()
self.base_dir = 'tests/fixtures/simple-dockerfile'
result = self.dispatch(['--log-level', 'warning', 'build', 'simple'])
assert result.stderr == ''
@@ -689,13 +837,38 @@ class CLITestCase(DockerClientTestCase):
]
assert not containers
+ @pytest.mark.xfail(reason='Flaky on local')
+ def test_build_rm(self):
+ containers = [
+ Container.from_ps(self.project.client, c)
+ for c in self.project.client.containers(all=True)
+ ]
+
+ assert not containers
+
+ self.base_dir = 'tests/fixtures/simple-dockerfile'
+ self.dispatch(['build', '--no-rm', 'simple'], returncode=0)
+
+ containers = [
+ Container.from_ps(self.project.client, c)
+ for c in self.project.client.containers(all=True)
+ ]
+ assert containers
+
+ for c in self.project.client.containers(all=True):
+ self.addCleanup(self.project.client.remove_container, c, force=True)
+
+ @mock.patch.dict(os.environ)
def test_build_shm_size_build_option(self):
+ os.environ['COMPOSE_DOCKER_CLI_BUILD'] = '0'
pull_busybox(self.client)
self.base_dir = 'tests/fixtures/build-shm-size'
result = self.dispatch(['build', '--no-cache'], None)
assert 'shm_size: 96' in result.stdout
+ @mock.patch.dict(os.environ)
def test_build_memory_build_option(self):
+ os.environ['COMPOSE_DOCKER_CLI_BUILD'] = '0'
pull_busybox(self.client)
self.base_dir = 'tests/fixtures/build-memory'
result = self.dispatch(['build', '--no-cache', '--memory', '96m', 'service'], None)
@@ -728,32 +901,6 @@ class CLITestCase(DockerClientTestCase):
)
assert 'Favorite Touhou Character: hong.meiling' in result.stdout
- def test_bundle_with_digests(self):
- self.base_dir = 'tests/fixtures/bundle-with-digests/'
- tmpdir = pytest.ensuretemp('cli_test_bundle')
- self.addCleanup(tmpdir.remove)
- filename = str(tmpdir.join('example.dab'))
-
- self.dispatch(['bundle', '--output', filename])
- with open(filename, 'r') as fh:
- bundle = json.load(fh)
-
- assert bundle == {
- 'Version': '0.1',
- 'Services': {
- 'web': {
- 'Image': ('dockercloud/hello-world@sha256:fe79a2cfbd17eefc3'
- '44fb8419420808df95a1e22d93b7f621a7399fd1e9dca1d'),
- 'Networks': ['default'],
- },
- 'redis': {
- 'Image': ('redis@sha256:a84cb8f53a70e19f61ff2e1d5e73fb7ae62d'
- '374b2b7392de1e7d77be26ef8f7b'),
- 'Networks': ['default'],
- }
- },
- }
-
def test_build_override_dir(self):
self.base_dir = 'tests/fixtures/build-path-override-dir'
self.override_dir = os.path.abspath('tests/fixtures')
@@ -771,6 +918,13 @@ class CLITestCase(DockerClientTestCase):
assert 'does not exist, is not accessible, or is not a valid URL' in result.stderr
+ def test_build_parallel(self):
+ self.base_dir = 'tests/fixtures/build-multiple-composefile'
+ result = self.dispatch(['build', '--parallel'])
+ assert 'Successfully tagged build-multiple-composefile_a:latest' in result.stdout
+ assert 'Successfully tagged build-multiple-composefile_b:latest' in result.stdout
+ assert 'Successfully built' in result.stdout
+
def test_create(self):
self.dispatch(['create'])
service = self.project.get_service('simple')
@@ -895,7 +1049,6 @@ class CLITestCase(DockerClientTestCase):
result = self.dispatch(['down', '--rmi', 'bogus'], returncode=1)
assert '--rmi flag must be' in result.stderr
- @v2_only()
def test_down(self):
self.base_dir = 'tests/fixtures/v2-full'
@@ -909,11 +1062,11 @@ class CLITestCase(DockerClientTestCase):
result = self.dispatch(['down', '--rmi=local', '--volumes'])
assert 'Stopping v2-full_web_1' in result.stderr
assert 'Stopping v2-full_other_1' in result.stderr
- assert 'Stopping v2-full_web_run_2' in result.stderr
+ assert 'Stopping v2-full_web_run_' in result.stderr
assert 'Removing v2-full_web_1' in result.stderr
assert 'Removing v2-full_other_1' in result.stderr
- assert 'Removing v2-full_web_run_1' in result.stderr
- assert 'Removing v2-full_web_run_2' in result.stderr
+ assert 'Removing v2-full_web_run_' in result.stderr
+ assert 'Removing v2-full_web_run_' in result.stderr
assert 'Removing volume v2-full_data' in result.stderr
assert 'Removing image v2-full_web' in result.stderr
assert 'Removing image busybox' not in result.stderr
@@ -970,13 +1123,16 @@ class CLITestCase(DockerClientTestCase):
def test_up_attached(self):
self.base_dir = 'tests/fixtures/echo-services'
result = self.dispatch(['up', '--no-color'])
+ simple_name = self.project.get_service('simple').containers(stopped=True)[0].name_without_project
+ another_name = self.project.get_service('another').containers(
+ stopped=True
+ )[0].name_without_project
- assert 'simple_1 | simple' in result.stdout
- assert 'another_1 | another' in result.stdout
- assert 'simple_1 exited with code 0' in result.stdout
- assert 'another_1 exited with code 0' in result.stdout
+ assert '{} | simple'.format(simple_name) in result.stdout
+ assert '{} | another'.format(another_name) in result.stdout
+ assert '{} exited with code 0'.format(simple_name) in result.stdout
+ assert '{} exited with code 0'.format(another_name) in result.stdout
- @v2_only()
def test_up(self):
self.base_dir = 'tests/fixtures/v2-simple'
self.dispatch(['up', '-d'], None)
@@ -1008,7 +1164,6 @@ class CLITestCase(DockerClientTestCase):
for service in services:
assert self.lookup(container, service.name)
- @v2_only()
def test_up_no_start(self):
self.base_dir = 'tests/fixtures/v2-full'
self.dispatch(['up', '--no-start'], None)
@@ -1039,7 +1194,21 @@ class CLITestCase(DockerClientTestCase):
]
assert len(remote_volumes) > 0
- @v2_only()
+ def test_up_no_start_remove_orphans(self):
+ self.base_dir = 'tests/fixtures/v2-simple'
+ self.dispatch(['up', '--no-start'], None)
+
+ services = self.project.get_services()
+
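+ # Gather every stopped container across all services.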
+ stopped = [c for service in services for c in service.containers(stopped=True)]
+ assert len(stopped) == 2
+
+ self.dispatch(['-f', 'one-container.yml', 'up', '--no-start', '--remove-orphans'], None)
+ stopped2 = [c for service in services for c in service.containers(stopped=True)]
+ assert len(stopped2) == 1
+
def test_up_no_ansi(self):
self.base_dir = 'tests/fixtures/v2-simple'
result = self.dispatch(['--no-ansi', 'up', '-d'], None)
@@ -1047,7 +1216,6 @@ class CLITestCase(DockerClientTestCase):
assert "%c[1A" % 27 not in result.stderr
assert "%c[1B" % 27 not in result.stderr
- @v2_only()
def test_up_with_default_network_config(self):
filename = 'default-network-config.yml'
@@ -1061,7 +1229,6 @@ class CLITestCase(DockerClientTestCase):
assert networks[0]['Options']['com.docker.network.bridge.enable_icc'] == 'false'
- @v2_only()
def test_up_with_network_aliases(self):
filename = 'network-aliases.yml'
self.base_dir = 'tests/fixtures/networks'
@@ -1089,7 +1256,6 @@ class CLITestCase(DockerClientTestCase):
assert 'forward_facing' in front_aliases
assert 'ahead' in front_aliases
- @v2_only()
def test_up_with_network_internal(self):
self.require_api_version('1.23')
filename = 'network-internal.yml'
@@ -1107,7 +1273,6 @@ class CLITestCase(DockerClientTestCase):
assert networks[0]['Internal'] is True
- @v2_only()
def test_up_with_network_static_addresses(self):
filename = 'network-static-addresses.yml'
ipv4_address = '172.16.100.100'
@@ -1131,7 +1296,6 @@ class CLITestCase(DockerClientTestCase):
assert ipv4_address in ipam_config.values()
assert ipv6_address in ipam_config.values()
- @v2_only()
def test_up_with_networks(self):
self.base_dir = 'tests/fixtures/networks'
self.dispatch(['up', '-d'], None)
@@ -1179,7 +1343,6 @@ class CLITestCase(DockerClientTestCase):
# app has aliased db to "database"
assert self.lookup(app_container, "database")
- @v2_only()
def test_up_missing_network(self):
self.base_dir = 'tests/fixtures/networks'
@@ -1189,7 +1352,6 @@ class CLITestCase(DockerClientTestCase):
assert 'Service "web" uses an undefined network "foo"' in result.stderr
- @v2_only()
@no_cluster('container networks not supported in Swarm')
def test_up_with_network_mode(self):
c = self.client.create_container(
@@ -1228,7 +1390,6 @@ class CLITestCase(DockerClientTestCase):
assert not container_mode_container.get('NetworkSettings.Networks')
assert container_mode_container.get('HostConfig.NetworkMode') == container_mode_source
- @v2_only()
def test_up_external_networks(self):
filename = 'external-networks.yml'
@@ -1252,7 +1413,6 @@ class CLITestCase(DockerClientTestCase):
container = self.project.containers()[0]
assert sorted(list(container.get('NetworkSettings.Networks'))) == sorted(network_names)
- @v2_only()
def test_up_with_external_default_network(self):
filename = 'external-default.yml'
@@ -1275,7 +1435,6 @@ class CLITestCase(DockerClientTestCase):
container = self.project.containers()[0]
assert list(container.get('NetworkSettings.Networks')) == [network_name]
- @v2_1_only()
def test_up_with_network_labels(self):
filename = 'network-label.yml'
@@ -1295,7 +1454,6 @@ class CLITestCase(DockerClientTestCase):
assert 'label_key' in networks[0]['Labels']
assert networks[0]['Labels']['label_key'] == 'label_val'
- @v2_1_only()
def test_up_with_volume_labels(self):
filename = 'volume-label.yml'
@@ -1311,11 +1469,10 @@ class CLITestCase(DockerClientTestCase):
if v['Name'].split('/')[-1].startswith('{}_'.format(self.project.name))
]
- assert set([v['Name'].split('/')[-1] for v in volumes]) == set([volume_with_label])
+ assert {v['Name'].split('/')[-1] for v in volumes} == {volume_with_label}
assert 'label_key' in volumes[0]['Labels']
assert volumes[0]['Labels']['label_key'] == 'label_val'
- @v2_only()
def test_up_no_services(self):
self.base_dir = 'tests/fixtures/no-services'
self.dispatch(['up', '-d'], None)
@@ -1372,7 +1529,6 @@ class CLITestCase(DockerClientTestCase):
bar_container.id
)
- @v3_only()
def test_up_with_healthcheck(self):
def wait_on_health_status(container, status):
def condition():
@@ -1426,6 +1582,26 @@ class CLITestCase(DockerClientTestCase):
assert len(db.containers()) == 0
assert len(console.containers()) == 0
+ def test_up_with_attach_dependencies(self):
+ self.base_dir = 'tests/fixtures/echo-services-dependencies'
+ result = self.dispatch(['up', '--attach-dependencies', '--no-color', 'simple'], None)
+ simple_name = self.project.get_service('simple').containers(stopped=True)[0].name_without_project
+ another_name = self.project.get_service('another').containers(
+ stopped=True
+ )[0].name_without_project
+
+ assert '{} | simple'.format(simple_name) in result.stdout
+ assert '{} | another'.format(another_name) in result.stdout
+
+ def test_up_handles_aborted_dependencies(self):
+ self.base_dir = 'tests/fixtures/abort-on-container-exit-dependencies'
+ proc = start_process(
+ self.base_dir,
+ ['up', 'simple', '--attach-dependencies', '--abort-on-container-exit'])
+ wait_on_condition(ContainerCountCondition(self.project, 0))
+ proc.wait()
+ assert proc.returncode == 1
+
def test_up_with_force_recreate(self):
self.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
@@ -1486,7 +1662,6 @@ class CLITestCase(DockerClientTestCase):
os.kill(proc.pid, signal.SIGTERM)
wait_on_condition(ContainerCountCondition(self.project, 0))
- @v2_only()
def test_up_handles_force_shutdown(self):
self.base_dir = 'tests/fixtures/sleeps-composefile'
proc = start_process(self.base_dir, ['up', '-t', '200'])
@@ -1511,7 +1686,6 @@ class CLITestCase(DockerClientTestCase):
proc.wait()
assert proc.returncode == 1
- @v2_only()
@no_cluster('Container PID mode does not work across clusters')
def test_up_with_pid_mode(self):
c = self.client.create_container(
@@ -1537,6 +1711,123 @@ class CLITestCase(DockerClientTestCase):
host_mode_container = self.project.get_service('host').containers()[0]
assert host_mode_container.get('HostConfig.PidMode') == 'host'
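+ # The ipc-mode fixture is assumed to declare three services covering the
+ # "service:", "container:" and "shareable" IPC modes asserted below.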
+ @no_cluster('Container IPC mode does not work across clusters')
+ def test_up_with_ipc_mode(self):
+ c = self.client.create_container(
+ 'busybox', 'top', name='composetest_ipc_mode_container',
+ host_config={}
+ )
+ self.addCleanup(self.client.remove_container, c, force=True)
+ self.client.start(c)
+ container_mode_source = 'container:{}'.format(c['Id'])
+
+ self.base_dir = 'tests/fixtures/ipc-mode'
+
+ self.dispatch(['up', '-d'], None)
+
+ service_mode_source = 'container:{}'.format(
+ self.project.get_service('shareable').containers()[0].id)
+ service_mode_container = self.project.get_service('service').containers()[0]
+ assert service_mode_container.get('HostConfig.IpcMode') == service_mode_source
+
+ container_mode_container = self.project.get_service('container').containers()[0]
+ assert container_mode_container.get('HostConfig.IpcMode') == container_mode_source
+
+ shareable_mode_container = self.project.get_service('shareable').containers()[0]
+ assert shareable_mode_container.get('HostConfig.IpcMode') == 'shareable'
+
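+ # Assumed layout of the profiles fixture, inferred from the assertions
+ # below: foo has no profile, bar and baz belong to the "test" profile,
+ # zot belongs to "debug", and both baz and zot depend on bar.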
+ def test_profiles_up_with_no_profile(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ self.dispatch(['up'])
+
+ containers = self.project.containers(stopped=True)
+ service_names = [c.service for c in containers]
+
+ assert 'foo' in service_names
+ assert len(containers) == 1
+
+ def test_profiles_up_with_profile(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ self.dispatch(['--profile', 'test', 'up'])
+
+ containers = self.project.containers(stopped=True)
+ service_names = [c.service for c in containers]
+
+ assert 'foo' in service_names
+ assert 'bar' in service_names
+ assert 'baz' in service_names
+ assert len(containers) == 3
+
+ def test_profiles_up_invalid_dependency(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ result = self.dispatch(['--profile', 'debug', 'up'], returncode=1)
+
+ assert ('Service "bar" was pulled in as a dependency of service "zot" '
+ 'but is not enabled by the active profiles.') in result.stderr
+
+ def test_profiles_up_with_multiple_profiles(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ self.dispatch(['--profile', 'debug', '--profile', 'test', 'up'])
+
+ containers = self.project.containers(stopped=True)
+ service_names = [c.service for c in containers]
+
+ assert 'foo' in service_names
+ assert 'bar' in service_names
+ assert 'baz' in service_names
+ assert 'zot' in service_names
+ assert len(containers) == 4
+
+ def test_profiles_up_with_profile_enabled_by_service(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ self.dispatch(['up', 'bar'])
+
+ containers = self.project.containers(stopped=True)
+ service_names = [c.service for c in containers]
+
+ assert 'bar' in service_names
+ assert len(containers) == 1
+
+ def test_profiles_up_with_dependency_and_profile_enabled_by_service(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ self.dispatch(['up', 'baz'])
+
+ containers = self.project.containers(stopped=True)
+ service_names = [c.service for c in containers]
+
+ assert 'bar' in service_names
+ assert 'baz' in service_names
+ assert len(containers) == 2
+
+ def test_profiles_up_with_invalid_dependency_for_target_service(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ result = self.dispatch(['up', 'zot'], returncode=1)
+
+ assert ('Service "bar" was pulled in as a dependency of service "zot" '
+ 'but is not enabled by the active profiles.') in result.stderr
+
+ def test_profiles_up_with_profile_for_dependency(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ self.dispatch(['--profile', 'test', 'up', 'zot'])
+
+ containers = self.project.containers(stopped=True)
+ service_names = [c.service for c in containers]
+
+ assert 'bar' in service_names
+ assert 'zot' in service_names
+ assert len(containers) == 2
+
+ def test_profiles_up_with_merged_profiles(self):
+ self.base_dir = 'tests/fixtures/profiles'
+ self.dispatch(['-f', 'docker-compose.yml', '-f', 'merge-profiles.yml', 'up', 'zot'])
+
+ containers = self.project.containers(stopped=True)
+ service_names = [c.service for c in containers]
+
+ assert 'bar' in service_names
+ assert 'zot' in service_names
+ assert len(containers) == 2
+
def test_exec_without_tty(self):
self.base_dir = 'tests/fixtures/links-composefile'
self.dispatch(['up', '-d', 'console'])
@@ -1546,6 +1837,17 @@ class CLITestCase(DockerClientTestCase):
assert stderr == ""
assert stdout == "/\n"
+ @mock.patch.dict(os.environ)
+ def test_exec_novalue_var_dotenv_file(self):
+ os.environ['MYVAR'] = 'SUCCESS'
+ self.base_dir = 'tests/fixtures/exec-novalue-var'
+ self.dispatch(['up', '-d'])
+ assert len(self.project.containers()) == 1
+
+ stdout, stderr = self.dispatch(['exec', '-T', 'nginx', 'env'])
+ assert 'CHECK_VAR=SUCCESS' in stdout
+ assert not stderr
+
def test_exec_detach_long_form(self):
self.base_dir = 'tests/fixtures/links-composefile'
self.dispatch(['up', '--detach', 'console'])
@@ -1564,7 +1866,6 @@ class CLITestCase(DockerClientTestCase):
assert stdout == "operator\n"
assert stderr == ""
- @v3_only()
def test_exec_workdir(self):
self.base_dir = 'tests/fixtures/links-composefile'
os.environ['COMPOSE_API_VERSION'] = '1.35'
@@ -1574,7 +1875,6 @@ class CLITestCase(DockerClientTestCase):
stdout, stderr = self.dispatch(['exec', '-T', '--workdir', '/etc', 'console', 'ls'])
assert 'passwd' in stdout
- @v2_2_only()
def test_exec_service_with_environment_overridden(self):
name = 'service'
self.base_dir = 'tests/fixtures/environment-exec'
@@ -1619,7 +1919,6 @@ class CLITestCase(DockerClientTestCase):
assert len(db.containers()) == 1
assert len(console.containers()) == 0
- @v2_only()
def test_run_service_with_dependencies(self):
self.base_dir = 'tests/fixtures/v2-dependencies'
self.dispatch(['run', 'web', '/bin/true'], None)
@@ -1628,6 +1927,14 @@ class CLITestCase(DockerClientTestCase):
assert len(db.containers()) == 1
assert len(console.containers()) == 0
+ def test_run_service_with_unhealthy_dependencies(self):
+ self.base_dir = 'tests/fixtures/v2-unhealthy-dependencies'
+ result = self.dispatch(['run', 'web', '/bin/true'], returncode=1)
+ assert re.search(
+ re.compile('for web .*is unhealthy.*', re.MULTILINE),
+ result.stderr
+ )
+
def test_run_service_with_scaled_dependencies(self):
self.base_dir = 'tests/fixtures/v2-dependencies'
self.dispatch(['up', '-d', '--scale', 'db=2', '--scale', 'console=0'])
@@ -1667,22 +1974,23 @@ class CLITestCase(DockerClientTestCase):
self.dispatch(['run', 'implicit'])
service = self.project.get_service('implicit')
containers = service.containers(stopped=True, one_off=OneOffFilter.only)
- assert [c.human_readable_command for c in containers] == [u'/bin/sh -c echo "success"']
+ assert [c.human_readable_command for c in containers] == ['/bin/sh -c echo "success"']
self.dispatch(['run', 'explicit'])
service = self.project.get_service('explicit')
containers = service.containers(stopped=True, one_off=OneOffFilter.only)
- assert [c.human_readable_command for c in containers] == [u'/bin/true']
+ assert [c.human_readable_command for c in containers] == ['/bin/true']
@pytest.mark.skipif(SWARM_SKIP_RM_VOLUMES, reason='Swarm DELETE /containers/<id> bug')
def test_run_rm(self):
self.base_dir = 'tests/fixtures/volume'
proc = start_process(self.base_dir, ['run', '--rm', 'test'])
+ service = self.project.get_service('test')
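+ # One-off container names carry a random suffix in newer releases, so
+ # the conditions below match a glob instead of a fixed "_run_1" name.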
wait_on_condition(ContainerStateCondition(
self.project.client,
- 'volume_test_run_1',
- 'running'))
- service = self.project.get_service('test')
+ 'volume_test_run_*',
+ 'running')
+ )
containers = service.containers(one_off=OneOffFilter.only)
assert len(containers) == 1
mounts = containers[0].get('Mounts')
@@ -1930,7 +2238,6 @@ class CLITestCase(DockerClientTestCase):
container = service.containers(stopped=True, one_off=True)[0]
assert workdir == container.get('Config.WorkingDir')
- @v2_only()
def test_run_service_with_use_aliases(self):
filename = 'network-aliases.yml'
self.base_dir = 'tests/fixtures/networks'
@@ -1952,7 +2259,6 @@ class CLITestCase(DockerClientTestCase):
assert 'forward_facing' in front_aliases
assert 'ahead' in front_aliases
- @v2_only()
def test_run_interactive_connects_to_network(self):
self.base_dir = 'tests/fixtures/networks'
@@ -1975,10 +2281,9 @@ class CLITestCase(DockerClientTestCase):
for _, config in networks.items():
# TODO: once we drop support for API <1.24, this can be changed to:
# assert config['Aliases'] == [container.short_id]
- aliases = set(config['Aliases'] or []) - set([container.short_id])
+ aliases = set(config['Aliases'] or []) - {container.short_id}
assert not aliases
- @v2_only()
def test_run_detached_connects_to_network(self):
self.base_dir = 'tests/fixtures/networks'
self.dispatch(['up', '-d'])
@@ -1995,7 +2300,7 @@ class CLITestCase(DockerClientTestCase):
for _, config in networks.items():
# TODO: once we drop support for API <1.24, this can be changed to:
# assert config['Aliases'] == [container.short_id]
- aliases = set(config['Aliases'] or []) - set([container.short_id])
+ aliases = set(config['Aliases'] or []) - {container.short_id}
assert not aliases
assert self.lookup(container, 'app')
@@ -2005,53 +2310,47 @@ class CLITestCase(DockerClientTestCase):
proc = start_process(self.base_dir, ['run', '-T', 'simple', 'top'])
wait_on_condition(ContainerStateCondition(
self.project.client,
- 'simple-composefile_simple_run_1',
+ 'simple-composefile_simple_run_*',
'running'))
os.kill(proc.pid, signal.SIGINT)
wait_on_condition(ContainerStateCondition(
self.project.client,
- 'simple-composefile_simple_run_1',
+ 'simple-composefile_simple_run_*',
'exited'))
def test_run_handles_sigterm(self):
proc = start_process(self.base_dir, ['run', '-T', 'simple', 'top'])
wait_on_condition(ContainerStateCondition(
self.project.client,
- 'simple-composefile_simple_run_1',
+ 'simple-composefile_simple_run_*',
'running'))
os.kill(proc.pid, signal.SIGTERM)
wait_on_condition(ContainerStateCondition(
self.project.client,
- 'simple-composefile_simple_run_1',
+ 'simple-composefile_simple_run_*',
'exited'))
def test_run_handles_sighup(self):
proc = start_process(self.base_dir, ['run', '-T', 'simple', 'top'])
wait_on_condition(ContainerStateCondition(
self.project.client,
- 'simple-composefile_simple_run_1',
+ 'simple-composefile_simple_run_*',
'running'))
os.kill(proc.pid, signal.SIGHUP)
wait_on_condition(ContainerStateCondition(
self.project.client,
- 'simple-composefile_simple_run_1',
+ 'simple-composefile_simple_run_*',
'exited'))
@mock.patch.dict(os.environ)
def test_run_unicode_env_values_from_system(self):
value = 'ą, ć, ę, ł, ń, ó, ś, ź, ż'
- if six.PY2: # os.environ doesn't support unicode values in Py2
- os.environ['BAR'] = value.encode('utf-8')
- else: # ... and doesn't support byte values in Py3
- os.environ['BAR'] = value
+ os.environ['BAR'] = value
self.base_dir = 'tests/fixtures/unicode-environment'
- result = self.dispatch(['run', 'simple'])
-
- if six.PY2: # Can't retrieve output on Py3. See issue #3670
- assert value in result.stdout.strip()
+ self.dispatch(['run', 'simple'])
container = self.project.containers(one_off=OneOffFilter.only, stopped=True)[0]
environment = container.get('Config.Env')
@@ -2160,9 +2459,9 @@ class CLITestCase(DockerClientTestCase):
def test_start_no_containers(self):
result = self.dispatch(['start'], returncode=1)
+ assert 'failed' in result.stderr
assert 'No containers to start' in result.stderr
- @v2_only()
def test_up_logging(self):
self.base_dir = 'tests/fixtures/logging-composefile'
self.dispatch(['up', '-d'])
@@ -2230,6 +2529,7 @@ class CLITestCase(DockerClientTestCase):
assert 'another' in result.stdout
assert 'exited with code 0' in result.stdout
+ @pytest.mark.skip(reason="race condition between up and logs")
def test_logs_follow_logs_from_new_containers(self):
self.base_dir = 'tests/fixtures/logs-composefile'
self.dispatch(['up', '-d', 'simple'])
@@ -2237,20 +2537,47 @@ class CLITestCase(DockerClientTestCase):
proc = start_process(self.base_dir, ['logs', '-f'])
self.dispatch(['up', '-d', 'another'])
- wait_on_condition(ContainerStateCondition(
- self.project.client,
- 'logs-composefile_another_1',
- 'exited'))
+ another_name = self.project.get_service('another').get_container().name_without_project
+ wait_on_condition(
+ ContainerStateCondition(
+ self.project.client,
+ 'logs-composefile_another_*',
+ 'exited'
+ )
+ )
+ simple_name = self.project.get_service('simple').get_container().name_without_project
self.dispatch(['kill', 'simple'])
result = wait_on_process(proc)
assert 'hello' in result.stdout
assert 'test' in result.stdout
- assert 'logs-composefile_another_1 exited with code 0' in result.stdout
- assert 'logs-composefile_simple_1 exited with code 137' in result.stdout
+ assert '{} exited with code 0'.format(another_name) in result.stdout
+ assert '{} exited with code 137'.format(simple_name) in result.stdout
+
+ @pytest.mark.skip(reason="race condition between up and logs")
+ def test_logs_follow_logs_from_restarted_containers(self):
+ self.base_dir = 'tests/fixtures/logs-restart-composefile'
+ proc = start_process(self.base_dir, ['up'])
+
+ wait_on_condition(
+ ContainerStateCondition(
+ self.project.client,
+ 'logs-restart-composefile_another_*',
+ 'exited'
+ )
+ )
+ self.dispatch(['kill', 'simple'])
+
+ result = wait_on_process(proc)
+
+ assert result.stdout.count(
+ r'logs-restart-composefile_another_1 exited with code 1'
+ ) == 3
+ assert result.stdout.count('world') == 3
+ @pytest.mark.skip(reason="race condition between up and logs")
def test_logs_default(self):
self.base_dir = 'tests/fixtures/logs-composefile'
self.dispatch(['up', '-d'])
@@ -2274,17 +2601,17 @@ class CLITestCase(DockerClientTestCase):
self.dispatch(['up', '-d'])
result = self.dispatch(['logs', '-f', '-t'])
- assert re.search('(\d{4})-(\d{2})-(\d{2})T(\d{2})\:(\d{2})\:(\d{2})', result.stdout)
+ assert re.search(r'(\d{4})-(\d{2})-(\d{2})T(\d{2})\:(\d{2})\:(\d{2})', result.stdout)
def test_logs_tail(self):
self.base_dir = 'tests/fixtures/logs-tail-composefile'
self.dispatch(['up'])
result = self.dispatch(['logs', '--tail', '2'])
- assert 'c\n' in result.stdout
- assert 'd\n' in result.stdout
- assert 'a\n' not in result.stdout
- assert 'b\n' not in result.stdout
+ assert 'y\n' in result.stdout
+ assert 'z\n' in result.stdout
+ assert 'w\n' not in result.stdout
+ assert 'x\n' not in result.stdout
def test_kill(self):
self.dispatch(['up', '-d'], None)
@@ -2365,11 +2692,6 @@ class CLITestCase(DockerClientTestCase):
assert len(project.get_service('simple').containers()) == 0
assert len(project.get_service('another').containers()) == 0
- def test_scale_v2_2(self):
- self.base_dir = 'tests/fixtures/scale'
- result = self.dispatch(['scale', 'web=1'], returncode=1)
- assert 'incompatible with the v2.2 format' in result.stderr
-
def test_up_scale_scale_up(self):
self.base_dir = 'tests/fixtures/scale'
project = self.project
@@ -2377,10 +2699,12 @@ class CLITestCase(DockerClientTestCase):
self.dispatch(['up', '-d'])
assert len(project.get_service('web').containers()) == 2
assert len(project.get_service('db').containers()) == 1
+ assert len(project.get_service('worker').containers()) == 0
- self.dispatch(['up', '-d', '--scale', 'web=3'])
+ self.dispatch(['up', '-d', '--scale', 'web=3', '--scale', 'worker=1'])
assert len(project.get_service('web').containers()) == 3
assert len(project.get_service('db').containers()) == 1
+ assert len(project.get_service('worker').containers()) == 1
def test_up_scale_scale_down(self):
self.base_dir = 'tests/fixtures/scale'
@@ -2389,22 +2713,26 @@ class CLITestCase(DockerClientTestCase):
self.dispatch(['up', '-d'])
assert len(project.get_service('web').containers()) == 2
assert len(project.get_service('db').containers()) == 1
+ assert len(project.get_service('worker').containers()) == 0
self.dispatch(['up', '-d', '--scale', 'web=1'])
assert len(project.get_service('web').containers()) == 1
assert len(project.get_service('db').containers()) == 1
+ assert len(project.get_service('worker').containers()) == 0
def test_up_scale_reset(self):
self.base_dir = 'tests/fixtures/scale'
project = self.project
- self.dispatch(['up', '-d', '--scale', 'web=3', '--scale', 'db=3'])
+ self.dispatch(['up', '-d', '--scale', 'web=3', '--scale', 'db=3', '--scale', 'worker=3'])
assert len(project.get_service('web').containers()) == 3
assert len(project.get_service('db').containers()) == 3
+ assert len(project.get_service('worker').containers()) == 3
self.dispatch(['up', '-d'])
assert len(project.get_service('web').containers()) == 2
assert len(project.get_service('db').containers()) == 1
+ assert len(project.get_service('worker').containers()) == 0
def test_up_scale_to_zero(self):
self.base_dir = 'tests/fixtures/scale'
@@ -2413,10 +2741,12 @@ class CLITestCase(DockerClientTestCase):
self.dispatch(['up', '-d'])
assert len(project.get_service('web').containers()) == 2
assert len(project.get_service('db').containers()) == 1
+ assert len(project.get_service('worker').containers()) == 0
- self.dispatch(['up', '-d', '--scale', 'web=0', '--scale', 'db=0'])
+ self.dispatch(['up', '-d', '--scale', 'web=0', '--scale', 'db=0', '--scale', 'worker=0'])
assert len(project.get_service('web').containers()) == 0
assert len(project.get_service('db').containers()) == 0
+ assert len(project.get_service('worker').containers()) == 0
def test_port(self):
self.base_dir = 'tests/fixtures/ports-composefile'
@@ -2458,9 +2788,9 @@ class CLITestCase(DockerClientTestCase):
result = self.dispatch(['port', '--index=' + str(index), 'simple', str(number)])
return result.stdout.rstrip()
- assert get_port(3000) == containers[0].get_local_port(3000)
- assert get_port(3000, index=1) == containers[0].get_local_port(3000)
- assert get_port(3000, index=2) == containers[1].get_local_port(3000)
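+ # Startup order of scaled replicas is not deterministic, so the
+ # un-indexed lookup may return either container's port.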
+ assert get_port(3000) in (containers[0].get_local_port(3000), containers[1].get_local_port(3000))
+ assert get_port(3000, index=containers[0].number) == containers[0].get_local_port(3000)
+ assert get_port(3000, index=containers[1].number) == containers[1].get_local_port(3000)
assert get_port(3002) == ""
def test_events_json(self):
@@ -2479,7 +2809,7 @@ class CLITestCase(DockerClientTestCase):
str_iso_date, str_iso_time, container_info = string.split(' ', 2)
try:
return isinstance(datetime.datetime.strptime(
- '%s %s' % (str_iso_date, str_iso_time),
+ '{} {}'.format(str_iso_date, str_iso_time),
'%Y-%m-%d %H:%M:%S.%f'),
datetime.datetime)
except ValueError:
@@ -2496,7 +2826,7 @@ class CLITestCase(DockerClientTestCase):
container, = self.project.containers()
expected_template = ' container {} {}'
- expected_meta_info = ['image=busybox:latest', 'name=simple-composefile_simple_1']
+ expected_meta_info = ['image=busybox:1.27.2', 'name=simple-composefile_simple_']
assert expected_template.format('create', container.id) in lines[0]
assert expected_template.format('start', container.id) in lines[1]
@@ -2568,7 +2898,7 @@ class CLITestCase(DockerClientTestCase):
self.base_dir = 'tests/fixtures/extends'
self.dispatch(['up', '-d'], None)
- assert set([s.name for s in self.project.services]) == set(['mydb', 'myweb'])
+ assert {s.name for s in self.project.services} == {'mydb', 'myweb'}
# Sort by name so we get [db, web]
containers = sorted(
@@ -2578,14 +2908,11 @@ class CLITestCase(DockerClientTestCase):
assert len(containers) == 2
web = containers[1]
+ db_name = containers[0].name_without_project
- assert set(get_links(web)) == set(['db', 'mydb_1', 'extends_mydb_1'])
+ assert set(get_links(web)) == {'db', db_name, 'extends_{}'.format(db_name)}
- expected_env = set([
- "FOO=1",
- "BAR=2",
- "BAZ=2",
- ])
+ expected_env = {"FOO=1", "BAR=2", "BAZ=2"}
assert expected_env <= set(web.get('Config.Env'))
def test_top_services_not_running(self):
@@ -2612,17 +2939,27 @@ class CLITestCase(DockerClientTestCase):
self.base_dir = 'tests/fixtures/exit-code-from'
proc = start_process(
self.base_dir,
- ['up', '--abort-on-container-exit', '--exit-code-from', 'another'])
+ ['up', '--abort-on-container-exit', '--exit-code-from', 'another']
+ )
result = wait_on_process(proc, returncode=1)
-
assert 'exit-code-from_another_1 exited with code 1' in result.stdout
+ def test_exit_code_from_signal_stop(self):
+ self.base_dir = 'tests/fixtures/exit-code-from'
+ proc = start_process(
+ self.base_dir,
+ ['up', '--abort-on-container-exit', '--exit-code-from', 'simple']
+ )
+ result = wait_on_process(proc, returncode=137) # SIGKILL
+ name = self.project.get_service('another').containers(stopped=True)[0].name_without_project
+ assert '{} exited with code 1'.format(name) in result.stdout
+
def test_images(self):
self.project.get_service('simple').create_container()
result = self.dispatch(['images'])
assert 'busybox' in result.stdout
- assert 'simple-composefile_simple_1' in result.stdout
+ assert 'simple-composefile_simple_' in result.stdout
def test_images_default_composefile(self):
self.base_dir = 'tests/fixtures/multiple-composefiles'
@@ -2630,8 +2967,8 @@ class CLITestCase(DockerClientTestCase):
result = self.dispatch(['images'])
assert 'busybox' in result.stdout
- assert 'multiple-composefiles_another_1' in result.stdout
- assert 'multiple-composefiles_simple_1' in result.stdout
+ assert '_another_1' in result.stdout
+ assert '_simple_1' in result.stdout
@mock.patch.dict(os.environ)
def test_images_tagless_image(self):
@@ -2670,3 +3007,148 @@ class CLITestCase(DockerClientTestCase):
with pytest.raises(DuplicateOverrideFileFound):
get_project(self.base_dir, [])
self.base_dir = None
+
+ def test_images_use_service_tag(self):
+ pull_busybox(self.client)
+ self.base_dir = 'tests/fixtures/images-service-tag'
+ self.dispatch(['up', '-d', '--build'])
+ result = self.dispatch(['images'])
+
+ assert re.search(r'foo1.+test[ \t]+dev', result.stdout) is not None
+ assert re.search(r'foo2.+test[ \t]+prod', result.stdout) is not None
+ assert re.search(r'foo3.+test[ \t]+latest', result.stdout) is not None
+
+ def test_build_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ result = self.dispatch(['build', '--pull', '--', '--test-service'])
+
+ assert BUILD_PULL_TEXT in result.stdout
+
+ def test_events_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ events_proc = start_process(self.base_dir, ['events', '--json', '--', '--test-service'])
+ self.dispatch(['up', '-d', '--', '--test-service'])
+ wait_on_condition(ContainerCountCondition(self.project, 1))
+
+ os.kill(events_proc.pid, signal.SIGINT)
+ result = wait_on_process(events_proc, returncode=1)
+ lines = [json.loads(line) for line in result.stdout.rstrip().split('\n')]
+ assert Counter(e['action'] for e in lines) == {'create': 1, 'start': 1}
+
+ def test_exec_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--test-service'])
+ assert len(self.project.containers()) == 1
+
+ stdout, stderr = self.dispatch(['exec', '-T', '--', '--test-service', 'ls', '-1d', '/'])
+
+ assert stderr == ""
+ assert stdout == "/\n"
+
+ def test_images_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--test-service'])
+ result = self.dispatch(['images', '--', '--test-service'])
+
+ assert "busybox" in result.stdout
+
+ def test_kill_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--test-service'])
+ service = self.project.get_service('--test-service')
+
+ assert len(service.containers()) == 1
+ assert service.containers()[0].is_running
+
+ self.dispatch(['kill', '--', '--test-service'])
+
+ assert len(service.containers(stopped=True)) == 1
+ assert not service.containers(stopped=True)[0].is_running
+
+ def test_logs_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--log-service'])
+ result = self.dispatch(['logs', '--', '--log-service'])
+
+ assert 'hello' in result.stdout
+ assert 'exited with' not in result.stdout
+
+ def test_port_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--test-service'])
+ result = self.dispatch(['port', '--', '--test-service', '80'])
+
+ assert result.stdout.strip() == "0.0.0.0:8080"
+
+ def test_ps_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--test-service'])
+
+ result = self.dispatch(['ps', '--', '--test-service'])
+
+ assert 'flag-as-service-name_--test-service_1' in result.stdout
+
+ def test_pull_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ result = self.dispatch(['pull', '--', '--test-service'])
+
+ assert 'Pulling --test-service' in result.stderr
+ assert 'failed' not in result.stderr
+
+ def test_rm_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '--no-start', '--', '--test-service'])
+ service = self.project.get_service('--test-service')
+ assert len(service.containers(stopped=True)) == 1
+
+ self.dispatch(['rm', '--force', '--', '--test-service'])
+ assert len(service.containers(stopped=True)) == 0
+
+ def test_run_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ result = self.dispatch(['run', '--no-deps', '--', '--test-service', 'echo', '-hello'])
+
+ assert 'hello' in result.stdout
+ assert len(self.project.containers()) == 0
+
+ def test_stop_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--test-service'])
+ service = self.project.get_service('--test-service')
+ assert len(service.containers()) == 1
+ assert service.containers()[0].is_running
+
+ self.dispatch(['stop', '-t', '1', '--', '--test-service'])
+
+ assert len(service.containers(stopped=True)) == 1
+ assert not service.containers(stopped=True)[0].is_running
+
+ def test_restart_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--test-service'])
+ service = self.project.get_service('--test-service')
+ assert len(service.containers()) == 1
+ assert service.containers()[0].is_running
+
+ self.dispatch(['restart', '-t', '1', '--', '--test-service'])
+
+ assert len(service.containers()) == 1
+ assert service.containers()[0].is_running
+
+ def test_up_with_stop_process_flag(self):
+ self.base_dir = 'tests/fixtures/flag-as-service-name'
+ self.dispatch(['up', '-d', '--', '--test-service', '--log-service'])
+
+ service = self.project.get_service('--test-service')
+ another = self.project.get_service('--log-service')
+ assert len(service.containers()) == 1
+ assert len(another.containers()) == 1
+
+ def test_up_no_log_prefix(self):
+ self.base_dir = 'tests/fixtures/echo-services'
+ result = self.dispatch(['up', '--no-log-prefix'])
+
+ assert 'simple' in result.stdout
+ assert 'another' in result.stdout
+ assert 'exited with code 0' in result.stdout
+ assert 'exited with code 0' in result.stdout
diff --git a/tests/acceptance/context_test.py b/tests/acceptance/context_test.py
new file mode 100644
index 00000000..a5d0c147
--- /dev/null
+++ b/tests/acceptance/context_test.py
@@ -0,0 +1,44 @@
+import os
+import shutil
+import unittest
+
+from docker import ContextAPI
+
+from tests.acceptance.cli_test import dispatch
+
+
+class ContextTestCase(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.docker_dir = os.path.join(os.environ.get("HOME", "/tmp"), '.docker')
+ if not os.path.exists(cls.docker_dir):
+ os.makedirs(cls.docker_dir)
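+ # Write an empty config.json and point DOCKER_CONFIG at it so the tests
+ # only see the context created below. This assumes a disposable $HOME
+ # (e.g. on CI), since tearDownClass removes the whole directory.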
+ with open(os.path.join(cls.docker_dir, "config.json"), "w") as f:
+ f.write("{}")
+ cls.docker_config = os.path.join(cls.docker_dir, "config.json")
+ os.environ['DOCKER_CONFIG'] = cls.docker_config
+ ContextAPI.create_context("testcontext", host="tcp://doesnotexist:8000")
+
+ @classmethod
+ def tearDownClass(cls):
+ shutil.rmtree(cls.docker_dir, ignore_errors=True)
+
+ def setUp(self):
+ self.base_dir = 'tests/fixtures/simple-composefile'
+ self.override_dir = None
+
+ def dispatch(self, options, project_options=None, returncode=0, stdin=None):
+ return dispatch(self.base_dir, options, project_options, returncode, stdin)
+
+ def test_help(self):
+ result = self.dispatch(['help'], returncode=0)
+ assert '-c, --context NAME' in result.stdout
+
+ def test_fail_on_both_host_and_context_opt(self):
+ result = self.dispatch(['-H', 'unix://', '-c', 'default', 'up'], returncode=1)
+ assert '-H, --host and -c, --context are mutually exclusive' in result.stderr
+
+ def test_fail_run_on_inexistent_context(self):
+ result = self.dispatch(['-c', 'testcontext', 'up', '-d'], returncode=1)
+ assert "Couldn't connect to Docker daemon" in result.stderr
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 00000000..fd31a974
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,240 @@
+import pytest
+
+import tests.acceptance.cli_test
+
+# FIXME: `--conformity` currently skips all the acceptance tests listed below
+non_conformity_tests = [
+ "test_build_failed",
+ "test_build_failed_forcerm",
+ "test_build_log_level",
+ "test_build_memory_build_option",
+ "test_build_no_cache",
+ "test_build_no_cache_pull",
+ "test_build_override_dir",
+ "test_build_override_dir_invalid_path",
+ "test_build_parallel",
+ "test_build_plain",
+ "test_build_pull",
+ "test_build_rm",
+ "test_build_shm_size_build_option",
+ "test_build_with_buildarg_cli_override",
+ "test_build_with_buildarg_from_compose_file",
+ "test_build_with_buildarg_old_api_version",
+ "test_config_compatibility_mode",
+ "test_config_compatibility_mode_from_env",
+ "test_config_compatibility_mode_from_env_and_option_precedence",
+ "test_config_default",
+ "test_config_external_network",
+ "test_config_external_network_v3_5",
+ "test_config_external_volume_v2",
+ "test_config_external_volume_v2_x",
+ "test_config_external_volume_v3_4",
+ "test_config_external_volume_v3_x",
+ "test_config_list_services",
+ "test_config_list_volumes",
+ "test_config_quiet",
+ "test_config_quiet_with_error",
+ "test_config_restart",
+ "test_config_stdin",
+ "test_config_v1",
+ "test_config_v3",
+ "test_config_with_dot_env",
+ "test_config_with_dot_env_and_override_dir",
+ "test_config_with_env_file",
+ "test_config_with_hash_option",
+ "test_create",
+ "test_create_with_force_recreate",
+ "test_create_with_force_recreate_and_no_recreate",
+ "test_create_with_no_recreate",
+ "test_down",
+ "test_down_invalid_rmi_flag",
+ "test_down_signal",
+ "test_down_timeout",
+ "test_env_file_relative_to_compose_file",
+ "test_events_human_readable",
+ "test_events_json",
+ "test_exec_custom_user",
+ "test_exec_detach_long_form",
+ "test_exec_novalue_var_dotenv_file",
+ "test_exec_service_with_environment_overridden",
+ "test_exec_without_tty",
+ "test_exec_workdir",
+ "test_exit_code_from_signal_stop",
+ "test_expanded_port",
+ "test_forward_exitval",
+ "test_help",
+ "test_help_nonexistent",
+ "test_home_and_env_var_in_volume_path",
+ "test_host_not_reachable",
+ "test_host_not_reachable_volumes_from_container",
+ "test_host_not_reachable_volumes_from_container",
+ "test_images",
+ "test_images_default_composefile",
+ "test_images_tagless_image",
+ "test_images_use_service_tag",
+ "test_kill",
+ "test_kill_signal_sigstop",
+ "test_kill_stopped_service",
+ "test_logs_default",
+ "test_logs_follow",
+ "test_logs_follow_logs_from_new_containers",
+ "test_logs_follow_logs_from_restarted_containers",
+ "test_logs_invalid_service_name",
+ "test_logs_on_stopped_containers_exits",
+ "test_logs_tail",
+ "test_logs_timestamps",
+ "test_pause_no_containers",
+ "test_pause_unpause",
+ "test_port",
+ "test_port_with_scale",
+ "test_ps",
+ "test_ps_all",
+ "test_ps_alternate_composefile",
+ "test_ps_default_composefile",
+ "test_ps_services_filter_option",
+ "test_ps_services_filter_status",
+ "test_pull",
+ "test_pull_can_build",
+ "test_pull_with_digest",
+ "test_pull_with_ignore_pull_failures",
+ "test_pull_with_include_deps",
+ "test_pull_with_no_deps",
+ "test_pull_with_parallel_failure",
+ "test_pull_with_quiet",
+ "test_quiet_build",
+ "test_restart",
+ "test_restart_no_containers",
+ "test_restart_stopped_container",
+ "test_rm",
+ "test_rm_all",
+ "test_rm_stop",
+ "test_run_detached_connects_to_network",
+ "test_run_does_not_recreate_linked_containers",
+ "test_run_env_values_from_system",
+ "test_run_handles_sighup",
+ "test_run_handles_sigint",
+ "test_run_handles_sigterm",
+ "test_run_interactive_connects_to_network",
+ "test_run_label_flag",
+ "test_run_one_off_with_multiple_volumes",
+ "test_run_one_off_with_volume",
+ "test_run_one_off_with_volume_merge",
+ "test_run_rm",
+ "test_run_service_with_compose_file_entrypoint",
+ "test_run_service_with_compose_file_entrypoint_and_command_overridden",
+ "test_run_service_with_compose_file_entrypoint_and_empty_string_command",
+ "test_run_service_with_compose_file_entrypoint_overridden",
+ "test_run_service_with_dependencies",
+ "test_run_service_with_dockerfile_entrypoint",
+ "test_run_service_with_dockerfile_entrypoint_and_command_overridden",
+ "test_run_service_with_dockerfile_entrypoint_overridden",
+ "test_run_service_with_environment_overridden",
+ "test_run_service_with_explicitly_mapped_ip_ports",
+ "test_run_service_with_explicitly_mapped_ports",
+ "test_run_service_with_links",
+ "test_run_service_with_map_ports",
+ "test_run_service_with_scaled_dependencies",
+ "test_run_service_with_unset_entrypoint",
+ "test_run_service_with_use_aliases",
+ "test_run_service_with_user_overridden",
+ "test_run_service_with_user_overridden_short_form",
+ "test_run_service_with_workdir_overridden",
+ "test_run_service_with_workdir_overridden_short_form",
+ "test_run_service_without_links",
+ "test_run_service_without_map_ports",
+ "test_run_unicode_env_values_from_system",
+ "test_run_with_custom_name",
+ "test_run_with_expose_ports",
+ "test_run_with_no_deps",
+ "test_run_without_command",
+ "test_scale",
+ "test_scale_v2_2",
+ "test_shorthand_host_opt",
+ "test_shorthand_host_opt_interactive",
+ "test_start_no_containers",
+ "test_stop",
+ "test_stop_signal",
+ "test_top_processes_running",
+ "test_top_services_not_running",
+ "test_top_services_running",
+ "test_unpause_no_containers",
+ "test_up",
+ "test_up_attached",
+ "test_up_detached",
+ "test_up_detached_long_form",
+ "test_up_external_networks",
+ "test_up_handles_abort_on_container_exit",
+ "test_up_handles_abort_on_container_exit_code",
+ "test_up_handles_aborted_dependencies",
+ "test_up_handles_force_shutdown",
+ "test_up_handles_sigint",
+ "test_up_handles_sigterm",
+ "test_up_logging",
+ "test_up_logging_legacy",
+ "test_up_missing_network",
+ "test_up_no_ansi",
+ "test_up_no_services",
+ "test_up_no_start",
+ "test_up_no_start_remove_orphans",
+ "test_up_scale_reset",
+ "test_up_scale_scale_down",
+ "test_up_scale_scale_up",
+ "test_up_scale_to_zero",
+ "test_up_with_attach_dependencies",
+ "test_up_with_default_network_config",
+ "test_up_with_default_override_file",
+ "test_up_with_duplicate_override_yaml_files",
+ "test_up_with_extends",
+ "test_up_with_external_default_network",
+ "test_up_with_force_recreate",
+ "test_up_with_force_recreate_and_no_recreate",
+ "test_up_with_healthcheck",
+ "test_up_with_ignore_remove_orphans",
+ "test_up_with_links_v1",
+ "test_up_with_multiple_files",
+ "test_up_with_net_is_invalid",
+ "test_up_with_net_v1",
+ "test_up_with_network_aliases",
+ "test_up_with_network_internal",
+ "test_up_with_network_labels",
+ "test_up_with_network_mode",
+ "test_up_with_network_static_addresses",
+ "test_up_with_networks",
+ "test_up_with_no_deps",
+ "test_up_with_no_recreate",
+ "test_up_with_override_yaml",
+ "test_up_with_pid_mode",
+ "test_up_with_timeout",
+ "test_up_with_volume_labels",
+ "test_fail_on_both_host_and_context_opt",
+ "test_fail_run_on_inexistent_context",
+]
+
+
+def pytest_addoption(parser):
+ parser.addoption(
+ "--conformity",
+ action="store_true",
+ default=False,
+ help="Only runs tests that are not black listed as non conformity test. "
+ "The conformity tests check for compatibility with the Compose spec."
+ )
+ parser.addoption(
+ "--binary",
+ default=tests.acceptance.cli_test.DOCKER_COMPOSE_EXECUTABLE,
+ help="Forces the execution of a binary in the PATH. Default is `docker-compose`."
+ )
+
+
+def pytest_collection_modifyitems(config, items):
+ if not config.getoption("--conformity"):
+ return
+ if config.getoption("--binary"):
+ tests.acceptance.cli_test.DOCKER_COMPOSE_EXECUTABLE = config.getoption("--binary")
+
+ print("Binary -> {}".format(tests.acceptance.cli_test.DOCKER_COMPOSE_EXECUTABLE))
+ skip_non_conformity = pytest.mark.skip(reason="not a conformity test")
+ for item in items:
+ if item.name in non_conformity_tests:
+ print("Skipping '{}' when running in compatibility mode".format(item.name))
+ item.add_marker(skip_non_conformity)
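+
+
+# Example invocation (a sketch; `docker-compose-v2` is a hypothetical binary
+# name): run only the conformity subset against an alternative binary on the
+# PATH, skipping everything listed in `non_conformity_tests`:
+#
+#   pytest tests/acceptance --conformity --binary=docker-compose-v2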
diff --git a/tests/fixtures/UpperCaseDir/docker-compose.yml b/tests/fixtures/UpperCaseDir/docker-compose.yml
index b25beaf4..09cc9519 100644
--- a/tests/fixtures/UpperCaseDir/docker-compose.yml
+++ b/tests/fixtures/UpperCaseDir/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/abort-on-container-exit-0/docker-compose.yml b/tests/fixtures/abort-on-container-exit-0/docker-compose.yml
index ce41697b..77307ef2 100644
--- a/tests/fixtures/abort-on-container-exit-0/docker-compose.yml
+++ b/tests/fixtures/abort-on-container-exit-0/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: ls .
diff --git a/tests/fixtures/abort-on-container-exit-1/docker-compose.yml b/tests/fixtures/abort-on-container-exit-1/docker-compose.yml
index 7ec9b7e1..23290964 100644
--- a/tests/fixtures/abort-on-container-exit-1/docker-compose.yml
+++ b/tests/fixtures/abort-on-container-exit-1/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: ls /thecakeisalie
diff --git a/tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml b/tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml
new file mode 100644
index 00000000..cd10c851
--- /dev/null
+++ b/tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml
@@ -0,0 +1,10 @@
+version: "2.0"
+services:
+ simple:
+ image: busybox:1.31.0-uclibc
+ command: top
+ depends_on:
+ - another
+ another:
+ image: busybox:1.31.0-uclibc
+ command: ls /thecakeisalie
diff --git a/tests/fixtures/build-args/Dockerfile b/tests/fixtures/build-args/Dockerfile
index 93ebcb9c..d1534068 100644
--- a/tests/fixtures/build-args/Dockerfile
+++ b/tests/fixtures/build-args/Dockerfile
@@ -1,4 +1,4 @@
-FROM busybox:latest
+FROM busybox:1.31.0-uclibc
LABEL com.docker.compose.test_image=true
ARG favorite_th_character
RUN echo "Favorite Touhou Character: ${favorite_th_character}"
diff --git a/tests/fixtures/build-ctx/Dockerfile b/tests/fixtures/build-ctx/Dockerfile
index dd864b83..4acac9c7 100644
--- a/tests/fixtures/build-ctx/Dockerfile
+++ b/tests/fixtures/build-ctx/Dockerfile
@@ -1,3 +1,3 @@
-FROM busybox:latest
+FROM busybox:1.31.0-uclibc
LABEL com.docker.compose.test_image=true
CMD echo "success"
diff --git a/tests/fixtures/build-memory/Dockerfile b/tests/fixtures/build-memory/Dockerfile
index b27349b9..076b84d7 100644
--- a/tests/fixtures/build-memory/Dockerfile
+++ b/tests/fixtures/build-memory/Dockerfile
@@ -1,4 +1,4 @@
-FROM busybox
+FROM busybox:1.31.0-uclibc
# Report the memory (through the size of the group memory)
RUN echo "memory:" $(cat /sys/fs/cgroup/memory/memory.limit_in_bytes)
diff --git a/tests/fixtures/build-multiple-composefile/a/Dockerfile b/tests/fixtures/build-multiple-composefile/a/Dockerfile
new file mode 100644
index 00000000..52ed15ec
--- /dev/null
+++ b/tests/fixtures/build-multiple-composefile/a/Dockerfile
@@ -0,0 +1,4 @@
+
+FROM busybox:1.31.0-uclibc
+RUN echo a
+CMD top
diff --git a/tests/fixtures/build-multiple-composefile/b/Dockerfile b/tests/fixtures/build-multiple-composefile/b/Dockerfile
new file mode 100644
index 00000000..932d851d
--- /dev/null
+++ b/tests/fixtures/build-multiple-composefile/b/Dockerfile
@@ -0,0 +1,4 @@
+
+FROM busybox:1.31.0-uclibc
+RUN echo b
+CMD top
diff --git a/tests/fixtures/build-multiple-composefile/docker-compose.yml b/tests/fixtures/build-multiple-composefile/docker-compose.yml
new file mode 100644
index 00000000..efa70d7e
--- /dev/null
+++ b/tests/fixtures/build-multiple-composefile/docker-compose.yml
@@ -0,0 +1,8 @@
+
+version: "2"
+
+services:
+ a:
+ build: ./a
+ b:
+ build: ./b
diff --git a/tests/fixtures/bundle-with-digests/docker-compose.yml b/tests/fixtures/bundle-with-digests/docker-compose.yml
deleted file mode 100644
index b7013512..00000000
--- a/tests/fixtures/bundle-with-digests/docker-compose.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-version: '2.0'
-
-services:
- web:
- image: dockercloud/hello-world@sha256:fe79a2cfbd17eefc344fb8419420808df95a1e22d93b7f621a7399fd1e9dca1d
-
- redis:
- image: redis@sha256:a84cb8f53a70e19f61ff2e1d5e73fb7ae62d374b2b7392de1e7d77be26ef8f7b
diff --git a/tests/fixtures/compatibility-mode/docker-compose.yml b/tests/fixtures/compatibility-mode/docker-compose.yml
index aac6fd4c..4b63fadf 100644
--- a/tests/fixtures/compatibility-mode/docker-compose.yml
+++ b/tests/fixtures/compatibility-mode/docker-compose.yml
@@ -1,7 +1,7 @@
version: '3.5'
services:
foo:
- image: alpine:3.7
+ image: alpine:3.10.1
command: /bin/true
deploy:
replicas: 3
@@ -16,7 +16,13 @@ services:
memory: 100M
volumes:
- foo:/bar
+ networks:
+ - bar
volumes:
foo:
driver: default
+
+networks:
+ bar:
+ attachable: true
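+# Note: this fixture exercises `docker-compose --compatibility`, which,
+# roughly sketched, translates v3 `deploy` keys such as `replicas` and
+# `resources` into their pre-v3 equivalents.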
diff --git a/tests/fixtures/config-profiles/docker-compose.yml b/tests/fixtures/config-profiles/docker-compose.yml
new file mode 100644
index 00000000..a445e7e0
--- /dev/null
+++ b/tests/fixtures/config-profiles/docker-compose.yml
@@ -0,0 +1,15 @@
+version: '3.8'
+services:
+ frontend:
+ image: frontend
+ profiles: ["frontend", "gui"]
+ phpmyadmin:
+ image: phpmyadmin
+ depends_on:
+ - db
+ profiles:
+ - debug
+ backend:
+ image: backend
+ db:
+ image: mysql
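+# Sketch of the intended gating: `backend` and `db` carry no profiles and
+# always start; a hypothetical `docker-compose --profile debug up` would also
+# start `phpmyadmin` (pulling in `db` via depends_on).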
diff --git a/tests/fixtures/default-env-file/.env2 b/tests/fixtures/default-env-file/.env2
new file mode 100644
index 00000000..d754523f
--- /dev/null
+++ b/tests/fixtures/default-env-file/.env2
@@ -0,0 +1,4 @@
+IMAGE=alpine:latest
+COMMAND=false
+PORT1=5644
+PORT2=9998
diff --git a/tests/fixtures/default-env-file/alt/.env b/tests/fixtures/default-env-file/alt/.env
new file mode 100644
index 00000000..981c7207
--- /dev/null
+++ b/tests/fixtures/default-env-file/alt/.env
@@ -0,0 +1,4 @@
+IMAGE=alpine:3.10.1
+COMMAND=echo uwu
+PORT1=3341
+PORT2=4449
diff --git a/tests/fixtures/default-env-file/docker-compose.yml b/tests/fixtures/default-env-file/docker-compose.yml
index aa8e4409..79363586 100644
--- a/tests/fixtures/default-env-file/docker-compose.yml
+++ b/tests/fixtures/default-env-file/docker-compose.yml
@@ -1,4 +1,6 @@
-web:
+version: '2.4'
+services:
+ web:
image: ${IMAGE}
command: ${COMMAND}
ports:
diff --git a/tests/fixtures/dockerfile-with-volume/Dockerfile b/tests/fixtures/dockerfile-with-volume/Dockerfile
index 0d376ec4..f38e1d57 100644
--- a/tests/fixtures/dockerfile-with-volume/Dockerfile
+++ b/tests/fixtures/dockerfile-with-volume/Dockerfile
@@ -1,4 +1,4 @@
-FROM busybox:latest
+FROM busybox:1.31.0-uclibc
LABEL com.docker.compose.test_image=true
VOLUME /data
CMD top
diff --git a/tests/fixtures/duplicate-override-yaml-files/docker-compose.yml b/tests/fixtures/duplicate-override-yaml-files/docker-compose.yml
index 5f2909d6..6880435b 100644
--- a/tests/fixtures/duplicate-override-yaml-files/docker-compose.yml
+++ b/tests/fixtures/duplicate-override-yaml-files/docker-compose.yml
@@ -1,10 +1,10 @@
web:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: "sleep 100"
links:
- db
db:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: "sleep 200"
diff --git a/tests/fixtures/echo-services-dependencies/docker-compose.yml b/tests/fixtures/echo-services-dependencies/docker-compose.yml
new file mode 100644
index 00000000..5329e003
--- /dev/null
+++ b/tests/fixtures/echo-services-dependencies/docker-compose.yml
@@ -0,0 +1,10 @@
+version: "2.0"
+services:
+ simple:
+ image: busybox:1.31.0-uclibc
+ command: echo simple
+ depends_on:
+ - another
+ another:
+ image: busybox:1.31.0-uclibc
+ command: echo another
diff --git a/tests/fixtures/echo-services/docker-compose.yml b/tests/fixtures/echo-services/docker-compose.yml
index 8014f3d9..75fc45d9 100644
--- a/tests/fixtures/echo-services/docker-compose.yml
+++ b/tests/fixtures/echo-services/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: echo simple
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: echo another
diff --git a/tests/fixtures/entrypoint-dockerfile/Dockerfile b/tests/fixtures/entrypoint-dockerfile/Dockerfile
index 49f4416c..30ec50ba 100644
--- a/tests/fixtures/entrypoint-dockerfile/Dockerfile
+++ b/tests/fixtures/entrypoint-dockerfile/Dockerfile
@@ -1,4 +1,4 @@
-FROM busybox:latest
+FROM busybox:1.31.0-uclibc
LABEL com.docker.compose.test_image=true
ENTRYPOINT ["printf"]
CMD ["default", "args"]
diff --git a/tests/fixtures/env-file-override/.env b/tests/fixtures/env-file-override/.env
new file mode 100644
index 00000000..467f2c1d
--- /dev/null
+++ b/tests/fixtures/env-file-override/.env
@@ -0,0 +1 @@
+WHEREAMI=default
diff --git a/tests/fixtures/env-file-override/.env.conf b/tests/fixtures/env-file-override/.env.conf
new file mode 100644
index 00000000..90b8b495
--- /dev/null
+++ b/tests/fixtures/env-file-override/.env.conf
@@ -0,0 +1,2 @@
+WHEREAMI
+DEFAULT_CONF_LOADED=true
diff --git a/tests/fixtures/env-file-override/.env.override b/tests/fixtures/env-file-override/.env.override
new file mode 100644
index 00000000..398fa51b
--- /dev/null
+++ b/tests/fixtures/env-file-override/.env.override
@@ -0,0 +1 @@
+WHEREAMI=override
diff --git a/tests/fixtures/env-file-override/docker-compose.yml b/tests/fixtures/env-file-override/docker-compose.yml
new file mode 100644
index 00000000..fdae6d82
--- /dev/null
+++ b/tests/fixtures/env-file-override/docker-compose.yml
@@ -0,0 +1,6 @@
+version: '3.7'
+services:
+ test:
+ image: busybox
+ env_file: .env.conf
+ entrypoint: env
diff --git a/tests/fixtures/env/three.env b/tests/fixtures/env/three.env
new file mode 100644
index 00000000..c2da74f1
--- /dev/null
+++ b/tests/fixtures/env/three.env
@@ -0,0 +1,2 @@
+FOO=NO $ENV VAR
+DOO=NO ${ENV} VAR
diff --git a/tests/fixtures/environment-composefile/docker-compose.yml b/tests/fixtures/environment-composefile/docker-compose.yml
index 9d99fee0..5650c7c8 100644
--- a/tests/fixtures/environment-composefile/docker-compose.yml
+++ b/tests/fixtures/environment-composefile/docker-compose.yml
@@ -1,5 +1,5 @@
service:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
environment:
diff --git a/tests/fixtures/environment-exec/docker-compose.yml b/tests/fixtures/environment-exec/docker-compose.yml
index 813606eb..e284ba8c 100644
--- a/tests/fixtures/environment-exec/docker-compose.yml
+++ b/tests/fixtures/environment-exec/docker-compose.yml
@@ -2,7 +2,7 @@ version: "2.2"
services:
service:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
environment:
diff --git a/tests/fixtures/exec-novalue-var/docker-compose.yml b/tests/fixtures/exec-novalue-var/docker-compose.yml
new file mode 100644
index 00000000..1f8502f9
--- /dev/null
+++ b/tests/fixtures/exec-novalue-var/docker-compose.yml
@@ -0,0 +1,6 @@
+version: '3'
+services:
+ nginx:
+ image: nginx
+ environment:
+ - CHECK_VAR=${MYVAR}
diff --git a/tests/fixtures/exit-code-from/docker-compose.yml b/tests/fixtures/exit-code-from/docker-compose.yml
index 687e78b9..c38bd549 100644
--- a/tests/fixtures/exit-code-from/docker-compose.yml
+++ b/tests/fixtures/exit-code-from/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: sh -c "echo hello && tail -f /dev/null"
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: /bin/false
diff --git a/tests/fixtures/expose-composefile/docker-compose.yml b/tests/fixtures/expose-composefile/docker-compose.yml
index d14a468d..c2a3dc42 100644
--- a/tests/fixtures/expose-composefile/docker-compose.yml
+++ b/tests/fixtures/expose-composefile/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
expose:
- '3000'
diff --git a/tests/fixtures/flag-as-service-name/Dockerfile b/tests/fixtures/flag-as-service-name/Dockerfile
new file mode 100644
index 00000000..098ff3eb
--- /dev/null
+++ b/tests/fixtures/flag-as-service-name/Dockerfile
@@ -0,0 +1,3 @@
+FROM busybox:1.27.2
+LABEL com.docker.compose.test_image=true
+CMD echo "success"
diff --git a/tests/fixtures/flag-as-service-name/docker-compose.yml b/tests/fixtures/flag-as-service-name/docker-compose.yml
new file mode 100644
index 00000000..5b519a63
--- /dev/null
+++ b/tests/fixtures/flag-as-service-name/docker-compose.yml
@@ -0,0 +1,12 @@
+version: "2"
+services:
+ --test-service:
+ image: busybox:1.27.0.2
+ build: .
+ command: top
+ ports:
+ - "8080:80"
+
+ --log-service:
+ image: busybox:1.31.0-uclibc
+ command: sh -c "echo hello && tail -f /dev/null"
diff --git a/tests/fixtures/images-service-tag/Dockerfile b/tests/fixtures/images-service-tag/Dockerfile
new file mode 100644
index 00000000..1e1a1b2e
--- /dev/null
+++ b/tests/fixtures/images-service-tag/Dockerfile
@@ -0,0 +1,2 @@
+FROM busybox:1.31.0-uclibc
+RUN touch /foo
diff --git a/tests/fixtures/images-service-tag/docker-compose.yml b/tests/fixtures/images-service-tag/docker-compose.yml
new file mode 100644
index 00000000..a46b32bf
--- /dev/null
+++ b/tests/fixtures/images-service-tag/docker-compose.yml
@@ -0,0 +1,11 @@
+version: "2.4"
+services:
+ foo1:
+ build: .
+ image: test:dev
+ foo2:
+ build: .
+ image: test:prod
+ foo3:
+ build: .
+ image: test:latest
diff --git a/tests/fixtures/ipc-mode/docker-compose.yml b/tests/fixtures/ipc-mode/docker-compose.yml
new file mode 100644
index 00000000..c58ce244
--- /dev/null
+++ b/tests/fixtures/ipc-mode/docker-compose.yml
@@ -0,0 +1,17 @@
+version: "2.4"
+
+services:
+ service:
+ image: busybox
+ command: top
+ ipc: "service:shareable"
+
+ container:
+ image: busybox
+ command: top
+ ipc: "container:composetest_ipc_mode_container"
+
+ shareable:
+ image: busybox
+ command: top
+ ipc: shareable
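+# ipc modes, briefly: `shareable` exposes this container's IPC namespace,
+# `service:<name>` joins the namespace of another service in the project, and
+# `container:<name>` joins an arbitrary container by name.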
diff --git a/tests/fixtures/links-composefile/docker-compose.yml b/tests/fixtures/links-composefile/docker-compose.yml
index 930fd4c7..0a2f3d9e 100644
--- a/tests/fixtures/links-composefile/docker-compose.yml
+++ b/tests/fixtures/links-composefile/docker-compose.yml
@@ -1,11 +1,11 @@
db:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
web:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
links:
- db:db
console:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
diff --git a/tests/fixtures/logging-composefile-legacy/docker-compose.yml b/tests/fixtures/logging-composefile-legacy/docker-compose.yml
index ee994107..efac1d6a 100644
--- a/tests/fixtures/logging-composefile-legacy/docker-compose.yml
+++ b/tests/fixtures/logging-composefile-legacy/docker-compose.yml
@@ -1,9 +1,9 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
log_driver: "none"
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
log_driver: "json-file"
log_opt:
diff --git a/tests/fixtures/logging-composefile/docker-compose.yml b/tests/fixtures/logging-composefile/docker-compose.yml
index 466d13e5..ac231b89 100644
--- a/tests/fixtures/logging-composefile/docker-compose.yml
+++ b/tests/fixtures/logging-composefile/docker-compose.yml
@@ -1,12 +1,12 @@
version: "2"
services:
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
logging:
driver: "none"
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
logging:
driver: "json-file"
diff --git a/tests/fixtures/logs-composefile/docker-compose.yml b/tests/fixtures/logs-composefile/docker-compose.yml
index b719c91e..3ffaa984 100644
--- a/tests/fixtures/logs-composefile/docker-compose.yml
+++ b/tests/fixtures/logs-composefile/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
- command: sh -c "echo hello && tail -f /dev/null"
+ image: busybox:1.31.0-uclibc
+ command: sh -c "sleep 1 && echo hello && tail -f /dev/null"
another:
- image: busybox:latest
- command: sh -c "echo test"
+ image: busybox:1.31.0-uclibc
+ command: sh -c "sleep 1 && echo test"
diff --git a/tests/fixtures/logs-restart-composefile/docker-compose.yml b/tests/fixtures/logs-restart-composefile/docker-compose.yml
new file mode 100644
index 00000000..2179d54d
--- /dev/null
+++ b/tests/fixtures/logs-restart-composefile/docker-compose.yml
@@ -0,0 +1,7 @@
+simple:
+ image: busybox:1.31.0-uclibc
+ command: sh -c "echo hello && tail -f /dev/null"
+another:
+ image: busybox:1.31.0-uclibc
+ command: sh -c "sleep 2 && echo world && /bin/false"
+ restart: "on-failure:2"
diff --git a/tests/fixtures/logs-tail-composefile/docker-compose.yml b/tests/fixtures/logs-tail-composefile/docker-compose.yml
index 80d8feae..18dad986 100644
--- a/tests/fixtures/logs-tail-composefile/docker-compose.yml
+++ b/tests/fixtures/logs-tail-composefile/docker-compose.yml
@@ -1,3 +1,3 @@
simple:
- image: busybox:latest
- command: sh -c "echo a && echo b && echo c && echo d"
+ image: busybox:1.31.0-uclibc
+ command: sh -c "echo w && echo x && echo y && echo z"
diff --git a/tests/fixtures/longer-filename-composefile/docker-compose.yaml b/tests/fixtures/longer-filename-composefile/docker-compose.yaml
index a4eba2d0..5dadce44 100644
--- a/tests/fixtures/longer-filename-composefile/docker-compose.yaml
+++ b/tests/fixtures/longer-filename-composefile/docker-compose.yaml
@@ -1,3 +1,3 @@
definedinyamlnotyml:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/multiple-composefiles/compose2.yml b/tests/fixtures/multiple-composefiles/compose2.yml
index 56803380..530d92df 100644
--- a/tests/fixtures/multiple-composefiles/compose2.yml
+++ b/tests/fixtures/multiple-composefiles/compose2.yml
@@ -1,3 +1,3 @@
yetanother:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/multiple-composefiles/docker-compose.yml b/tests/fixtures/multiple-composefiles/docker-compose.yml
index b25beaf4..09cc9519 100644
--- a/tests/fixtures/multiple-composefiles/docker-compose.yml
+++ b/tests/fixtures/multiple-composefiles/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/networks/default-network-config.yml b/tests/fixtures/networks/default-network-config.yml
index 4bd0989b..556ca980 100644
--- a/tests/fixtures/networks/default-network-config.yml
+++ b/tests/fixtures/networks/default-network-config.yml
@@ -1,10 +1,10 @@
version: "2"
services:
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
networks:
default:
diff --git a/tests/fixtures/networks/docker-compose.yml b/tests/fixtures/networks/docker-compose.yml
index c11fa682..b911c752 100644
--- a/tests/fixtures/networks/docker-compose.yml
+++ b/tests/fixtures/networks/docker-compose.yml
@@ -2,17 +2,17 @@ version: "2"
services:
web:
- image: busybox
+ image: alpine:3.10.1
command: top
networks: ["front"]
app:
- image: busybox
+ image: alpine:3.10.1
command: top
networks: ["front", "back"]
links:
- "db:database"
db:
- image: busybox
+ image: alpine:3.10.1
command: top
networks: ["back"]
diff --git a/tests/fixtures/networks/external-default.yml b/tests/fixtures/networks/external-default.yml
index 5c9426b8..42a39565 100644
--- a/tests/fixtures/networks/external-default.yml
+++ b/tests/fixtures/networks/external-default.yml
@@ -1,10 +1,10 @@
version: "2"
services:
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
networks:
default:
diff --git a/tests/fixtures/no-build/docker-compose.yml b/tests/fixtures/no-build/docker-compose.yml
new file mode 100644
index 00000000..f320d17c
--- /dev/null
+++ b/tests/fixtures/no-build/docker-compose.yml
@@ -0,0 +1,8 @@
+version: "3"
+services:
+ my-alpine:
+ image: alpine:3.12
+ container_name: alpine
+ entrypoint: 'echo It works!'
+ build:
+ context: /this/path/doesnt/exist # and we don't really care. We just want to run containers already pulled.
diff --git a/tests/fixtures/no-links-composefile/docker-compose.yml b/tests/fixtures/no-links-composefile/docker-compose.yml
index 75a6a085..54936f30 100644
--- a/tests/fixtures/no-links-composefile/docker-compose.yml
+++ b/tests/fixtures/no-links-composefile/docker-compose.yml
@@ -1,9 +1,9 @@
db:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
web:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
console:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/override-files/docker-compose.yml b/tests/fixtures/override-files/docker-compose.yml
index 6c3d4e17..0119ec73 100644
--- a/tests/fixtures/override-files/docker-compose.yml
+++ b/tests/fixtures/override-files/docker-compose.yml
@@ -1,10 +1,10 @@
version: '2.2'
services:
web:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: "sleep 200"
depends_on:
- db
db:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: "sleep 200"
diff --git a/tests/fixtures/override-files/extra.yml b/tests/fixtures/override-files/extra.yml
index 492c3795..d03c5096 100644
--- a/tests/fixtures/override-files/extra.yml
+++ b/tests/fixtures/override-files/extra.yml
@@ -6,5 +6,5 @@ services:
- other
other:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: "top"
diff --git a/tests/fixtures/override-yaml-files/docker-compose.yml b/tests/fixtures/override-yaml-files/docker-compose.yml
index 5f2909d6..6880435b 100644
--- a/tests/fixtures/override-yaml-files/docker-compose.yml
+++ b/tests/fixtures/override-yaml-files/docker-compose.yml
@@ -1,10 +1,10 @@
web:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: "sleep 100"
links:
- db
db:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: "sleep 200"
diff --git a/tests/fixtures/ports-composefile-scale/docker-compose.yml b/tests/fixtures/ports-composefile-scale/docker-compose.yml
index 1a2bb485..bdd39cef 100644
--- a/tests/fixtures/ports-composefile-scale/docker-compose.yml
+++ b/tests/fixtures/ports-composefile-scale/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: /bin/sleep 300
ports:
- '3000'
diff --git a/tests/fixtures/ports-composefile/docker-compose.yml b/tests/fixtures/ports-composefile/docker-compose.yml
index c213068d..f4987027 100644
--- a/tests/fixtures/ports-composefile/docker-compose.yml
+++ b/tests/fixtures/ports-composefile/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
ports:
- '3000'
diff --git a/tests/fixtures/ports-composefile/expanded-notation.yml b/tests/fixtures/ports-composefile/expanded-notation.yml
index 09a7a2bf..6510e428 100644
--- a/tests/fixtures/ports-composefile/expanded-notation.yml
+++ b/tests/fixtures/ports-composefile/expanded-notation.yml
@@ -1,7 +1,7 @@
version: '3.2'
services:
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
ports:
- target: 3000
diff --git a/tests/fixtures/profiles/docker-compose.yml b/tests/fixtures/profiles/docker-compose.yml
new file mode 100644
index 00000000..ba77f03b
--- /dev/null
+++ b/tests/fixtures/profiles/docker-compose.yml
@@ -0,0 +1,20 @@
+version: "3"
+services:
+ foo:
+ image: busybox:1.31.0-uclibc
+ bar:
+ image: busybox:1.31.0-uclibc
+ profiles:
+ - test
+ baz:
+ image: busybox:1.31.0-uclibc
+ depends_on:
+ - bar
+ profiles:
+ - test
+ zot:
+ image: busybox:1.31.0-uclibc
+ depends_on:
+ - bar
+ profiles:
+ - debug
diff --git a/tests/fixtures/profiles/merge-profiles.yml b/tests/fixtures/profiles/merge-profiles.yml
new file mode 100644
index 00000000..42b0cfa4
--- /dev/null
+++ b/tests/fixtures/profiles/merge-profiles.yml
@@ -0,0 +1,5 @@
+version: "3"
+services:
+ bar:
+ profiles:
+ - debug
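+# Intended use, per the fixture name: layered on top of docker-compose.yml,
+#   docker-compose -f docker-compose.yml -f merge-profiles.yml ...
+# so that `bar` presumably ends up with both the `test` and `debug` profiles.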
diff --git a/tests/fixtures/ps-services-filter/docker-compose.yml b/tests/fixtures/ps-services-filter/docker-compose.yml
index 3d860937..180f515a 100644
--- a/tests/fixtures/ps-services-filter/docker-compose.yml
+++ b/tests/fixtures/ps-services-filter/docker-compose.yml
@@ -1,5 +1,5 @@
with_image:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
with_build:
build: ../build-ctx/
diff --git a/tests/fixtures/run-labels/docker-compose.yml b/tests/fixtures/run-labels/docker-compose.yml
index e8cd5006..e3b237fd 100644
--- a/tests/fixtures/run-labels/docker-compose.yml
+++ b/tests/fixtures/run-labels/docker-compose.yml
@@ -1,5 +1,5 @@
service:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
labels:
diff --git a/tests/fixtures/run-workdir/docker-compose.yml b/tests/fixtures/run-workdir/docker-compose.yml
index dc3ea86a..9d092a55 100644
--- a/tests/fixtures/run-workdir/docker-compose.yml
+++ b/tests/fixtures/run-workdir/docker-compose.yml
@@ -1,4 +1,4 @@
service:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
working_dir: /etc
command: /bin/true
diff --git a/tests/fixtures/scale/docker-compose.yml b/tests/fixtures/scale/docker-compose.yml
index a0d3b771..53ae1342 100644
--- a/tests/fixtures/scale/docker-compose.yml
+++ b/tests/fixtures/scale/docker-compose.yml
@@ -5,5 +5,9 @@ services:
command: top
scale: 2
db:
- image: busybox
- command: top
+ image: busybox
+ command: top
+ worker:
+ image: busybox
+ command: top
+ scale: 0
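+# Note: `scale: 0` on `worker` is valid in the v2.2 format; the expected
+# behaviour (a sketch of what the scale tests assert) is that `up` creates no
+# containers for that service.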
diff --git a/tests/fixtures/simple-composefile-volume-ready/docker-compose.merge.yml b/tests/fixtures/simple-composefile-volume-ready/docker-compose.merge.yml
index fe717151..45b626d0 100644
--- a/tests/fixtures/simple-composefile-volume-ready/docker-compose.merge.yml
+++ b/tests/fixtures/simple-composefile-volume-ready/docker-compose.merge.yml
@@ -1,7 +1,7 @@
version: '2.2'
services:
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
volumes:
- datastore:/data1
diff --git a/tests/fixtures/simple-composefile-volume-ready/docker-compose.yml b/tests/fixtures/simple-composefile-volume-ready/docker-compose.yml
index 98a7d23b..088d71c9 100644
--- a/tests/fixtures/simple-composefile-volume-ready/docker-compose.yml
+++ b/tests/fixtures/simple-composefile-volume-ready/docker-compose.yml
@@ -1,2 +1,2 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
diff --git a/tests/fixtures/simple-composefile/can-build-pull-failures.yml b/tests/fixtures/simple-composefile/can-build-pull-failures.yml
new file mode 100644
index 00000000..1ffe8e0f
--- /dev/null
+++ b/tests/fixtures/simple-composefile/can-build-pull-failures.yml
@@ -0,0 +1,6 @@
+version: '3'
+services:
+ can_build:
+ image: nonexisting-image-but-can-build:latest
+ build: .
+ command: top
diff --git a/tests/fixtures/simple-composefile/digest.yml b/tests/fixtures/simple-composefile/digest.yml
index 08f1d993..79f043ba 100644
--- a/tests/fixtures/simple-composefile/digest.yml
+++ b/tests/fixtures/simple-composefile/digest.yml
@@ -1,5 +1,5 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
digest:
image: busybox@sha256:38a203e1986cf79639cfb9b2e1d6e773de84002feea2d4eb006b52004ee8502d
diff --git a/tests/fixtures/simple-composefile/docker-compose.yml b/tests/fixtures/simple-composefile/docker-compose.yml
index b25beaf4..b66a0652 100644
--- a/tests/fixtures/simple-composefile/docker-compose.yml
+++ b/tests/fixtures/simple-composefile/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/simple-composefile/ignore-pull-failures.yml b/tests/fixtures/simple-composefile/ignore-pull-failures.yml
index a28f7922..7e7d560d 100644
--- a/tests/fixtures/simple-composefile/ignore-pull-failures.yml
+++ b/tests/fixtures/simple-composefile/ignore-pull-failures.yml
@@ -1,5 +1,5 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
another:
image: nonexisting-image:latest
diff --git a/tests/fixtures/simple-composefile/pull-with-build.yml b/tests/fixtures/simple-composefile/pull-with-build.yml
new file mode 100644
index 00000000..3bff35c5
--- /dev/null
+++ b/tests/fixtures/simple-composefile/pull-with-build.yml
@@ -0,0 +1,11 @@
+version: "3"
+services:
+ build_simple:
+ image: simple
+ build: .
+ command: top
+ from_simple:
+ image: simple
+ another:
+ image: busybox:1.31.0-uclibc
+ command: top
diff --git a/tests/fixtures/simple-dockerfile/Dockerfile b/tests/fixtures/simple-dockerfile/Dockerfile
index dd864b83..098ff3eb 100644
--- a/tests/fixtures/simple-dockerfile/Dockerfile
+++ b/tests/fixtures/simple-dockerfile/Dockerfile
@@ -1,3 +1,3 @@
-FROM busybox:latest
+FROM busybox:1.27.2
LABEL com.docker.compose.test_image=true
CMD echo "success"
diff --git a/tests/fixtures/simple-failing-dockerfile/Dockerfile b/tests/fixtures/simple-failing-dockerfile/Dockerfile
index c2d06b16..a3328b0d 100644
--- a/tests/fixtures/simple-failing-dockerfile/Dockerfile
+++ b/tests/fixtures/simple-failing-dockerfile/Dockerfile
@@ -1,7 +1,7 @@
-FROM busybox:latest
+FROM busybox:1.31.0-uclibc
LABEL com.docker.compose.test_image=true
LABEL com.docker.compose.test_failing_image=true
-# With the following label the container wil be cleaned up automatically
+# With the following label the container will be cleaned up automatically
# Must be kept in sync with LABEL_PROJECT from compose/const.py
LABEL com.docker.compose.project=composetest
RUN exit 1
diff --git a/tests/fixtures/sleeps-composefile/docker-compose.yml b/tests/fixtures/sleeps-composefile/docker-compose.yml
index 7c8d84f8..26feb502 100644
--- a/tests/fixtures/sleeps-composefile/docker-compose.yml
+++ b/tests/fixtures/sleeps-composefile/docker-compose.yml
@@ -3,8 +3,8 @@ version: "2"
services:
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: sleep 200
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: sleep 200
diff --git a/tests/fixtures/stop-signal-composefile/docker-compose.yml b/tests/fixtures/stop-signal-composefile/docker-compose.yml
index 04f58aa9..9f99b0c7 100644
--- a/tests/fixtures/stop-signal-composefile/docker-compose.yml
+++ b/tests/fixtures/stop-signal-composefile/docker-compose.yml
@@ -1,5 +1,5 @@
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command:
- sh
- '-c'
diff --git a/tests/fixtures/tagless-image/Dockerfile b/tests/fixtures/tagless-image/Dockerfile
index 56741055..92305555 100644
--- a/tests/fixtures/tagless-image/Dockerfile
+++ b/tests/fixtures/tagless-image/Dockerfile
@@ -1,2 +1,2 @@
-FROM busybox:latest
+FROM busybox:1.31.0-uclibc
RUN touch /blah
diff --git a/tests/fixtures/top/docker-compose.yml b/tests/fixtures/top/docker-compose.yml
index d632a836..36a3917d 100644
--- a/tests/fixtures/top/docker-compose.yml
+++ b/tests/fixtures/top/docker-compose.yml
@@ -1,6 +1,6 @@
service_a:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
service_b:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/unicode-environment/docker-compose.yml b/tests/fixtures/unicode-environment/docker-compose.yml
index a41af4f0..307678cd 100644
--- a/tests/fixtures/unicode-environment/docker-compose.yml
+++ b/tests/fixtures/unicode-environment/docker-compose.yml
@@ -1,7 +1,7 @@
version: '2'
services:
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: sh -c 'echo $$FOO'
environment:
FOO: ${BAR}
diff --git a/tests/fixtures/user-composefile/docker-compose.yml b/tests/fixtures/user-composefile/docker-compose.yml
index 3eb7d397..11283d9d 100644
--- a/tests/fixtures/user-composefile/docker-compose.yml
+++ b/tests/fixtures/user-composefile/docker-compose.yml
@@ -1,4 +1,4 @@
service:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
user: notauser
command: id
diff --git a/tests/fixtures/v2-dependencies/docker-compose.yml b/tests/fixtures/v2-dependencies/docker-compose.yml
index 2e14b94b..45ec8501 100644
--- a/tests/fixtures/v2-dependencies/docker-compose.yml
+++ b/tests/fixtures/v2-dependencies/docker-compose.yml
@@ -1,13 +1,13 @@
version: "2.0"
services:
db:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
web:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
depends_on:
- db
console:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/v2-full/Dockerfile b/tests/fixtures/v2-full/Dockerfile
index 51ed0d90..6fa7a726 100644
--- a/tests/fixtures/v2-full/Dockerfile
+++ b/tests/fixtures/v2-full/Dockerfile
@@ -1,4 +1,4 @@
-FROM busybox:latest
+FROM busybox:1.31.0-uclibc
RUN echo something
CMD top
diff --git a/tests/fixtures/v2-full/docker-compose.yml b/tests/fixtures/v2-full/docker-compose.yml
index a973dd0c..20c14f0f 100644
--- a/tests/fixtures/v2-full/docker-compose.yml
+++ b/tests/fixtures/v2-full/docker-compose.yml
@@ -18,7 +18,7 @@ services:
- other
other:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
volumes:
- /data
diff --git a/tests/fixtures/v2-simple/docker-compose.yml b/tests/fixtures/v2-simple/docker-compose.yml
index c99ae02f..ac754eee 100644
--- a/tests/fixtures/v2-simple/docker-compose.yml
+++ b/tests/fixtures/v2-simple/docker-compose.yml
@@ -1,8 +1,8 @@
version: "2"
services:
simple:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
another:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
diff --git a/tests/fixtures/v2-simple/links-invalid.yml b/tests/fixtures/v2-simple/links-invalid.yml
index 481aa404..a88eb1d5 100644
--- a/tests/fixtures/v2-simple/links-invalid.yml
+++ b/tests/fixtures/v2-simple/links-invalid.yml
@@ -1,10 +1,10 @@
version: "2"
services:
simple:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
links:
- another
another:
- image: busybox:latest
+ image: busybox:1.31.0-uclibc
command: top
diff --git a/tests/fixtures/v2-simple/one-container.yml b/tests/fixtures/v2-simple/one-container.yml
new file mode 100644
index 00000000..2d5c2ca6
--- /dev/null
+++ b/tests/fixtures/v2-simple/one-container.yml
@@ -0,0 +1,5 @@
+version: "2"
+services:
+ simple:
+ image: busybox:1.31.0-uclibc
+ command: top
diff --git a/tests/fixtures/v2-unhealthy-dependencies/docker-compose.yml b/tests/fixtures/v2-unhealthy-dependencies/docker-compose.yml
new file mode 100644
index 00000000..d96473e5
--- /dev/null
+++ b/tests/fixtures/v2-unhealthy-dependencies/docker-compose.yml
@@ -0,0 +1,19 @@
+version: "2.1"
+services:
+ db:
+ image: busybox:1.31.0-uclibc
+ command: top
+ healthcheck:
+ test: exit 1
+ interval: 1s
+ timeout: 1s
+ retries: 1
+ web:
+ image: busybox:1.31.0-uclibc
+ command: top
+ depends_on:
+ db:
+ condition: service_healthy
+ console:
+ image: busybox:1.31.0-uclibc
+ command: top
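+# Sketch of the expectation: `db`'s healthcheck always fails (`test: exit 1`),
+# so `web`, gated on `condition: service_healthy`, should never start, while
+# `console` has no dependencies and starts regardless.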
diff --git a/tests/fixtures/v3-full/docker-compose.yml b/tests/fixtures/v3-full/docker-compose.yml
index 3a7ac25c..0a515658 100644
--- a/tests/fixtures/v3-full/docker-compose.yml
+++ b/tests/fixtures/v3-full/docker-compose.yml
@@ -14,10 +14,10 @@ services:
max_failure_ratio: 0.3
resources:
limits:
- cpus: '0.05'
+ cpus: 0.05
memory: 50M
reservations:
- cpus: '0.01'
+ cpus: 0.01
memory: 20M
restart_policy:
condition: on-failure
diff --git a/tests/helpers.py b/tests/helpers.py
index dd129981..3642e6eb 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -1,12 +1,14 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
+import contextlib
import os
from compose.config.config import ConfigDetails
from compose.config.config import ConfigFile
from compose.config.config import load
+BUSYBOX_IMAGE_NAME = 'busybox'
+BUSYBOX_DEFAULT_TAG = '1.31.0-uclibc'
+BUSYBOX_IMAGE_WITH_TAG = '{}:{}'.format(BUSYBOX_IMAGE_NAME, BUSYBOX_DEFAULT_TAG)
+
def build_config(contents, **kwargs):
return load(build_config_details(contents, **kwargs))
@@ -22,7 +24,7 @@ def build_config_details(contents, working_dir='working_dir', filename='filename
def create_custom_host_file(client, filename, content):
dirname = os.path.dirname(filename)
container = client.create_container(
- 'busybox:latest',
+ BUSYBOX_IMAGE_WITH_TAG,
['sh', '-c', 'echo -n "{}" > {}'.format(content, filename)],
volumes={dirname: {}},
host_config=client.create_host_config(
@@ -47,7 +49,21 @@ def create_custom_host_file(client, filename, content):
def create_host_file(client, filename):
- with open(filename, 'r') as fh:
+ with open(filename) as fh:
content = fh.read()
return create_custom_host_file(client, filename, content)
+
+
+@contextlib.contextmanager
+def cd(path):
+ """
+ A context manager which changes the working directory to the given
+ path, and then changes it back to its previous value on exit.
+ """
+ prev_cwd = os.getcwd()
+ os.chdir(path)
+ try:
+ yield
+ finally:
+ os.chdir(prev_cwd)
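+
+
+# Example usage (a sketch): run a block from inside a fixture directory and
+# return to the previous working directory afterwards, even on error:
+#
+#   with cd('tests/fixtures/simple-composefile'):
+#       details = build_config_details({'simple': {'image': BUSYBOX_IMAGE_WITH_TAG}})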
diff --git a/tests/integration/environment_test.py b/tests/integration/environment_test.py
new file mode 100644
index 00000000..b7822a59
--- /dev/null
+++ b/tests/integration/environment_test.py
@@ -0,0 +1,92 @@
+import tempfile
+
+import pytest
+from ddt import data
+from ddt import ddt
+
+from .. import mock
+from ..acceptance.cli_test import dispatch
+from compose.cli.command import get_project
+from compose.cli.command import project_from_options
+from compose.config.environment import Environment
+from compose.config.errors import EnvFileNotFound
+from tests.integration.testcases import DockerClientTestCase
+
+
+@ddt
+class EnvironmentTest(DockerClientTestCase):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ cls.compose_file = tempfile.NamedTemporaryFile(mode='w+b')
+ cls.compose_file.write(bytes("""version: '3.2'
+services:
+ svc:
+ image: busybox:1.31.0-uclibc
+ environment:
+ TEST_VARIABLE: ${TEST_VARIABLE}""", encoding='utf-8'))
+ cls.compose_file.flush()
+
+ @classmethod
+ def tearDownClass(cls):
+ super().tearDownClass()
+ cls.compose_file.close()
+
+ @data('events',
+ 'exec',
+ 'kill',
+ 'logs',
+ 'pause',
+ 'ps',
+ 'restart',
+ 'rm',
+ 'start',
+ 'stop',
+ 'top',
+ 'unpause')
+ def _test_no_warning_on_missing_host_environment_var_on_silent_commands(self, cmd):
+ options = {'COMMAND': cmd, '--file': [EnvironmentTest.compose_file.name]}
+ with mock.patch('compose.config.environment.log') as fake_log:
+ # Note that the warning silencing and the env variable checks are
+ # done in `project_from_options`, so there is no need for a full
+ # options map; the `COMMAND` key is enough.
+ project_from_options('.', options)
+ assert fake_log.warn.call_count == 0
+
+
+class EnvironmentOverrideFileTest(DockerClientTestCase):
+ def test_env_file_override(self):
+ base_dir = 'tests/fixtures/env-file-override'
+ # '--env-file' paths are relative to the current working dir
+ env = Environment.from_env_file(base_dir, base_dir+'/.env.override')
+ dispatch(base_dir, ['--env-file', '.env.override', 'up'])
+ project = get_project(project_dir=base_dir,
+ config_path=['docker-compose.yml'],
+ environment=env,
+ override_dir=base_dir)
+ containers = project.containers(stopped=True)
+ assert len(containers) == 1
+ assert "WHEREAMI=override" in containers[0].get('Config.Env')
+ assert "DEFAULT_CONF_LOADED=true" in containers[0].get('Config.Env')
+ dispatch(base_dir, ['--env-file', '.env.override', 'down'], None)
+
+ def test_env_file_not_found_error(self):
+ base_dir = 'tests/fixtures/env-file-override'
+ with pytest.raises(EnvFileNotFound) as excinfo:
+ Environment.from_env_file(base_dir, '.env.override')
+
+ assert "Couldn't find env file" in excinfo.exconly()
+
+ def test_dot_env_file(self):
+ base_dir = 'tests/fixtures/env-file-override'
+ # '.env' is relative to the project_dir (base_dir)
+ env = Environment.from_env_file(base_dir, None)
+ dispatch(base_dir, ['up'])
+ project = get_project(project_dir=base_dir,
+ config_path=['docker-compose.yml'],
+ environment=env,
+ override_dir=base_dir)
+ containers = project.containers(stopped=True)
+ assert len(containers) == 1
+ assert "WHEREAMI=default" in containers[0].get('Config.Env')
+ dispatch(base_dir, ['down'], None)
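+
+ # Assumed precedence, matching the two tests above: an explicit `--env-file`
+ # wins over the project-level `.env`, which in turn supplies
+ # `WHEREAMI=default` when no override is passed.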
diff --git a/tests/integration/metrics_test.py b/tests/integration/metrics_test.py
new file mode 100644
index 00000000..3d6e3fe2
--- /dev/null
+++ b/tests/integration/metrics_test.py
@@ -0,0 +1,125 @@
+import logging
+import os
+import socket
+from http.server import BaseHTTPRequestHandler
+from http.server import HTTPServer
+from threading import Thread
+
+import requests
+from docker.transport import UnixHTTPAdapter
+
+from tests.acceptance.cli_test import dispatch
+from tests.integration.testcases import DockerClientTestCase
+
+
+TEST_SOCKET_FILE = '/tmp/test-metrics-docker-cli.sock'
+
+
+class MetricsTest(DockerClientTestCase):
+ test_session = requests.sessions.Session()
+ test_env = None
+ base_dir = 'tests/fixtures/v3-full'
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
+ MetricsTest.test_session.mount("http+unix://", UnixHTTPAdapter(TEST_SOCKET_FILE))
+ MetricsTest.test_env = os.environ.copy()
+ MetricsTest.test_env['METRICS_SOCKET_FILE'] = TEST_SOCKET_FILE
+ MetricsServer().start()
+
+ @classmethod
+ def test_metrics_help(cls):
+ # the root `docker-compose` command is treated as `--help`
+ dispatch(cls.base_dir, [], env=MetricsTest.test_env)
+ assert cls.get_content() == \
+ b'{"command": "compose --help", "context": "moby", ' \
+ b'"source": "docker-compose", "status": "success"}'
+ dispatch(cls.base_dir, ['help', 'run'], env=MetricsTest.test_env)
+ assert cls.get_content() == \
+ b'{"command": "compose help", "context": "moby", ' \
+ b'"source": "docker-compose", "status": "success"}'
+ dispatch(cls.base_dir, ['--help'], env=MetricsTest.test_env)
+ assert cls.get_content() == \
+ b'{"command": "compose --help", "context": "moby", ' \
+ b'"source": "docker-compose", "status": "success"}'
+ dispatch(cls.base_dir, ['run', '--help'], env=MetricsTest.test_env)
+ assert cls.get_content() == \
+ b'{"command": "compose --help run", "context": "moby", ' \
+ b'"source": "docker-compose", "status": "success"}'
+ dispatch(cls.base_dir, ['up', '--help', 'extra_args'], env=MetricsTest.test_env)
+ assert cls.get_content() == \
+ b'{"command": "compose --help up", "context": "moby", ' \
+ b'"source": "docker-compose", "status": "success"}'
+
+ @classmethod
+ def test_metrics_simple_commands(cls):
+ dispatch(cls.base_dir, ['ps'], env=MetricsTest.test_env)
+ assert cls.get_content() == \
+ b'{"command": "compose ps", "context": "moby", ' \
+ b'"source": "docker-compose", "status": "success"}'
+ dispatch(cls.base_dir, ['version'], env=MetricsTest.test_env)
+ assert cls.get_content() == \
+ b'{"command": "compose version", "context": "moby", ' \
+ b'"source": "docker-compose", "status": "success"}'
+ dispatch(cls.base_dir, ['version', '--yyy'], env=MetricsTest.test_env)
+ assert cls.get_content() == \
+ b'{"command": "compose version", "context": "moby", ' \
+ b'"source": "docker-compose", "status": "failure"}'
+
+ @staticmethod
+ def get_content():
+ resp = MetricsTest.test_session.get("http+unix://localhost")
+ print(resp.content)
+ return resp.content
+
+
+def start_server(uri=TEST_SOCKET_FILE):
+ try:
+ os.remove(uri)
+ except OSError:
+ pass
+ httpd = HTTPServer(uri, MetricsHTTPRequestHandler, False)
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ sock.bind(TEST_SOCKET_FILE)
+ sock.listen(0)
+ httpd.socket = sock
+ print('Serving on ', uri)
+ httpd.serve_forever()
+ sock.shutdown(socket.SHUT_RDWR)
+ sock.close()
+ os.remove(uri)
+
+
+class MetricsServer:
+ @classmethod
+ def start(cls):
+ t = Thread(target=start_server, daemon=True)
+ t.start()
+
+
+class MetricsHTTPRequestHandler(BaseHTTPRequestHandler):
+ usages = []
+
+ def do_GET(self):
+ self.client_address = ('',) # avoid exception in BaseHTTPServer.py log_message()
+ self.send_response(200)
+ self.end_headers()
+ for u in MetricsHTTPRequestHandler.usages:
+ self.wfile.write(u)
+ MetricsHTTPRequestHandler.usages = []
+
+ def do_POST(self):
+ self.client_address = ('',) # avoid exception in BaseHTTPServer.py log_message()
+ content_length = int(self.headers['Content-Length'])
+ body = self.rfile.read(content_length)
+ print(body)
+ MetricsHTTPRequestHandler.usages.append(body)
+ self.send_response(200)
+ self.end_headers()
+
+
+if __name__ == '__main__':
+ logging.getLogger("urllib3").propagate = False
+ logging.getLogger("requests").propagate = False
+ start_server()
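+
+
+# Manual check (a sketch, assuming the socket path above): with this module
+# run directly, a metrics payload can be posted and read back over the socket:
+#
+#   curl --unix-socket /tmp/test-metrics-docker-cli.sock -d '{"command": "compose ps"}' http://localhost/
+#   curl --unix-socket /tmp/test-metrics-docker-cli.sock http://localhost/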
diff --git a/tests/integration/network_test.py b/tests/integration/network_test.py
index a2493fda..23c9e9a4 100644
--- a/tests/integration/network_test.py
+++ b/tests/integration/network_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import pytest
from .testcases import DockerClientTestCase
diff --git a/tests/integration/project_test.py b/tests/integration/project_test.py
index 3960d12e..54fe1ad7 100644
--- a/tests/integration/project_test.py
+++ b/tests/integration/project_test.py
@@ -1,19 +1,18 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
+import copy
import json
import os
import random
import shutil
import tempfile
-import py
import pytest
from docker.errors import APIError
from docker.errors import NotFound
from .. import mock
from ..helpers import build_config as load_config
+from ..helpers import BUSYBOX_IMAGE_WITH_TAG
+from ..helpers import cd
from ..helpers import create_host_file
from .testcases import DockerClientTestCase
from .testcases import SWARM_SKIP_CONTAINERS_ALL
@@ -22,14 +21,11 @@ from compose.config import ConfigurationError
from compose.config import types
from compose.config.types import VolumeFromSpec
from compose.config.types import VolumeSpec
-from compose.const import COMPOSEFILE_V2_0 as V2_0
-from compose.const import COMPOSEFILE_V2_1 as V2_1
-from compose.const import COMPOSEFILE_V2_2 as V2_2
-from compose.const import COMPOSEFILE_V2_3 as V2_3
-from compose.const import COMPOSEFILE_V3_1 as V3_1
+from compose.const import COMPOSE_SPEC as VERSION
from compose.const import LABEL_PROJECT
from compose.const import LABEL_SERVICE
from compose.container import Container
+from compose.errors import CompletedUnsuccessfully
from compose.errors import HealthCheckFailed
from compose.errors import NoHealthCheckConfigured
from compose.project import Project
@@ -38,16 +34,12 @@ from compose.service import ConvergenceStrategy
from tests.integration.testcases import if_runtime_available
from tests.integration.testcases import is_cluster
from tests.integration.testcases import no_cluster
-from tests.integration.testcases import v2_1_only
-from tests.integration.testcases import v2_2_only
-from tests.integration.testcases import v2_3_only
-from tests.integration.testcases import v2_only
-from tests.integration.testcases import v3_only
def build_config(**kwargs):
return config.Config(
- version=kwargs.get('version'),
+ config_version=kwargs.get('version', VERSION),
+ version=kwargs.get('version', VERSION),
services=kwargs.get('services'),
volumes=kwargs.get('volumes'),
networks=kwargs.get('networks'),
@@ -90,7 +82,8 @@ class ProjectTest(DockerClientTestCase):
project.up()
containers = project.containers(['web'])
- assert [c.name for c in containers] == ['composetest_web_1']
+ assert len(containers) == 1
+ assert containers[0].name.startswith('composetest_web_')
def test_containers_with_extra_service(self):
web = self.create_service('web')
@@ -102,18 +95,34 @@ class ProjectTest(DockerClientTestCase):
self.create_service('extra').create_container()
project = Project('composetest', [web, db], self.client)
- assert set(project.containers(stopped=True)) == set([web_1, db_1])
+ assert set(project.containers(stopped=True)) == {web_1, db_1}
+
+ def test_parallel_pull_with_no_image(self):
+ config_data = build_config(
+ services=[{
+ 'name': 'web',
+ 'build': {'context': '.'},
+ }],
+ )
+
+ project = Project.from_config(
+ name='composetest',
+ config_data=config_data,
+ client=self.client
+ )
+
+ project.pull(parallel_pull=True)
def test_volumes_from_service(self):
project = Project.from_config(
name='composetest',
config_data=load_config({
'data': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['/var/data'],
},
'db': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes_from': ['data'],
},
}),
@@ -126,7 +135,7 @@ class ProjectTest(DockerClientTestCase):
def test_volumes_from_container(self):
data_container = Container.create(
self.client,
- image='busybox:latest',
+ image=BUSYBOX_IMAGE_WITH_TAG,
volumes=['/var/data'],
name='composetest_data_container',
labels={LABEL_PROJECT: 'composetest'},
@@ -136,7 +145,7 @@ class ProjectTest(DockerClientTestCase):
name='composetest',
config_data=load_config({
'db': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes_from': ['composetest_data_container'],
},
}),
@@ -145,21 +154,19 @@ class ProjectTest(DockerClientTestCase):
db = project.get_service('db')
assert db._get_volumes_from() == [data_container.id + ':rw']
- @v2_only()
@no_cluster('container networks not supported in Swarm')
def test_network_mode_from_service(self):
project = Project.from_config(
name='composetest',
client=self.client,
config_data=load_config({
- 'version': str(V2_0),
'services': {
'net': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"]
},
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'network_mode': 'service:net',
'command': ["top"]
},
@@ -173,17 +180,15 @@ class ProjectTest(DockerClientTestCase):
net = project.get_service('net')
assert web.network_mode.mode == 'container:' + net.containers()[0].id
- @v2_only()
@no_cluster('container networks not supported in Swarm')
def test_network_mode_from_container(self):
def get_project():
return Project.from_config(
name='composetest',
config_data=load_config({
- 'version': str(V2_0),
'services': {
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'network_mode': 'container:composetest_net_container'
},
},
@@ -198,7 +203,7 @@ class ProjectTest(DockerClientTestCase):
net_container = Container.create(
self.client,
- image='busybox:latest',
+ image=BUSYBOX_IMAGE_WITH_TAG,
name='composetest_net_container',
command='top',
labels={LABEL_PROJECT: 'composetest'},
@@ -218,11 +223,11 @@ class ProjectTest(DockerClientTestCase):
name='composetest',
config_data=load_config({
'net': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"]
},
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'net': 'container:net',
'command': ["top"]
},
@@ -243,7 +248,7 @@ class ProjectTest(DockerClientTestCase):
name='composetest',
config_data=load_config({
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'net': 'container:composetest_net_container'
},
}),
@@ -257,7 +262,7 @@ class ProjectTest(DockerClientTestCase):
net_container = Container.create(
self.client,
- image='busybox:latest',
+ image=BUSYBOX_IMAGE_WITH_TAG,
name='composetest_net_container',
command='top',
labels={LABEL_PROJECT: 'composetest'},
@@ -286,24 +291,20 @@ class ProjectTest(DockerClientTestCase):
db_container = db.create_container()
project.start(service_names=['web'])
- assert set(c.name for c in project.containers() if c.is_running) == set(
- [web_container_1.name, web_container_2.name]
- )
+ assert {c.name for c in project.containers() if c.is_running} == {
+ web_container_1.name, web_container_2.name}
project.start()
- assert set(c.name for c in project.containers() if c.is_running) == set(
- [web_container_1.name, web_container_2.name, db_container.name]
- )
+ assert {c.name for c in project.containers() if c.is_running} == {
+ web_container_1.name, web_container_2.name, db_container.name}
project.pause(service_names=['web'])
- assert set([c.name for c in project.containers() if c.is_paused]) == set(
- [web_container_1.name, web_container_2.name]
- )
+ assert {c.name for c in project.containers() if c.is_paused} == {
+ web_container_1.name, web_container_2.name}
project.pause()
- assert set([c.name for c in project.containers() if c.is_paused]) == set(
- [web_container_1.name, web_container_2.name, db_container.name]
- )
+ assert {c.name for c in project.containers() if c.is_paused} == {
+ web_container_1.name, web_container_2.name, db_container.name}
project.unpause(service_names=['db'])
assert len([c.name for c in project.containers() if c.is_paused]) == 2
@@ -312,7 +313,7 @@ class ProjectTest(DockerClientTestCase):
assert len([c.name for c in project.containers() if c.is_paused]) == 0
project.stop(service_names=['web'], timeout=1)
- assert set(c.name for c in project.containers() if c.is_running) == set([db_container.name])
+ assert {c.name for c in project.containers() if c.is_running} == {db_container.name}
project.kill(service_names=['db'])
assert len([c for c in project.containers() if c.is_running]) == 0
@@ -431,11 +432,10 @@ class ProjectTest(DockerClientTestCase):
project.up(strategy=ConvergenceStrategy.always)
assert len(project.containers()) == 2
- db_container = [c for c in project.containers() if 'db' in c.name][0]
+ db_container = [c for c in project.containers() if c.service == 'db'][0]
assert db_container.id != old_db_id
assert db_container.get('Volumes./etc') == db_volume_path
- @v2_3_only()
def test_recreate_preserves_mounts(self):
web = self.create_service('web')
db = self.create_service('db', volumes=[types.MountSpec(type='volume', target='/etc')])
@@ -451,7 +451,7 @@ class ProjectTest(DockerClientTestCase):
project.up(strategy=ConvergenceStrategy.always)
assert len(project.containers()) == 2
- db_container = [c for c in project.containers() if 'db' in c.name][0]
+ db_container = [c for c in project.containers() if c.service == 'db'][0]
assert db_container.id != old_db_id
assert db_container.get_mount('/etc')['Source'] == db_volume_path
@@ -464,14 +464,14 @@ class ProjectTest(DockerClientTestCase):
project.up(['db'])
assert len(project.containers()) == 1
- old_db_id = project.containers()[0].id
container, = project.containers()
+ old_db_id = container.id
db_volume_path = container.get_mount('/var/db')['Source']
project.up(strategy=ConvergenceStrategy.never)
assert len(project.containers()) == 2
- db_container = [c for c in project.containers() if 'db' in c.name][0]
+ db_container = [c for c in project.containers() if c.name == container.name][0]
assert db_container.id == old_db_id
assert db_container.get_mount('/var/db')['Source'] == db_volume_path
@@ -498,7 +498,7 @@ class ProjectTest(DockerClientTestCase):
assert len(new_containers) == 2
assert [c.is_running for c in new_containers] == [True, True]
- db_container = [c for c in new_containers if 'db' in c.name][0]
+ db_container = [c for c in new_containers if c.service == 'db'][0]
assert db_container.id == old_db_id
assert db_container.get_mount('/var/db')['Source'] == db_volume_path
@@ -534,20 +534,20 @@ class ProjectTest(DockerClientTestCase):
name='composetest',
config_data=load_config({
'console': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"],
},
'data': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"]
},
'db': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"],
'volumes_from': ['data'],
},
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"],
'links': ['db'],
},
@@ -569,20 +569,20 @@ class ProjectTest(DockerClientTestCase):
name='composetest',
config_data=load_config({
'console': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"],
},
'data': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"]
},
'db': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"],
'volumes_from': ['data'],
},
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': ["top"],
'links': ['db'],
},
@@ -608,7 +608,7 @@ class ProjectTest(DockerClientTestCase):
'version': '2.1',
'services': {
'foo': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'tmpfs': ['/dev/shm'],
'volumes': ['/dev/shm']
}
@@ -643,13 +643,11 @@ class ProjectTest(DockerClientTestCase):
service = project.get_service('web')
assert len(service.containers()) == 1
- @v2_only()
def test_project_up_networks(self):
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'networks': {
'foo': None,
@@ -688,13 +686,11 @@ class ProjectTest(DockerClientTestCase):
foo_data = self.client.inspect_network('composetest_foo')
assert foo_data['Driver'] == 'bridge'
- @v2_only()
def test_up_with_ipam_config(self):
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'networks': {'front': None},
}],
networks={
@@ -748,13 +744,11 @@ class ProjectTest(DockerClientTestCase):
}],
}
- @v2_only()
def test_up_with_ipam_options(self):
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'networks': {'front': None},
}],
networks={
@@ -783,13 +777,11 @@ class ProjectTest(DockerClientTestCase):
"com.docker.compose.network.test": "9-29-045"
}
- @v2_1_only()
def test_up_with_network_static_addresses(self):
config_data = build_config(
- version=V2_1,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'networks': {
'static_test': {
@@ -832,16 +824,14 @@ class ProjectTest(DockerClientTestCase):
assert ipam_config.get('IPv4Address') == '172.16.100.100'
assert ipam_config.get('IPv6Address') == 'fe80::1001:102'
- @v2_3_only()
def test_up_with_network_priorities(self):
mac_address = '74:6f:75:68:6f:75'
def get_config_data(p1, p2, p3):
return build_config(
- version=V2_3,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'networks': {
'n1': {
'priority': p1,
@@ -897,14 +887,12 @@ class ProjectTest(DockerClientTestCase):
net_config = service_container.inspect()['NetworkSettings']['Networks']['composetest_n3']
assert net_config['MacAddress'] == mac_address
- @v2_1_only()
def test_up_with_enable_ipv6(self):
self.require_api_version('1.23')
config_data = build_config(
- version=V2_1,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'networks': {
'static_test': {
@@ -941,13 +929,11 @@ class ProjectTest(DockerClientTestCase):
get('IPAMConfig', {}))
assert ipam_config.get('IPv6Address') == 'fe80::1001:102'
- @v2_only()
def test_up_with_network_static_addresses_missing_subnet(self):
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'networks': {
'static_test': {
'ipv4_address': '172.16.100.100',
@@ -977,13 +963,11 @@ class ProjectTest(DockerClientTestCase):
with pytest.raises(ProjectError):
project.up()
- @v2_1_only()
def test_up_with_network_link_local_ips(self):
config_data = build_config(
- version=V2_1,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'networks': {
'linklocaltest': {
'link_local_ips': ['169.254.8.8']
@@ -1012,15 +996,13 @@ class ProjectTest(DockerClientTestCase):
assert 'LinkLocalIPs' in ipam_config
assert ipam_config['LinkLocalIPs'] == ['169.254.8.8']
- @v2_1_only()
def test_up_with_custom_name_resources(self):
config_data = build_config(
- version=V2_2,
services=[{
'name': 'web',
'volumes': [VolumeSpec.parse('foo:/container-path')],
'networks': {'foo': {}},
- 'image': 'busybox:latest'
+ 'image': BUSYBOX_IMAGE_WITH_TAG
}],
networks={
'foo': {
@@ -1049,14 +1031,12 @@ class ProjectTest(DockerClientTestCase):
assert network['Labels']['com.docker.compose.test_value'] == 'sharpdressedman'
assert volume['Labels']['com.docker.compose.test_value'] == 'thefuror'
- @v2_1_only()
def test_up_with_isolation(self):
self.require_api_version('1.24')
config_data = build_config(
- version=V2_1,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'isolation': 'default'
}],
)
@@ -1069,14 +1049,12 @@ class ProjectTest(DockerClientTestCase):
service_container = project.get_service('web').containers(stopped=True)[0]
assert service_container.inspect()['HostConfig']['Isolation'] == 'default'
- @v2_1_only()
def test_up_with_invalid_isolation(self):
self.require_api_version('1.24')
config_data = build_config(
- version=V2_1,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'isolation': 'foobar'
}],
)
@@ -1088,15 +1066,13 @@ class ProjectTest(DockerClientTestCase):
with pytest.raises(ProjectError):
project.up()
- @v2_3_only()
@if_runtime_available('runc')
def test_up_with_runtime(self):
self.require_api_version('1.30')
config_data = build_config(
- version=V2_3,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'runtime': 'runc'
}],
)
@@ -1109,14 +1085,12 @@ class ProjectTest(DockerClientTestCase):
service_container = project.get_service('web').containers(stopped=True)[0]
assert service_container.inspect()['HostConfig']['Runtime'] == 'runc'
- @v2_3_only()
def test_up_with_invalid_runtime(self):
self.require_api_version('1.30')
config_data = build_config(
- version=V2_3,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'runtime': 'foobar'
}],
)
@@ -1128,15 +1102,13 @@ class ProjectTest(DockerClientTestCase):
with pytest.raises(ProjectError):
project.up()
- @v2_3_only()
@if_runtime_available('nvidia')
def test_up_with_nvidia_runtime(self):
self.require_api_version('1.30')
config_data = build_config(
- version=V2_3,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'runtime': 'nvidia'
}],
)
@@ -1149,14 +1121,12 @@ class ProjectTest(DockerClientTestCase):
service_container = project.get_service('web').containers(stopped=True)[0]
assert service_container.inspect()['HostConfig']['Runtime'] == 'nvidia'
- @v2_only()
def test_project_up_with_network_internal(self):
self.require_api_version('1.23')
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'networks': {'internal': None},
}],
networks={
@@ -1175,17 +1145,15 @@ class ProjectTest(DockerClientTestCase):
assert network['Internal'] is True
- @v2_1_only()
def test_project_up_with_network_label(self):
self.require_api_version('1.23')
network_name = 'network_with_label'
config_data = build_config(
- version=V2_1,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'networks': {network_name: None}
}],
networks={
@@ -1210,15 +1178,13 @@ class ProjectTest(DockerClientTestCase):
assert 'label_key' in networks[0]['Labels']
assert networks[0]['Labels']['label_key'] == 'label_val'
- @v2_only()
def test_project_up_volumes(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
- full_vol_name = 'composetest_{0}'.format(vol_name)
+ vol_name = '{:x}'.format(random.getrandbits(32))
+ full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={vol_name: {'driver': 'local'}},
@@ -1235,17 +1201,15 @@ class ProjectTest(DockerClientTestCase):
assert volume_data['Name'].split('/')[-1] == full_vol_name
assert volume_data['Driver'] == 'local'
- @v2_1_only()
def test_project_up_with_volume_labels(self):
self.require_api_version('1.23')
volume_name = 'volume_with_label'
config_data = build_config(
- version=V2_1,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': [VolumeSpec.parse('{}:/data'.format(volume_name))]
}],
volumes={
@@ -1270,23 +1234,21 @@ class ProjectTest(DockerClientTestCase):
if v['Name'].split('/')[-1].startswith('composetest_')
]
- assert set([v['Name'].split('/')[-1] for v in volumes]) == set(
- ['composetest_{}'.format(volume_name)]
- )
+ assert {v['Name'].split('/')[-1] for v in volumes} == {
+ 'composetest_{}'.format(volume_name)
+ }
assert 'label_key' in volumes[0]['Labels']
assert volumes[0]['Labels']['label_key'] == 'label_val'
- @v2_only()
def test_project_up_logging_with_multiple_files(self):
base_file = config.ConfigFile(
'base.yml',
{
- 'version': str(V2_0),
'services': {
- 'simple': {'image': 'busybox:latest', 'command': 'top'},
+ 'simple': {'image': BUSYBOX_IMAGE_WITH_TAG, 'command': 'top'},
'another': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'logging': {
'driver': "json-file",
@@ -1301,7 +1263,6 @@ class ProjectTest(DockerClientTestCase):
override_file = config.ConfigFile(
'override.yml',
{
- 'version': str(V2_0),
'services': {
'another': {
'logging': {
@@ -1313,9 +1274,9 @@ class ProjectTest(DockerClientTestCase):
})
details = config.ConfigDetails('.', [base_file, override_file])
- tmpdir = py.test.ensuretemp('logging_test')
- self.addCleanup(tmpdir.remove)
- with tmpdir.as_cwd():
+ tmpdir = tempfile.mkdtemp('logging_test')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ with cd(tmpdir):
config_data = config.load(details)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
@@ -1329,15 +1290,13 @@ class ProjectTest(DockerClientTestCase):
assert log_config
assert log_config.get('Type') == 'none'
- @v2_only()
def test_project_up_port_mappings_with_multiple_files(self):
base_file = config.ConfigFile(
'base.yml',
{
- 'version': str(V2_0),
'services': {
'simple': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'ports': ['1234:1234']
},
@@ -1347,7 +1306,6 @@ class ProjectTest(DockerClientTestCase):
override_file = config.ConfigFile(
'override.yml',
{
- 'version': str(V2_0),
'services': {
'simple': {
'ports': ['1234:1234']
@@ -1365,13 +1323,11 @@ class ProjectTest(DockerClientTestCase):
containers = project.containers()
assert len(containers) == 1
- @v2_2_only()
def test_project_up_config_scale(self):
config_data = build_config(
- version=V2_2,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'scale': 3
}]
@@ -1393,15 +1349,43 @@ class ProjectTest(DockerClientTestCase):
project.up()
assert len(project.containers()) == 3
- @v2_only()
+ def test_project_up_scale_with_stopped_containers(self):
+ config_data = build_config(
+ services=[{
+ 'name': 'web',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'top',
+ 'scale': 2
+ }]
+ )
+ project = Project.from_config(
+ name='composetest', config_data=config_data, client=self.client
+ )
+
+ project.up()
+ containers = project.containers()
+ assert len(containers) == 2
+
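+ # One container stopped: re-upping at scale 2 should get back to two running containers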
+ self.client.stop(containers[0].id)
+ project.up(scale_override={'web': 2})
+ containers = project.containers()
+ assert len(containers) == 2
+
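+ # Scaling up to 3 with a container stopped should end with three running containers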
+ self.client.stop(containers[0].id)
+ project.up(scale_override={'web': 3})
+ assert len(project.containers()) == 3
+
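+ # Scaling down to 1 should remove the surplus containers, running or stopped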
+ self.client.stop(containers[0].id)
+ project.up(scale_override={'web': 1})
+ assert len(project.containers()) == 1
+
def test_initialize_volumes(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
- full_vol_name = 'composetest_{0}'.format(vol_name)
+ vol_name = '{:x}'.format(random.getrandbits(32))
+ full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={vol_name: {}},
@@ -1417,15 +1401,13 @@ class ProjectTest(DockerClientTestCase):
assert volume_data['Name'].split('/')[-1] == full_vol_name
assert volume_data['Driver'] == 'local'
- @v2_only()
def test_project_up_implicit_volume_driver(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
- full_vol_name = 'composetest_{0}'.format(vol_name)
+ vol_name = '{:x}'.format(random.getrandbits(32))
+ full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={vol_name: {}},
@@ -1441,15 +1423,13 @@ class ProjectTest(DockerClientTestCase):
assert volume_data['Name'].split('/')[-1] == full_vol_name
assert volume_data['Driver'] == 'local'
- @v3_only()
def test_project_up_with_secrets(self):
node = create_host_file(self.client, os.path.abspath('tests/fixtures/secrets/default'))
config_data = build_config(
- version=V3_1,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'cat /run/secrets/special',
'secrets': [
types.ServiceSecret.parse({'source': 'super', 'target': 'special'}),
@@ -1478,15 +1458,66 @@ class ProjectTest(DockerClientTestCase):
output = container.logs()
assert output == b"This is the secret\n"
- @v2_only()
+ def test_project_up_with_added_secrets(self):
+ node = create_host_file(self.client, os.path.abspath('tests/fixtures/secrets/default'))
+
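+ # Up once without the secret, then re-up with it added; the new container should have the secret mounted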
+ config_input1 = {
+ 'services': [
+ {
+ 'name': 'web',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'cat /run/secrets/special',
+ 'environment': ['constraint:node=={}'.format(node if node is not None else '')]
+ }
+ }
+ ],
+ 'secrets': {
+ 'super': {
+ 'file': os.path.abspath('tests/fixtures/secrets/default')
+ }
+ }
+ }
+ config_input2 = copy.deepcopy(config_input1)
+ # Add the secret
+ config_input2['services'][0]['secrets'] = [
+ types.ServiceSecret.parse({'source': 'super', 'target': 'special'})
+ ]
+
+ config_data1 = build_config(**config_input1)
+ config_data2 = build_config(**config_input2)
+
+ # First, bring the project up without the secret
+ project = Project.from_config(
+ client=self.client,
+ name='composetest',
+ config_data=config_data1,
+ )
+ project.up()
+
+ # Then bring it up again with the secret added
+ project = Project.from_config(
+ client=self.client,
+ name='composetest',
+ config_data=config_data2,
+ )
+ project.up()
+ project.stop()
+
+ containers = project.containers(stopped=True)
+ assert len(containers) == 1
+ container, = containers
+
+ output = container.logs()
+ assert output == b"This is the secret\n"
+
def test_initialize_volumes_invalid_volume_driver(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
+ vol_name = '{:x}'.format(random.getrandbits(32))
config_data = build_config(
- version=V2_0,
+ version=VERSION,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={vol_name: {'driver': 'foobar'}},
@@ -1499,17 +1530,15 @@ class ProjectTest(DockerClientTestCase):
with pytest.raises(APIError if is_cluster(self.client) else config.ConfigurationError):
project.volumes.initialize()
- @v2_only()
@no_cluster('inspect volume by name defect on Swarm Classic')
def test_initialize_volumes_updated_driver(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
- full_vol_name = 'composetest_{0}'.format(vol_name)
+ vol_name = '{:x}'.format(random.getrandbits(32))
+ full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={vol_name: {'driver': 'local'}},
@@ -1534,24 +1563,22 @@ class ProjectTest(DockerClientTestCase):
)
with pytest.raises(config.ConfigurationError) as e:
project.volumes.initialize()
- assert 'Configuration for volume {0} specifies driver smb'.format(
+ assert 'Configuration for volume {} specifies driver smb'.format(
vol_name
) in str(e.value)
- @v2_only()
@no_cluster('inspect volume by name defect on Swarm Classic')
def test_initialize_volumes_updated_driver_opts(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
- full_vol_name = 'composetest_{0}'.format(vol_name)
+ vol_name = '{:x}'.format(random.getrandbits(32))
+ full_vol_name = 'composetest_{}'.format(vol_name)
tmpdir = tempfile.mkdtemp(prefix='compose_test_')
self.addCleanup(shutil.rmtree, tmpdir)
driver_opts = {'o': 'bind', 'device': tmpdir, 'type': 'none'}
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={
@@ -1580,20 +1607,18 @@ class ProjectTest(DockerClientTestCase):
)
with pytest.raises(config.ConfigurationError) as e:
project.volumes.initialize()
- assert 'Configuration for volume {0} specifies "device" driver_opt {1}'.format(
+ assert 'Configuration for volume {} specifies "device" driver_opt {}'.format(
vol_name, driver_opts['device']
) in str(e.value)
- @v2_only()
def test_initialize_volumes_updated_blank_driver(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
- full_vol_name = 'composetest_{0}'.format(vol_name)
+ vol_name = '{:x}'.format(random.getrandbits(32))
+ full_vol_name = 'composetest_{}'.format(vol_name)
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={vol_name: {'driver': 'local'}},
@@ -1621,18 +1646,16 @@ class ProjectTest(DockerClientTestCase):
assert volume_data['Name'].split('/')[-1] == full_vol_name
assert volume_data['Driver'] == 'local'
- @v2_only()
@no_cluster('inspect volume by name defect on Swarm Classic')
def test_initialize_volumes_external_volumes(self):
# Use composetest_ prefix so it gets garbage-collected in tearDown()
- vol_name = 'composetest_{0:x}'.format(random.getrandbits(32))
- full_vol_name = 'composetest_{0}'.format(vol_name)
+ vol_name = 'composetest_{:x}'.format(random.getrandbits(32))
+ full_vol_name = 'composetest_{}'.format(vol_name)
self.client.create_volume(vol_name)
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={
@@ -1648,15 +1671,13 @@ class ProjectTest(DockerClientTestCase):
with pytest.raises(NotFound):
self.client.inspect_volume(full_vol_name)
- @v2_only()
def test_initialize_volumes_inexistent_external_volume(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
+ vol_name = '{:x}'.format(random.getrandbits(32))
config_data = build_config(
- version=V2_0,
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top'
}],
volumes={
@@ -1669,24 +1690,22 @@ class ProjectTest(DockerClientTestCase):
)
with pytest.raises(config.ConfigurationError) as e:
project.volumes.initialize()
- assert 'Volume {0} declared as external'.format(
+ assert 'Volume {} declared as external'.format(
vol_name
) in str(e.value)
- @v2_only()
def test_project_up_named_volumes_in_binds(self):
- vol_name = '{0:x}'.format(random.getrandbits(32))
- full_vol_name = 'composetest_{0}'.format(vol_name)
+ vol_name = '{:x}'.format(random.getrandbits(32))
+ full_vol_name = 'composetest_{}'.format(vol_name)
base_file = config.ConfigFile(
'base.yml',
{
- 'version': str(V2_0),
'services': {
'simple': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
- 'volumes': ['{0}:/data'.format(vol_name)]
+ 'volumes': ['{}:/data'.format(vol_name)]
},
},
'volumes': {
@@ -1713,7 +1732,7 @@ class ProjectTest(DockerClientTestCase):
def test_project_up_orphans(self):
config_dict = {
'service1': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
}
}
@@ -1750,7 +1769,7 @@ class ProjectTest(DockerClientTestCase):
def test_project_up_ignore_orphans(self):
config_dict = {
'service1': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
}
}
@@ -1772,13 +1791,12 @@ class ProjectTest(DockerClientTestCase):
mock_log.warning.assert_not_called()
- @v2_1_only()
def test_project_up_healthy_dependency(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'healthcheck': {
'test': 'exit 0',
@@ -1788,7 +1806,7 @@ class ProjectTest(DockerClientTestCase):
},
},
'svc2': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_healthy'},
@@ -1809,13 +1827,12 @@ class ProjectTest(DockerClientTestCase):
assert 'svc1' in svc2.get_dependency_names()
assert svc1.is_healthy()
- @v2_1_only()
def test_project_up_unhealthy_dependency(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'healthcheck': {
'test': 'exit 1',
@@ -1825,7 +1842,7 @@ class ProjectTest(DockerClientTestCase):
},
},
'svc2': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_healthy'},
@@ -1848,20 +1865,19 @@ class ProjectTest(DockerClientTestCase):
with pytest.raises(HealthCheckFailed):
svc1.is_healthy()
- @v2_1_only()
def test_project_up_no_healthcheck_dependency(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'healthcheck': {
'disable': True
},
},
'svc2': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_healthy'},
@@ -1884,6 +1900,106 @@ class ProjectTest(DockerClientTestCase):
with pytest.raises(NoHealthCheckConfigured):
svc1.is_healthy()
+ def test_project_up_completed_successfully_dependency(self):
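+ # svc1 runs 'true' and exits 0; svc2 only starts once svc1 has completed successfully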
+ config_dict = {
+ 'version': '2.1',
+ 'services': {
+ 'svc1': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'true'
+ },
+ 'svc2': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'top',
+ 'depends_on': {
+ 'svc1': {'condition': 'service_completed_successfully'},
+ }
+ }
+ }
+ }
+ config_data = load_config(config_dict)
+ project = Project.from_config(
+ name='composetest', config_data=config_data, client=self.client
+ )
+ project.up()
+
+ svc1 = project.get_service('svc1')
+ svc2 = project.get_service('svc2')
+
+ assert 'svc1' in svc2.get_dependency_names()
+ assert svc2.containers()[0].is_running
+ assert len(svc1.containers()) == 0
+ assert svc1.is_completed_successfully()
+
+ def test_project_up_completed_unsuccessfully_dependency(self):
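+ # svc1 runs 'false' and exits non-zero, so up() must fail before svc2 is started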
+ config_dict = {
+ 'version': '2.1',
+ 'services': {
+ 'svc1': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'false'
+ },
+ 'svc2': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'top',
+ 'depends_on': {
+ 'svc1': {'condition': 'service_completed_successfully'},
+ }
+ }
+ }
+ }
+ config_data = load_config(config_dict)
+ project = Project.from_config(
+ name='composetest', config_data=config_data, client=self.client
+ )
+ with pytest.raises(ProjectError):
+ project.up()
+
+ svc1 = project.get_service('svc1')
+ svc2 = project.get_service('svc2')
+ assert 'svc1' in svc2.get_dependency_names()
+ assert len(svc2.containers()) == 0
+ with pytest.raises(CompletedUnsuccessfully):
+ svc1.is_completed_successfully()
+
+ def test_project_up_completed_differently_dependencies(self):
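+ # One dependency succeeds and one fails; svc3 must not start and up() must raise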
+ config_dict = {
+ 'version': '2.1',
+ 'services': {
+ 'svc1': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'true'
+ },
+ 'svc2': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'false'
+ },
+ 'svc3': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'top',
+ 'depends_on': {
+ 'svc1': {'condition': 'service_completed_successfully'},
+ 'svc2': {'condition': 'service_completed_successfully'},
+ }
+ }
+ }
+ }
+ config_data = load_config(config_dict)
+ project = Project.from_config(
+ name='composetest', config_data=config_data, client=self.client
+ )
+ with pytest.raises(ProjectError):
+ project.up()
+
+ svc1 = project.get_service('svc1')
+ svc2 = project.get_service('svc2')
+ svc3 = project.get_service('svc3')
+ assert ['svc1', 'svc2'] == svc3.get_dependency_names()
+ assert svc1.is_completed_successfully()
+ assert len(svc3.containers()) == 0
+ with pytest.raises(CompletedUnsuccessfully):
+ svc2.is_completed_successfully()
+
def test_project_up_seccomp_profile(self):
seccomp_data = {
'defaultAction': 'SCMP_ACT_ALLOW',
@@ -1898,7 +2014,7 @@ class ProjectTest(DockerClientTestCase):
'version': '2.3',
'services': {
'svc1': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'security_opt': ['seccomp:"{}"'.format(profile_path)]
}
@@ -1915,3 +2031,65 @@ class ProjectTest(DockerClientTestCase):
assert len(remote_secopts) == 1
assert remote_secopts[0].startswith('seccomp=')
assert json.loads(remote_secopts[0].lstrip('seccomp=')) == seccomp_data
+
+ @no_cluster('inspect volume by name defect on Swarm Classic')
+ def test_project_up_name_starts_with_illegal_char(self):
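+ # Leading '_' or '-' is stripped from container and volume names but kept in the project label and network name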
+ config_dict = {
+ 'version': '2.3',
+ 'services': {
+ 'svc1': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'ls',
+ 'volumes': ['foo:/foo:rw'],
+ 'networks': ['bar'],
+ },
+ },
+ 'volumes': {
+ 'foo': {},
+ },
+ 'networks': {
+ 'bar': {},
+ }
+ }
+ config_data = load_config(config_dict)
+ project = Project.from_config(
+ name='_underscoretest', config_data=config_data, client=self.client
+ )
+ project.up()
+ self.addCleanup(project.down, None, True)
+
+ containers = project.containers(stopped=True)
+ assert len(containers) == 1
+ assert containers[0].name.startswith('underscoretest_svc1_')
+ assert containers[0].project == '_underscoretest'
+
+ full_vol_name = 'underscoretest_foo'
+ vol_data = self.get_volume_data(full_vol_name)
+ assert vol_data
+ assert vol_data['Labels'][LABEL_PROJECT] == '_underscoretest'
+
+ full_net_name = '_underscoretest_bar'
+ net_data = self.client.inspect_network(full_net_name)
+ assert net_data
+ assert net_data['Labels'][LABEL_PROJECT] == '_underscoretest'
+
+ project2 = Project.from_config(
+ name='-dashtest', config_data=config_data, client=self.client
+ )
+ project2.up()
+ self.addCleanup(project2.down, None, True)
+
+ containers = project2.containers(stopped=True)
+ assert len(containers) == 1
+ assert containers[0].name.startswith('dashtest_svc1_')
+ assert containers[0].project == '-dashtest'
+
+ full_vol_name = 'dashtest_foo'
+ vol_data = self.get_volume_data(full_vol_name)
+ assert vol_data
+ assert vol_data['Labels'][LABEL_PROJECT] == '-dashtest'
+
+ full_net_name = '-dashtest_bar'
+ net_data = self.client.inspect_network(full_net_name)
+ assert net_data
+ assert net_data['Labels'][LABEL_PROJECT] == '-dashtest'
diff --git a/tests/integration/resilience_test.py b/tests/integration/resilience_test.py
index 3de16e97..2fbaafb2 100644
--- a/tests/integration/resilience_test.py
+++ b/tests/integration/resilience_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import pytest
from .. import mock
@@ -25,7 +22,7 @@ class ResilienceTest(DockerClientTestCase):
def tearDown(self):
del self.project
del self.db
- super(ResilienceTest, self).tearDown()
+ super().tearDown()
def test_successful_recreate(self):
self.project.up(strategy=ConvergenceStrategy.always)
diff --git a/tests/integration/service_test.py b/tests/integration/service_test.py
index d8f4d094..06a97508 100644
--- a/tests/integration/service_test.py
+++ b/tests/integration/service_test.py
@@ -1,20 +1,17 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import os
import re
import shutil
import tempfile
from distutils.spawn import find_executable
+from io import StringIO
from os import path
import pytest
from docker.errors import APIError
from docker.errors import ImageNotFound
-from six import StringIO
-from six import text_type
from .. import mock
+from ..helpers import BUSYBOX_IMAGE_WITH_TAG
from .testcases import docker_client
from .testcases import DockerClientTestCase
from .testcases import get_links
@@ -37,8 +34,12 @@ from compose.container import Container
from compose.errors import OperationFailedError
from compose.parallel import ParallelStreamWriter
from compose.project import OneOffFilter
+from compose.project import Project
+from compose.service import BuildAction
+from compose.service import BuildError
from compose.service import ConvergencePlan
from compose.service import ConvergenceStrategy
+from compose.service import IpcMode
from compose.service import NetworkMode
from compose.service import PidMode
from compose.service import Service
@@ -46,11 +47,6 @@ from compose.utils import parse_nanoseconds_int
from tests.helpers import create_custom_host_file
from tests.integration.testcases import is_cluster
from tests.integration.testcases import no_cluster
-from tests.integration.testcases import v2_1_only
-from tests.integration.testcases import v2_2_only
-from tests.integration.testcases import v2_3_only
-from tests.integration.testcases import v2_only
-from tests.integration.testcases import v3_only
def create_and_start_container(service, **override_options):
@@ -67,7 +63,7 @@ class ServiceTest(DockerClientTestCase):
create_and_start_container(foo)
assert len(foo.containers()) == 1
- assert foo.containers()[0].name == 'composetest_foo_1'
+ assert foo.containers()[0].name.startswith('composetest_foo_')
assert len(bar.containers()) == 0
create_and_start_container(bar)
@@ -77,8 +73,8 @@ class ServiceTest(DockerClientTestCase):
assert len(bar.containers()) == 2
names = [c.name for c in bar.containers()]
- assert 'composetest_bar_1' in names
- assert 'composetest_bar_2' in names
+ assert len(names) == 2
+ assert all(name.startswith('composetest_bar_') for name in names)
def test_containers_one_off(self):
db = self.create_service('db')
@@ -89,18 +85,18 @@ class ServiceTest(DockerClientTestCase):
def test_project_is_added_to_container_name(self):
service = self.create_service('web')
create_and_start_container(service)
- assert service.containers()[0].name == 'composetest_web_1'
+ assert service.containers()[0].name.startswith('composetest_web_')
def test_create_container_with_one_off(self):
db = self.create_service('db')
container = db.create_container(one_off=True)
- assert container.name == 'composetest_db_run_1'
+ assert container.name.startswith('composetest_db_run_')
def test_create_container_with_one_off_when_existing_container_is_running(self):
db = self.create_service('db')
db.start()
container = db.create_container(one_off=True)
- assert container.name == 'composetest_db_run_1'
+ assert container.name.startswith('composetest_db_run_')
def test_create_container_with_unspecified_volume(self):
service = self.create_service('db', volumes=[VolumeSpec.parse('/var/db')])
@@ -136,7 +132,6 @@ class ServiceTest(DockerClientTestCase):
assert container.get('HostConfig.CpuRealtimeRuntime') == 40000
assert container.get('HostConfig.CpuRealtimePeriod') == 150000
- @v2_2_only()
def test_create_container_with_cpu_count(self):
self.require_api_version('1.25')
service = self.create_service('db', cpu_count=2)
@@ -144,7 +139,6 @@ class ServiceTest(DockerClientTestCase):
service.start_container(container)
assert container.get('HostConfig.CpuCount') == 2
- @v2_2_only()
@pytest.mark.skipif(not IS_WINDOWS_PLATFORM, reason='cpu_percent is not supported for Linux')
def test_create_container_with_cpu_percent(self):
self.require_api_version('1.25')
@@ -153,7 +147,6 @@ class ServiceTest(DockerClientTestCase):
service.start_container(container)
assert container.get('HostConfig.CpuPercent') == 12
- @v2_2_only()
def test_create_container_with_cpus(self):
self.require_api_version('1.25')
service = self.create_service('db', cpus=1)
@@ -220,6 +213,9 @@ class ServiceTest(DockerClientTestCase):
service.start_container(container)
assert container.get('HostConfig.ReadonlyRootfs') == read_only
+ @pytest.mark.xfail(True, reason='Getting "Your kernel does not support '
+ 'cgroup blkio weight and weight_device" on daemon start '
+ 'on Linux kernel 5.3.x')
def test_create_container_with_blkio_config(self):
blkio_config = {
'weight': 300,
@@ -253,7 +249,7 @@ class ServiceTest(DockerClientTestCase):
service = self.create_service('db', security_opt=security_opt)
container = service.create_container()
service.start_container(container)
- assert set(container.get('HostConfig.SecurityOpt')) == set([o.repr() for o in security_opt])
+ assert set(container.get('HostConfig.SecurityOpt')) == {o.repr() for o in security_opt}
@pytest.mark.xfail(True, reason='Not supported on most drivers')
def test_create_container_with_storage_opt(self):
@@ -295,10 +291,9 @@ class ServiceTest(DockerClientTestCase):
actual_host_path = container.get_mount(container_path)['Source']
assert path.basename(actual_host_path) == path.basename(host_path), (
- "Last component differs: %s, %s" % (actual_host_path, host_path)
+ "Last component differs: {}, {}".format(actual_host_path, host_path)
)
- @v2_3_only()
def test_create_container_with_host_mount(self):
host_path = '/tmp/host-path'
container_path = '/container-path'
@@ -318,7 +313,6 @@ class ServiceTest(DockerClientTestCase):
assert path.basename(mount['Source']) == path.basename(host_path)
assert mount['RW'] is False
- @v2_3_only()
def test_create_container_with_tmpfs_mount(self):
container_path = '/container-tmpfs'
service = self.create_service(
@@ -331,7 +325,6 @@ class ServiceTest(DockerClientTestCase):
assert mount
assert mount['Type'] == 'tmpfs'
- @v2_3_only()
def test_create_container_with_tmpfs_mount_tmpfs_size(self):
container_path = '/container-tmpfs'
service = self.create_service(
@@ -348,7 +341,6 @@ class ServiceTest(DockerClientTestCase):
'SizeBytes': 5368709
}
- @v2_3_only()
def test_create_container_with_volume_mount(self):
container_path = '/container-volume'
volume_name = 'composetest_abcde'
@@ -363,7 +355,6 @@ class ServiceTest(DockerClientTestCase):
assert mount
assert mount['Name'] == volume_name
- @v3_only()
def test_create_container_with_legacy_mount(self):
# Ensure mounts are converted to volumes if API version < 1.30
# Needed to support long syntax in the 3.2 format
@@ -373,14 +364,13 @@ class ServiceTest(DockerClientTestCase):
self.client.create_volume(volume_name)
service = Service('db', client=client, volumes=[
MountSpec(type='volume', source=volume_name, target=container_path)
- ], image='busybox:latest', command=['top'], project='composetest')
+ ], image=BUSYBOX_IMAGE_WITH_TAG, command=['top'], project='composetest')
container = service.create_container()
service.start_container(container)
mount = container.get_mount(container_path)
assert mount
assert mount['Name'] == volume_name
- @v3_only()
def test_create_container_with_legacy_tmpfs_mount(self):
# Ensure tmpfs mounts are converted to tmpfs entries if API version < 1.30
# Needed to support long syntax in the 3.2 format
@@ -388,7 +378,7 @@ class ServiceTest(DockerClientTestCase):
container_path = '/container-tmpfs'
service = Service('db', client=client, volumes=[
MountSpec(type='tmpfs', target=container_path)
- ], image='busybox:latest', command=['top'], project='composetest')
+ ], image=BUSYBOX_IMAGE_WITH_TAG, command=['top'], project='composetest')
container = service.create_container()
service.start_container(container)
mount = container.get_mount(container_path)
@@ -424,6 +414,22 @@ class ServiceTest(DockerClientTestCase):
new_container = service.recreate_container(old_container)
assert new_container.get_mount('/data')['Source'] == volume_path
+ def test_recreate_volume_to_mount(self):
+ # https://github.com/docker/compose/issues/6280
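+ # Recreating a container that mounts an anonymous volume must keep the mount backed by a volume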
+ service = Service(
+ project='composetest',
+ name='db',
+ client=self.client,
+ build={'context': 'tests/fixtures/dockerfile-with-volume'},
+ volumes=[MountSpec.parse({
+ 'type': 'volume',
+ 'target': '/data',
+ })]
+ )
+ old_container = create_and_start_container(service)
+ new_container = service.recreate_container(old_container)
+ assert new_container.get_mount('/data')['Source']
+
def test_duplicate_volume_trailing_slash(self):
"""
When an image specifies a volume, and the Compose file specifies a host path
@@ -458,7 +464,7 @@ class ServiceTest(DockerClientTestCase):
volume_container_1 = volume_service.create_container()
volume_container_2 = Container.create(
self.client,
- image='busybox:latest',
+ image=BUSYBOX_IMAGE_WITH_TAG,
command=["top"],
labels={LABEL_PROJECT: 'composetest'},
host_config={},
@@ -489,7 +495,7 @@ class ServiceTest(DockerClientTestCase):
assert old_container.get('Config.Entrypoint') == ['top']
assert old_container.get('Config.Cmd') == ['-d', '1']
assert 'FOO=1' in old_container.get('Config.Env')
- assert old_container.name == 'composetest_db_1'
+ assert old_container.name.startswith('composetest_db_')
service.start_container(old_container)
old_container.inspect() # reload volume data
volume_path = old_container.get_mount('/etc')['Source']
@@ -503,7 +509,7 @@ class ServiceTest(DockerClientTestCase):
assert new_container.get('Config.Entrypoint') == ['top']
assert new_container.get('Config.Cmd') == ['-d', '1']
assert 'FOO=2' in new_container.get('Config.Env')
- assert new_container.name == 'composetest_db_1'
+ assert new_container.name.startswith('composetest_db_')
assert new_container.get_mount('/etc')['Source'] == volume_path
if not is_cluster(self.client):
assert (
@@ -571,7 +577,6 @@ class ServiceTest(DockerClientTestCase):
orig_container = new_container
- @v2_3_only()
def test_execute_convergence_plan_recreate_twice_with_mount(self):
service = self.create_service(
'db',
@@ -679,8 +684,8 @@ class ServiceTest(DockerClientTestCase):
new_container, = service.execute_convergence_plan(
ConvergencePlan('recreate', [old_container]))
- mock_log.warn.assert_called_once_with(mock.ANY)
- _, args, kwargs = mock_log.warn.mock_calls[0]
+ mock_log.warning.assert_called_once_with(mock.ANY)
+ _, args, kwargs = mock_log.warning.mock_calls[0]
assert "Service \"db\" is using volume \"/data\" from the previous container" in args[0]
assert [mount['Destination'] for mount in new_container.get('Mounts')] == ['/data']
@@ -836,47 +841,50 @@ class ServiceTest(DockerClientTestCase):
db = self.create_service('db')
web = self.create_service('web', links=[(db, None)])
- create_and_start_container(db)
- create_and_start_container(db)
+ db1 = create_and_start_container(db)
+ db2 = create_and_start_container(db)
create_and_start_container(web)
- assert set(get_links(web.containers()[0])) == set([
- 'composetest_db_1', 'db_1',
- 'composetest_db_2', 'db_2',
+ assert set(get_links(web.containers()[0])) == {
+ db1.name, db1.name_without_project,
+ db2.name, db2.name_without_project,
'db'
- ])
+ }
@no_cluster('No legacy links support in Swarm')
def test_start_container_creates_links_with_names(self):
db = self.create_service('db')
web = self.create_service('web', links=[(db, 'custom_link_name')])
- create_and_start_container(db)
- create_and_start_container(db)
+ db1 = create_and_start_container(db)
+ db2 = create_and_start_container(db)
create_and_start_container(web)
- assert set(get_links(web.containers()[0])) == set([
- 'composetest_db_1', 'db_1',
- 'composetest_db_2', 'db_2',
+ assert set(get_links(web.containers()[0])) == {
+ db1.name, db1.name_without_project,
+ db2.name, db2.name_without_project,
'custom_link_name'
- ])
+ }
@no_cluster('No legacy links support in Swarm')
def test_start_container_with_external_links(self):
db = self.create_service('db')
- web = self.create_service('web', external_links=['composetest_db_1',
- 'composetest_db_2',
- 'composetest_db_3:db_3'])
+ db_ctnrs = [create_and_start_container(db) for _ in range(3)]
+ web = self.create_service(
+ 'web', external_links=[
+ db_ctnrs[0].name,
+ db_ctnrs[1].name,
+ '{}:db_3'.format(db_ctnrs[2].name)
+ ]
+ )
- for _ in range(3):
- create_and_start_container(db)
create_and_start_container(web)
- assert set(get_links(web.containers()[0])) == set([
- 'composetest_db_1',
- 'composetest_db_2',
+ assert set(get_links(web.containers()[0])) == {
+ db_ctnrs[0].name,
+ db_ctnrs[1].name,
'db_3'
- ])
+ }
@no_cluster('No legacy links support in Swarm')
def test_start_normal_container_does_not_create_links_to_its_own_service(self):
@@ -886,22 +894,22 @@ class ServiceTest(DockerClientTestCase):
create_and_start_container(db)
c = create_and_start_container(db)
- assert set(get_links(c)) == set([])
+ assert set(get_links(c)) == set()
@no_cluster('No legacy links support in Swarm')
def test_start_one_off_container_creates_links_to_its_own_service(self):
db = self.create_service('db')
- create_and_start_container(db)
- create_and_start_container(db)
+ db1 = create_and_start_container(db)
+ db2 = create_and_start_container(db)
c = create_and_start_container(db, one_off=OneOffFilter.only)
- assert set(get_links(c)) == set([
- 'composetest_db_1', 'db_1',
- 'composetest_db_2', 'db_2',
+ assert set(get_links(c)) == {
+ db1.name, db1.name_without_project,
+ db2.name, db2.name_without_project,
'db'
- ])
+ }
def test_start_container_builds_images(self):
service = Service(
@@ -940,12 +948,86 @@ class ServiceTest(DockerClientTestCase):
with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
f.write("FROM busybox\n")
- service = self.create_service('web', build={'context': base_dir})
+ service = self.create_service('web',
+ build={'context': base_dir},
+ environment={
+ 'COMPOSE_DOCKER_CLI_BUILD': '0',
+ 'DOCKER_BUILDKIT': '0',
+ })
service.build()
self.addCleanup(self.client.remove_image, service.image_name)
assert self.client.inspect_image('composetest_web')
+ def test_build_cli(self):
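+ # With DOCKER_BUILDKIT=1, build(cli=True) delegates the build to the docker CLI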
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write("FROM busybox\n")
+
+ service = self.create_service('web',
+ build={'context': base_dir},
+ environment={
+ 'DOCKER_BUILDKIT': '1',
+ })
+ service.build(cli=True)
+ self.addCleanup(self.client.remove_image, service.image_name)
+ assert self.client.inspect_image('composetest_web')
+
+ def test_build_cli_with_build_labels(self):
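+ # Labels declared under build should end up on the image when building via the CLI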
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write("FROM busybox\n")
+
+ service = self.create_service('web',
+ build={
+ 'context': base_dir,
+ 'labels': {'com.docker.compose.test': 'true'}},
+ )
+ service.build(cli=True)
+ self.addCleanup(self.client.remove_image, service.image_name)
+ image = self.client.inspect_image('composetest_web')
+ assert image['Config']['Labels']['com.docker.compose.test']
+
+ def test_build_cli_with_build_error(self):
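+ # A Dockerfile step exiting non-zero should surface as BuildError on the CLI build path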
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write('\n'.join([
+ "FROM busybox",
+ "RUN exit 2",
+ ]))
+ service = self.create_service('web',
+ build={
+ 'context': base_dir,
+ 'labels': {'com.docker.compose.test': 'true'}},
+ )
+ with pytest.raises(BuildError):
+ service.build(cli=True)
+
+ def test_up_build_cli(self):
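+ # up() with do_build=BuildAction.force should build the image and start a single container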
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write("FROM busybox\n")
+
+ web = self.create_service('web',
+ build={'context': base_dir},
+ environment={
+ 'DOCKER_BUILDKIT': '1',
+ })
+ project = Project('composetest', [web], self.client)
+ project.up(do_build=BuildAction.force)
+
+ containers = project.containers(['web'])
+ assert len(containers) == 1
+ assert containers[0].name.startswith('composetest_web_')
+
def test_build_non_ascii_filename(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
@@ -956,7 +1038,7 @@ class ServiceTest(DockerClientTestCase):
with open(os.path.join(base_dir.encode('utf8'), b'foo\xE2bar'), 'w') as f:
f.write("hello world\n")
- service = self.create_service('web', build={'context': text_type(base_dir)})
+ service = self.create_service('web', build={'context': str(base_dir)})
service.build()
self.addCleanup(self.client.remove_image, service.image_name)
assert self.client.inspect_image('composetest_web')
@@ -990,7 +1072,7 @@ class ServiceTest(DockerClientTestCase):
f.write("RUN echo ${build_version}\n")
service = self.create_service('buildwithargs',
- build={'context': text_type(base_dir),
+ build={'context': str(base_dir),
'args': {"build_version": "1"}})
service.build()
self.addCleanup(self.client.remove_image, service.image_name)
@@ -1007,7 +1089,7 @@ class ServiceTest(DockerClientTestCase):
f.write("RUN echo ${build_version}\n")
service = self.create_service('buildwithargs',
- build={'context': text_type(base_dir),
+ build={'context': str(base_dir),
'args': {"build_version": "1"}})
service.build(build_args_override={'build_version': '2'})
self.addCleanup(self.client.remove_image, service.image_name)
@@ -1023,7 +1105,7 @@ class ServiceTest(DockerClientTestCase):
f.write('FROM busybox\n')
service = self.create_service('buildlabels', build={
- 'context': text_type(base_dir),
+ 'context': str(base_dir),
'labels': {'com.docker.compose.test': 'true'}
})
service.build()
@@ -1050,7 +1132,7 @@ class ServiceTest(DockerClientTestCase):
self.client.start(net_container)
service = self.create_service('buildwithnet', build={
- 'context': text_type(base_dir),
+ 'context': str(base_dir),
'network': 'container:{}'.format(net_container['Id'])
})
@@ -1059,7 +1141,6 @@ class ServiceTest(DockerClientTestCase):
assert service.image()
- @v2_3_only()
@no_cluster('Not supported on UCP 2.2.0-beta1') # FIXME: remove once support is added
def test_build_with_target(self):
self.require_api_version('1.30')
@@ -1074,7 +1155,7 @@ class ServiceTest(DockerClientTestCase):
f.write('LABEL com.docker.compose.test.target=two\n')
service = self.create_service('buildtarget', build={
- 'context': text_type(base_dir),
+ 'context': str(base_dir),
'target': 'one'
})
@@ -1082,7 +1163,6 @@ class ServiceTest(DockerClientTestCase):
assert service.image()
assert service.image()['Config']['Labels']['com.docker.compose.test.target'] == 'one'
- @v2_3_only()
def test_build_with_extra_hosts(self):
self.require_api_version('1.27')
base_dir = tempfile.mkdtemp()
@@ -1096,7 +1176,7 @@ class ServiceTest(DockerClientTestCase):
]))
service = self.create_service('build_extra_hosts', build={
- 'context': text_type(base_dir),
+ 'context': str(base_dir),
'extra_hosts': {
'foobar': '127.0.0.1',
'baz': '127.0.0.1'
@@ -1118,12 +1198,11 @@ class ServiceTest(DockerClientTestCase):
f.write('hello world\n')
service = self.create_service('build_gzip', build={
- 'context': text_type(base_dir),
+ 'context': str(base_dir),
})
service.build(gzip=True)
assert service.image()
- @v2_1_only()
def test_build_with_isolation(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
@@ -1131,12 +1210,27 @@ class ServiceTest(DockerClientTestCase):
f.write('FROM busybox\n')
service = self.create_service('build_isolation', build={
- 'context': text_type(base_dir),
+ 'context': str(base_dir),
'isolation': 'default',
})
service.build()
assert service.image()
+ def test_build_with_illegal_leading_chars(self):
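+ # Leading underscores and dashes in the project name are stripped from the derived image name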
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write('FROM busybox\nRUN echo "Embodiment of Scarlet Devil"\n')
+ service = Service(
+ 'build_leading_slug', client=self.client,
+ project='___-composetest', build={
+ 'context': str(base_dir)
+ }
+ )
+ assert service.image_name == 'composetest_build_leading_slug'
+ service.build()
+ assert service.image()
+
def test_start_container_stays_unprivileged(self):
service = self.create_service('web')
container = create_and_start_container(service).inspect()
@@ -1198,9 +1292,8 @@ class ServiceTest(DockerClientTestCase):
# })
def test_create_with_image_id(self):
- # Get image id for the current busybox:latest
pull_busybox(self.client)
- image_id = self.client.inspect_image('busybox:latest')['Id'][:12]
+ image_id = self.client.inspect_image(BUSYBOX_IMAGE_WITH_TAG)['Id'][:12]
service = self.create_service('foo', image=image_id)
service.create_container()
@@ -1234,17 +1327,15 @@ class ServiceTest(DockerClientTestCase):
test that those containers are restarted and not removed/recreated.
"""
service = self.create_service('web')
- next_number = service._next_container_number()
- valid_numbers = [next_number, next_number + 1]
- service.create_container(number=next_number)
- service.create_container(number=next_number + 1)
+ service.create_container(number=1)
+ service.create_container(number=2)
ParallelStreamWriter.instance = None
with mock.patch('sys.stderr', new_callable=StringIO) as mock_stderr:
service.scale(2)
for container in service.containers():
assert container.is_running
- assert container.number in valid_numbers
+ assert container.number in [1, 2]
captured_output = mock_stderr.getvalue()
assert 'Creating' not in captured_output
@@ -1295,10 +1386,8 @@ class ServiceTest(DockerClientTestCase):
assert len(service.containers()) == 1
assert service.containers()[0].is_running
- assert (
- "ERROR: for composetest_web_2 Cannot create container for service"
- " web: Boom" in mock_stderr.getvalue()
- )
+ assert "ERROR: for composetest_web_" in mock_stderr.getvalue()
+ assert "Cannot create container for service web: Boom" in mock_stderr.getvalue()
def test_scale_with_unexpected_exception(self):
"""Test that when scaling if the API returns an error, that is not of type
@@ -1352,7 +1441,7 @@ class ServiceTest(DockerClientTestCase):
with pytest.raises(OperationFailedError):
service.scale(3)
- captured_output = mock_log.warn.call_args[0][0]
+ captured_output = mock_log.warning.call_args[0][0]
assert len(service.containers()) == 1
assert "Remove the custom name to scale the service." in captured_output
@@ -1395,13 +1484,22 @@ class ServiceTest(DockerClientTestCase):
container = create_and_start_container(service)
assert container.get('HostConfig.PidMode') == 'host'
- @v2_1_only()
+ def test_ipc_mode_none_defined(self):
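+ # With no ipc_mode configured, the daemon default is expected to be 'shareable'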
+ service = self.create_service('web', ipc_mode=None)
+ container = create_and_start_container(service)
+ assert container.get('HostConfig.IpcMode') == 'shareable'
+
+ def test_ipc_mode_host(self):
+ service = self.create_service('web', ipc_mode=IpcMode('host'))
+ container = create_and_start_container(service)
+ assert container.get('HostConfig.IpcMode') == 'host'
+
def test_userns_mode_none_defined(self):
service = self.create_service('web', userns_mode=None)
container = create_and_start_container(service)
assert container.get('HostConfig.UsernsMode') == ''
- @v2_1_only()
def test_userns_mode_host(self):
service = self.create_service('web', userns_mode='host')
container = create_and_start_container(service)
@@ -1477,7 +1575,6 @@ class ServiceTest(DockerClientTestCase):
container = create_and_start_container(service)
assert container.get('HostConfig.DnsSearch') == ['dc1.example.com', 'dc2.example.com']
- @v2_only()
def test_tmpfs(self):
service = self.create_service('web', tmpfs=['/run'])
container = create_and_start_container(service)
@@ -1511,7 +1608,6 @@ class ServiceTest(DockerClientTestCase):
}.items():
assert env[k] == v
- @v3_only()
def test_build_with_cachefrom(self):
base_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base_dir)
@@ -1565,16 +1661,17 @@ class ServiceTest(DockerClientTestCase):
}
compose_labels = {
- LABEL_CONTAINER_NUMBER: '1',
LABEL_ONE_OFF: 'False',
LABEL_PROJECT: 'composetest',
LABEL_SERVICE: 'web',
LABEL_VERSION: __version__,
+ LABEL_CONTAINER_NUMBER: '1'
}
expected = dict(labels_dict, **compose_labels)
service = self.create_service('web', labels=labels_dict)
- labels = create_and_start_container(service).labels.items()
+ ctnr = create_and_start_container(service)
+ labels = ctnr.labels.items()
for pair in expected.items():
assert pair in labels
@@ -1640,17 +1737,17 @@ class ServiceTest(DockerClientTestCase):
def test_duplicate_containers(self):
service = self.create_service('web')
- options = service._get_container_create_options({}, 1)
+ options = service._get_container_create_options({}, service._next_container_number())
original = Container.create(service.client, **options)
- assert set(service.containers(stopped=True)) == set([original])
+ assert set(service.containers(stopped=True)) == {original}
assert set(service.duplicate_containers()) == set()
options['name'] = 'temporary_container_name'
duplicate = Container.create(service.client, **options)
- assert set(service.containers(stopped=True)) == set([original, duplicate])
- assert set(service.duplicate_containers()) == set([duplicate])
+ assert set(service.containers(stopped=True)) == {original, duplicate}
+ assert set(service.duplicate_containers()) == {duplicate}
def converge(service, strategy=ConvergenceStrategy.changed):
diff --git a/tests/integration/state_test.py b/tests/integration/state_test.py
index 5992a02a..8168cddf 100644
--- a/tests/integration/state_test.py
+++ b/tests/integration/state_test.py
@@ -2,12 +2,14 @@
Integration tests which cover state convergence (aka smart recreate) performed
by `docker-compose up`.
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
+import copy
+import os
+import shutil
+import tempfile
-import py
from docker.errors import ImageNotFound
+from ..helpers import BUSYBOX_IMAGE_WITH_TAG
from .testcases import DockerClientTestCase
from .testcases import get_links
from .testcases import no_cluster
@@ -37,11 +39,11 @@ class ProjectTestCase(DockerClientTestCase):
class BasicProjectTest(ProjectTestCase):
def setUp(self):
- super(BasicProjectTest, self).setUp()
+ super().setUp()
self.cfg = {
- 'db': {'image': 'busybox:latest', 'command': 'top'},
- 'web': {'image': 'busybox:latest', 'command': 'top'},
+ 'db': {'image': BUSYBOX_IMAGE_WITH_TAG, 'command': 'top'},
+ 'web': {'image': BUSYBOX_IMAGE_WITH_TAG, 'command': 'top'},
}
def test_no_change(self):
@@ -55,8 +57,8 @@ class BasicProjectTest(ProjectTestCase):
def test_partial_change(self):
old_containers = self.run_up(self.cfg)
- old_db = [c for c in old_containers if c.name_without_project == 'db_1'][0]
- old_web = [c for c in old_containers if c.name_without_project == 'web_1'][0]
+ old_db = [c for c in old_containers if c.name_without_project.startswith('db_')][0]
+ old_web = [c for c in old_containers if c.name_without_project.startswith('web_')][0]
self.cfg['web']['command'] = '/bin/true'
@@ -71,7 +73,7 @@ class BasicProjectTest(ProjectTestCase):
created = list(new_containers - old_containers)
assert len(created) == 1
- assert created[0].name_without_project == 'web_1'
+ assert created[0].name_without_project == old_web.name_without_project
assert created[0].get('Config.Cmd') == ['/bin/true']
def test_all_change(self):
@@ -93,20 +95,20 @@ class BasicProjectTest(ProjectTestCase):
class ProjectWithDependenciesTest(ProjectTestCase):
def setUp(self):
- super(ProjectWithDependenciesTest, self).setUp()
+ super().setUp()
self.cfg = {
'db': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'tail -f /dev/null',
},
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'tail -f /dev/null',
'links': ['db'],
},
'nginx': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'tail -f /dev/null',
'links': ['web'],
},
@@ -114,7 +116,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
def test_up(self):
containers = self.run_up(self.cfg)
- assert set(c.name_without_project for c in containers) == set(['db_1', 'web_1', 'nginx_1'])
+ assert {c.service for c in containers} == {'db', 'web', 'nginx'}
def test_change_leaf(self):
old_containers = self.run_up(self.cfg)
@@ -122,7 +124,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['nginx']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg)
- assert set(c.name_without_project for c in new_containers - old_containers) == set(['nginx_1'])
+ assert {c.service for c in new_containers - old_containers} == {'nginx'}
def test_change_middle(self):
old_containers = self.run_up(self.cfg)
@@ -130,7 +132,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['web']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg)
- assert set(c.name_without_project for c in new_containers - old_containers) == set(['web_1'])
+ assert {c.service for c in new_containers - old_containers} == {'web'}
def test_change_middle_always_recreate_deps(self):
old_containers = self.run_up(self.cfg, always_recreate_deps=True)
@@ -138,8 +140,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['web']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg, always_recreate_deps=True)
- assert set(c.name_without_project
- for c in new_containers - old_containers) == {'web_1', 'nginx_1'}
+ assert {c.service for c in new_containers - old_containers} == {'web', 'nginx'}
def test_change_root(self):
old_containers = self.run_up(self.cfg)
@@ -147,7 +148,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['db']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg)
- assert set(c.name_without_project for c in new_containers - old_containers) == set(['db_1'])
+ assert {c.service for c in new_containers - old_containers} == {'db'}
def test_change_root_always_recreate_deps(self):
old_containers = self.run_up(self.cfg, always_recreate_deps=True)
@@ -155,8 +156,9 @@ class ProjectWithDependenciesTest(ProjectTestCase):
self.cfg['db']['environment'] = {'NEW_VAR': '1'}
new_containers = self.run_up(self.cfg, always_recreate_deps=True)
- assert set(c.name_without_project
- for c in new_containers - old_containers) == {'db_1', 'web_1', 'nginx_1'}
+ assert {c.service for c in new_containers - old_containers} == {
+ 'db', 'web', 'nginx'
+ }
def test_change_root_no_recreate(self):
old_containers = self.run_up(self.cfg)
@@ -171,7 +173,7 @@ class ProjectWithDependenciesTest(ProjectTestCase):
def test_service_removed_while_down(self):
next_cfg = {
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'tail -f /dev/null',
},
'nginx': self.cfg['nginx'],
@@ -195,9 +197,155 @@ class ProjectWithDependenciesTest(ProjectTestCase):
web, = [c for c in containers if c.service == 'web']
nginx, = [c for c in containers if c.service == 'nginx']
+ db, = [c for c in containers if c.service == 'db']
+
+ assert set(get_links(web)) == {
+ 'composetest_db_1',
+ 'db',
+ 'db_1',
+ }
+ assert set(get_links(nginx)) == {
+ 'composetest_web_1',
+ 'web',
+ 'web_1',
+ }
+
+
+class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
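+ # same scenarios as ProjectWithDependenciesTest above, with the dependency
+ # graph declared via the v2 'depends_on' option instead of v1 links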
+ def setUp(self):
+ super().setUp()
+
+ self.cfg = {
+ 'version': '2',
+ 'services': {
+ 'db': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'tail -f /dev/null',
+ },
+ 'web': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'tail -f /dev/null',
+ 'depends_on': ['db'],
+ },
+ 'nginx': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'command': 'tail -f /dev/null',
+ 'depends_on': ['web'],
+ },
+ }
+ }
+
+ def test_up(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ containers = self.run_up(local_cfg)
+ assert {c.service for c in containers} == {'db', 'web', 'nginx'}
+
+ def test_change_leaf(self):
+ local_cfg = copy.deepcopy(self.cfg)
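+ # mutate a deep copy so the shared self.cfg stays intact for other tests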
+ old_containers = self.run_up(local_cfg)
+
+ local_cfg['services']['nginx']['environment'] = {'NEW_VAR': '1'}
+ new_containers = self.run_up(local_cfg)
+
+ assert {c.service for c in new_containers - old_containers} == {'nginx'}
+
+ def test_change_middle(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ old_containers = self.run_up(local_cfg)
+
+ local_cfg['services']['web']['environment'] = {'NEW_VAR': '1'}
+ new_containers = self.run_up(local_cfg)
+
+ assert {c.service for c in new_containers - old_containers} == {'web'}
+
+ def test_change_middle_always_recreate_deps(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ old_containers = self.run_up(local_cfg, always_recreate_deps=True)
+
+ local_cfg['services']['web']['environment'] = {'NEW_VAR': '1'}
+ new_containers = self.run_up(local_cfg, always_recreate_deps=True)
+
+ assert {c.service for c in new_containers - old_containers} == {'web', 'nginx'}
+
+ def test_change_root(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ old_containers = self.run_up(local_cfg)
+
+ local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'}
+ new_containers = self.run_up(local_cfg)
+
+ assert {c.service for c in new_containers - old_containers} == {'db'}
+
+ def test_change_root_always_recreate_deps(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ old_containers = self.run_up(local_cfg, always_recreate_deps=True)
+
+ local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'}
+ new_containers = self.run_up(local_cfg, always_recreate_deps=True)
+
+ assert {c.service for c in new_containers - old_containers} == {'db', 'web', 'nginx'}
+
+ def test_change_root_no_recreate(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ old_containers = self.run_up(local_cfg)
+
+ local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'}
+ new_containers = self.run_up(
+ local_cfg,
+ strategy=ConvergenceStrategy.never)
+
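+ # ConvergenceStrategy.never forbids recreation, so the config change must yield no new containers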
+ assert new_containers - old_containers == set()
+
+ def test_service_removed_while_down(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ next_cfg = copy.deepcopy(self.cfg)
+ del next_cfg['services']['db']
+ del next_cfg['services']['web']['depends_on']
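+ # web's depends_on has to go as well: depends_on may not reference an undefined service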
+
+ containers = self.run_up(local_cfg)
+ assert {c.service for c in containers} == {'db', 'web', 'nginx'}
+
+ project = self.make_project(local_cfg)
+ project.stop(timeout=1)
+
+ next_containers = self.run_up(next_cfg)
+ assert {c.service for c in next_containers} == {'web', 'nginx'}
+
+ def test_service_removed_while_up(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ containers = self.run_up(local_cfg)
+ assert {c.service for c in containers} == {'db', 'web', 'nginx'}
+
+ del local_cfg['services']['db']
+ del local_cfg['services']['web']['depends_on']
+
+ containers = self.run_up(local_cfg)
+ assert {c.service for c in containers} == {'web', 'nginx'}
+
+ def test_dependency_removed(self):
+ local_cfg = copy.deepcopy(self.cfg)
+ next_cfg = copy.deepcopy(self.cfg)
+ del next_cfg['services']['nginx']['depends_on']
+
+ containers = self.run_up(local_cfg, service_names=['nginx'])
+ assert {c.service for c in containers} == {'db', 'web', 'nginx'}
+
+ project = self.make_project(local_cfg)
+ project.stop(timeout=1)
+
+ next_containers = self.run_up(next_cfg, service_names=['nginx'])
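+ # with the depends_on removed, only nginx itself should be running afterwards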
+ assert {c.service for c in next_containers if c.is_running} == {'nginx'}
+
+ def test_dependency_added(self):
+ local_cfg = copy.deepcopy(self.cfg)
+
+ del local_cfg['services']['nginx']['depends_on']
+ containers = self.run_up(local_cfg, service_names=['nginx'])
+ assert {c.service for c in containers} == {'nginx'}
- assert set(get_links(web)) == {'composetest_db_1', 'db', 'db_1'}
- assert set(get_links(nginx)) == {'composetest_web_1', 'web', 'web_1'}
+ local_cfg['services']['nginx']['depends_on'] = ['db']
+ containers = self.run_up(local_cfg, service_names=['nginx'])
+ assert {c.service for c in containers} == {'nginx', 'db'}
class ServiceStateTest(DockerClientTestCase):
@@ -227,7 +375,7 @@ class ServiceStateTest(DockerClientTestCase):
assert [c.is_running for c in containers] == [False, True]
- assert ('start', containers[0:1]) == web.convergence_plan()
+ assert ('start', containers) == web.convergence_plan()
def test_trigger_recreate_with_config_change(self):
web = self.create_service('web', command=["top"])
@@ -237,7 +385,7 @@ class ServiceStateTest(DockerClientTestCase):
assert ('recreate', [container]) == web.convergence_plan()
def test_trigger_recreate_with_nonexistent_image_tag(self):
- web = self.create_service('web', image="busybox:latest")
+ web = self.create_service('web', image=BUSYBOX_IMAGE_WITH_TAG)
container = web.create_container()
web = self.create_service('web', image="nonexistent-image")
@@ -277,29 +425,32 @@ class ServiceStateTest(DockerClientTestCase):
@no_cluster('Can not guarantee the build will be run on the same node the service is deployed')
def test_trigger_recreate_with_build(self):
- context = py.test.ensuretemp('test_trigger_recreate_with_build')
- self.addCleanup(context.remove)
+ context = tempfile.mkdtemp('test_trigger_recreate_with_build')
+ self.addCleanup(shutil.rmtree, context)
base_image = "FROM busybox\nLABEL com.docker.compose.test_image=true\n"
- dockerfile = context.join('Dockerfile')
- dockerfile.write(base_image)
+ dockerfile = os.path.join(context, 'Dockerfile')
+ with open(dockerfile, mode="w") as dockerfile_fh:
+ dockerfile_fh.write(base_image)
web = self.create_service('web', build={'context': str(context)})
container = web.create_container()
- dockerfile.write(base_image + 'CMD echo hello world\n')
+ with open(dockerfile, mode="w") as dockerfile_fh:
+ dockerfile_fh.write(base_image + 'CMD echo hello world\n')
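+ # the rewritten Dockerfile changes the build, which should trigger a recreate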
web.build()
web = self.create_service('web', build={'context': str(context)})
assert ('recreate', [container]) == web.convergence_plan()
def test_image_changed_to_build(self):
- context = py.test.ensuretemp('test_image_changed_to_build')
- self.addCleanup(context.remove)
- context.join('Dockerfile').write("""
- FROM busybox
- LABEL com.docker.compose.test_image=true
- """)
+ context = tempfile.mkdtemp('test_image_changed_to_build')
+ self.addCleanup(shutil.rmtree, context)
+ with open(os.path.join(context, 'Dockerfile'), mode="w") as dockerfile:
+ dockerfile.write("""
+ FROM busybox
+ LABEL com.docker.compose.test_image=true
+ """)
web = self.create_service('web', image='busybox')
container = web.create_container()
diff --git a/tests/integration/testcases.py b/tests/integration/testcases.py
index 4440d771..d4fbc9f6 100644
--- a/tests/integration/testcases.py
+++ b/tests/integration/testcases.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import functools
import os
@@ -9,18 +6,13 @@ from docker.errors import APIError
from docker.utils import version_lt
from .. import unittest
+from ..helpers import BUSYBOX_IMAGE_WITH_TAG
from compose.cli.docker_client import docker_client
from compose.config.config import resolve_environment
from compose.config.environment import Environment
from compose.const import API_VERSIONS
+from compose.const import COMPOSE_SPEC as VERSION
from compose.const import COMPOSEFILE_V1 as V1
-from compose.const import COMPOSEFILE_V2_0 as V2_0
-from compose.const import COMPOSEFILE_V2_1 as V2_1
-from compose.const import COMPOSEFILE_V2_2 as V2_2
-from compose.const import COMPOSEFILE_V2_3 as V2_3
-from compose.const import COMPOSEFILE_V3_0 as V3_0
-from compose.const import COMPOSEFILE_V3_2 as V3_2
-from compose.const import COMPOSEFILE_V3_5 as V3_5
from compose.const import LABEL_PROJECT
from compose.progress_stream import stream_output
from compose.service import Service
@@ -32,7 +24,7 @@ SWARM_ASSUME_MULTINODE = os.environ.get('SWARM_ASSUME_MULTINODE', '0') != '0'
def pull_busybox(client):
- client.pull('busybox:latest', stream=False)
+ client.pull(BUSYBOX_IMAGE_WITH_TAG, stream=False)
def get_links(container):
@@ -47,17 +39,11 @@ def get_links(container):
def engine_max_version():
if 'DOCKER_VERSION' not in os.environ:
- return V3_5
+ return VERSION
version = os.environ['DOCKER_VERSION'].partition('-')[0]
if version_lt(version, '1.10'):
return V1
- if version_lt(version, '1.12'):
- return V2_0
- if version_lt(version, '1.13'):
- return V2_1
- if version_lt(version, '17.06'):
- return V3_2
- return V3_5
+ return VERSION
def min_version_skip(version):
@@ -67,26 +53,6 @@ def min_version_skip(version):
)
-def v2_only():
- return min_version_skip(V2_0)
-
-
-def v2_1_only():
- return min_version_skip(V2_1)
-
-
-def v2_2_only():
- return min_version_skip(V2_2)
-
-
-def v2_3_only():
- return min_version_skip(V2_3)
-
-
-def v3_only():
- return min_version_skip(V3_0)
-
-
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
@@ -95,6 +61,7 @@ class DockerClientTestCase(unittest.TestCase):
@classmethod
def tearDownClass(cls):
+ cls.client.close()
del cls.client
def tearDown(self):
@@ -123,7 +90,7 @@ class DockerClientTestCase(unittest.TestCase):
def create_service(self, name, **kwargs):
if 'image' not in kwargs and 'build' not in kwargs:
- kwargs['image'] = 'busybox:latest'
+ kwargs['image'] = BUSYBOX_IMAGE_WITH_TAG
if 'command' not in kwargs:
kwargs['command'] = ["top"]
@@ -139,7 +106,9 @@ class DockerClientTestCase(unittest.TestCase):
def check_build(self, *args, **kwargs):
kwargs.setdefault('rm', True)
build_output = self.client.build(*args, **kwargs)
- stream_output(build_output, open('/dev/null', 'w'))
+ with open(os.devnull, 'w') as devnull:
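+ # stream_output is a generator: iterate it to drive the build, discarding the output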
+ for event in stream_output(build_output, devnull):
+ pass
def require_api_version(self, minimum):
api_version = self.client.version()['ApiVersion']
diff --git a/tests/integration/volume_test.py b/tests/integration/volume_test.py
index 2a521d4c..0e7c78bc 100644
--- a/tests/integration/volume_test.py
+++ b/tests/integration/volume_test.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import six
from docker.errors import DockerException
from .testcases import DockerClientTestCase
@@ -22,12 +18,12 @@ class VolumeTest(DockerClientTestCase):
except DockerException:
pass
del self.tmp_volumes
- super(VolumeTest, self).tearDown()
+ super().tearDown()
def create_volume(self, name, driver=None, opts=None, external=None, custom_name=False):
if external:
custom_name = True
- if isinstance(external, six.text_type):
+ if isinstance(external, str):
name = external
vol = Volume(
diff --git a/tests/unit/bundle_test.py b/tests/unit/bundle_test.py
deleted file mode 100644
index 88f75405..00000000
--- a/tests/unit/bundle_test.py
+++ /dev/null
@@ -1,222 +0,0 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import docker
-import pytest
-
-from .. import mock
-from compose import bundle
-from compose import service
-from compose.cli.errors import UserError
-from compose.config.config import Config
-from compose.const import COMPOSEFILE_V2_0 as V2_0
-
-
-@pytest.fixture
-def mock_service():
- return mock.create_autospec(
- service.Service,
- client=mock.create_autospec(docker.APIClient),
- options={})
-
-
-def test_get_image_digest_exists(mock_service):
- mock_service.options['image'] = 'abcd'
- mock_service.image.return_value = {'RepoDigests': ['digest1']}
- digest = bundle.get_image_digest(mock_service)
- assert digest == 'digest1'
-
-
-def test_get_image_digest_image_uses_digest(mock_service):
- mock_service.options['image'] = image_id = 'redis@sha256:digest'
-
- digest = bundle.get_image_digest(mock_service)
- assert digest == image_id
- assert not mock_service.image.called
-
-
-def test_get_image_digest_no_image(mock_service):
- with pytest.raises(UserError) as exc:
- bundle.get_image_digest(service.Service(name='theservice'))
-
- assert "doesn't define an image tag" in exc.exconly()
-
-
-def test_push_image_with_saved_digest(mock_service):
- mock_service.options['build'] = '.'
- mock_service.options['image'] = image_id = 'abcd'
- mock_service.push.return_value = expected = 'sha256:thedigest'
- mock_service.image.return_value = {'RepoDigests': ['digest1']}
-
- digest = bundle.push_image(mock_service)
- assert digest == image_id + '@' + expected
-
- mock_service.push.assert_called_once_with()
- assert not mock_service.client.push.called
-
-
-def test_push_image(mock_service):
- mock_service.options['build'] = '.'
- mock_service.options['image'] = image_id = 'abcd'
- mock_service.push.return_value = expected = 'sha256:thedigest'
- mock_service.image.return_value = {'RepoDigests': []}
-
- digest = bundle.push_image(mock_service)
- assert digest == image_id + '@' + expected
-
- mock_service.push.assert_called_once_with()
- mock_service.client.pull.assert_called_once_with(digest)
-
-
-def test_to_bundle():
- image_digests = {'a': 'aaaa', 'b': 'bbbb'}
- services = [
- {'name': 'a', 'build': '.', },
- {'name': 'b', 'build': './b'},
- ]
- config = Config(
- version=V2_0,
- services=services,
- volumes={'special': {}},
- networks={'extra': {}},
- secrets={},
- configs={}
- )
-
- with mock.patch('compose.bundle.log.warn', autospec=True) as mock_log:
- output = bundle.to_bundle(config, image_digests)
-
- assert mock_log.mock_calls == [
- mock.call("Unsupported top level key 'networks' - ignoring"),
- mock.call("Unsupported top level key 'volumes' - ignoring"),
- ]
-
- assert output == {
- 'Version': '0.1',
- 'Services': {
- 'a': {'Image': 'aaaa', 'Networks': ['default']},
- 'b': {'Image': 'bbbb', 'Networks': ['default']},
- }
- }
-
-
-def test_convert_service_to_bundle():
- name = 'theservice'
- image_digest = 'thedigest'
- service_dict = {
- 'ports': ['80'],
- 'expose': ['1234'],
- 'networks': {'extra': {}},
- 'command': 'foo',
- 'entrypoint': 'entry',
- 'environment': {'BAZ': 'ENV'},
- 'build': '.',
- 'working_dir': '/tmp',
- 'user': 'root',
- 'labels': {'FOO': 'LABEL'},
- 'privileged': True,
- }
-
- with mock.patch('compose.bundle.log.warn', autospec=True) as mock_log:
- config = bundle.convert_service_to_bundle(name, service_dict, image_digest)
-
- mock_log.assert_called_once_with(
- "Unsupported key 'privileged' in services.theservice - ignoring")
-
- assert config == {
- 'Image': image_digest,
- 'Ports': [
- {'Protocol': 'tcp', 'Port': 80},
- {'Protocol': 'tcp', 'Port': 1234},
- ],
- 'Networks': ['extra'],
- 'Command': ['entry', 'foo'],
- 'Env': ['BAZ=ENV'],
- 'WorkingDir': '/tmp',
- 'User': 'root',
- 'Labels': {'FOO': 'LABEL'},
- }
-
-
-def test_set_command_and_args_none():
- config = {}
- bundle.set_command_and_args(config, [], [])
- assert config == {}
-
-
-def test_set_command_and_args_from_command():
- config = {}
- bundle.set_command_and_args(config, [], "echo ok")
- assert config == {'Args': ['echo', 'ok']}
-
-
-def test_set_command_and_args_from_entrypoint():
- config = {}
- bundle.set_command_and_args(config, "echo entry", [])
- assert config == {'Command': ['echo', 'entry']}
-
-
-def test_set_command_and_args_from_both():
- config = {}
- bundle.set_command_and_args(config, "echo entry", ["extra", "arg"])
- assert config == {'Command': ['echo', 'entry', "extra", "arg"]}
-
-
-def test_make_service_networks_default():
- name = 'theservice'
- service_dict = {}
-
- with mock.patch('compose.bundle.log.warn', autospec=True) as mock_log:
- networks = bundle.make_service_networks(name, service_dict)
-
- assert not mock_log.called
- assert networks == ['default']
-
-
-def test_make_service_networks():
- name = 'theservice'
- service_dict = {
- 'networks': {
- 'foo': {
- 'aliases': ['one', 'two'],
- },
- 'bar': {}
- },
- }
-
- with mock.patch('compose.bundle.log.warn', autospec=True) as mock_log:
- networks = bundle.make_service_networks(name, service_dict)
-
- mock_log.assert_called_once_with(
- "Unsupported key 'aliases' in services.theservice.networks.foo - ignoring")
- assert sorted(networks) == sorted(service_dict['networks'])
-
-
-def test_make_port_specs():
- service_dict = {
- 'expose': ['80', '500/udp'],
- 'ports': [
- '400:80',
- '222',
- '127.0.0.1:8001:8001',
- '127.0.0.1:5000-5001:3000-3001'],
- }
- port_specs = bundle.make_port_specs(service_dict)
- assert port_specs == [
- {'Protocol': 'tcp', 'Port': 80},
- {'Protocol': 'tcp', 'Port': 222},
- {'Protocol': 'tcp', 'Port': 8001},
- {'Protocol': 'tcp', 'Port': 3000},
- {'Protocol': 'tcp', 'Port': 3001},
- {'Protocol': 'udp', 'Port': 500},
- ]
-
-
-def test_make_port_spec_with_protocol():
- port_spec = bundle.make_port_spec("5000/udp")
- assert port_spec == {'Protocol': 'udp', 'Port': 5000}
-
-
-def test_make_port_spec_default_protocol():
- port_spec = bundle.make_port_spec("50000")
- assert port_spec == {'Protocol': 'tcp', 'Port': 50000}
diff --git a/tests/unit/cli/colors_test.py b/tests/unit/cli/colors_test.py
new file mode 100644
index 00000000..79b9cf10
--- /dev/null
+++ b/tests/unit/cli/colors_test.py
@@ -0,0 +1,56 @@
+import os
+
+import pytest
+
+from compose.cli.colors import AnsiMode
+from tests import mock
+
+
+@pytest.fixture
+def tty_stream():
+ stream = mock.Mock()
+ stream.isatty.return_value = True
+ return stream
+
+
+@pytest.fixture
+def non_tty_stream():
+ stream = mock.Mock()
+ stream.isatty.return_value = False
+ return stream
+
+
+class TestAnsiModeTestCase:
+
+ @mock.patch.dict(os.environ)
+ def test_ansi_mode_never(self, tty_stream, non_tty_stream):
+ if "CLICOLOR" in os.environ:
+ del os.environ["CLICOLOR"]
+ assert not AnsiMode.NEVER.use_ansi_codes(tty_stream)
+ assert not AnsiMode.NEVER.use_ansi_codes(non_tty_stream)
+
+ os.environ["CLICOLOR"] = "0"
+ assert not AnsiMode.NEVER.use_ansi_codes(tty_stream)
+ assert not AnsiMode.NEVER.use_ansi_codes(non_tty_stream)
+
+ @mock.patch.dict(os.environ)
+ def test_ansi_mode_always(self, tty_stream, non_tty_stream):
+ if "CLICOLOR" in os.environ:
+ del os.environ["CLICOLOR"]
+ assert AnsiMode.ALWAYS.use_ansi_codes(tty_stream)
+ assert AnsiMode.ALWAYS.use_ansi_codes(non_tty_stream)
+
+ os.environ["CLICOLOR"] = "0"
+ assert AnsiMode.ALWAYS.use_ansi_codes(tty_stream)
+ assert AnsiMode.ALWAYS.use_ansi_codes(non_tty_stream)
+
+ @mock.patch.dict(os.environ)
+ def test_ansi_mode_auto(self, tty_stream, non_tty_stream):
+ if "CLICOLOR" in os.environ:
+ del os.environ["CLICOLOR"]
+ assert AnsiMode.AUTO.use_ansi_codes(tty_stream)
+ assert not AnsiMode.AUTO.use_ansi_codes(non_tty_stream)
+
+ os.environ["CLICOLOR"] = "0"
+ assert not AnsiMode.AUTO.use_ansi_codes(tty_stream)
+ assert not AnsiMode.AUTO.use_ansi_codes(non_tty_stream)
diff --git a/tests/unit/cli/command_test.py b/tests/unit/cli/command_test.py
index 3a9844c4..60638864 100644
--- a/tests/unit/cli/command_test.py
+++ b/tests/unit/cli/command_test.py
@@ -1,11 +1,6 @@
-# ~*~ encoding: utf-8 ~*~
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import os
import pytest
-import six
from compose.cli.command import get_config_path_from_options
from compose.config.environment import Environment
@@ -13,64 +8,47 @@ from compose.const import IS_WINDOWS_PLATFORM
from tests import mock
-class TestGetConfigPathFromOptions(object):
+class TestGetConfigPathFromOptions:
def test_path_from_options(self):
paths = ['one.yml', 'two.yml']
opts = {'--file': paths}
environment = Environment.from_env_file('.')
- assert get_config_path_from_options('.', opts, environment) == paths
+ assert get_config_path_from_options(opts, environment) == paths
def test_single_path_from_env(self):
with mock.patch.dict(os.environ):
os.environ['COMPOSE_FILE'] = 'one.yml'
environment = Environment.from_env_file('.')
- assert get_config_path_from_options('.', {}, environment) == ['one.yml']
+ assert get_config_path_from_options({}, environment) == ['one.yml']
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix separator')
def test_multiple_path_from_env(self):
with mock.patch.dict(os.environ):
os.environ['COMPOSE_FILE'] = 'one.yml:two.yml'
environment = Environment.from_env_file('.')
- assert get_config_path_from_options(
- '.', {}, environment
- ) == ['one.yml', 'two.yml']
+ assert get_config_path_from_options({}, environment) == ['one.yml', 'two.yml']
@pytest.mark.skipif(not IS_WINDOWS_PLATFORM, reason='windows separator')
def test_multiple_path_from_env_windows(self):
with mock.patch.dict(os.environ):
os.environ['COMPOSE_FILE'] = 'one.yml;two.yml'
environment = Environment.from_env_file('.')
- assert get_config_path_from_options(
- '.', {}, environment
- ) == ['one.yml', 'two.yml']
+ assert get_config_path_from_options({}, environment) == ['one.yml', 'two.yml']
def test_multiple_path_from_env_custom_separator(self):
with mock.patch.dict(os.environ):
os.environ['COMPOSE_PATH_SEPARATOR'] = '^'
os.environ['COMPOSE_FILE'] = 'c:\\one.yml^.\\semi;colon.yml'
environment = Environment.from_env_file('.')
- assert get_config_path_from_options(
- '.', {}, environment
- ) == ['c:\\one.yml', '.\\semi;colon.yml']
+ assert get_config_path_from_options({}, environment) == ['c:\\one.yml', '.\\semi;colon.yml']
def test_no_path(self):
environment = Environment.from_env_file('.')
- assert not get_config_path_from_options('.', {}, environment)
+ assert not get_config_path_from_options({}, environment)
def test_unicode_path_from_options(self):
paths = [b'\xe5\xb0\xb1\xe5\x90\x83\xe9\xa5\xad/docker-compose.yml']
opts = {'--file': paths}
environment = Environment.from_env_file('.')
- assert get_config_path_from_options(
- '.', opts, environment
- ) == ['就吃饭/docker-compose.yml']
-
- @pytest.mark.skipif(six.PY3, reason='Env values in Python 3 are already Unicode')
- def test_unicode_path_from_env(self):
- with mock.patch.dict(os.environ):
- os.environ['COMPOSE_FILE'] = b'\xe5\xb0\xb1\xe5\x90\x83\xe9\xa5\xad/docker-compose.yml'
- environment = Environment.from_env_file('.')
- assert get_config_path_from_options(
- '.', {}, environment
- ) == ['就吃饭/docker-compose.yml']
+ assert get_config_path_from_options(opts, environment) == ['就吃饭/docker-compose.yml']
diff --git a/tests/unit/cli/docker_client_test.py b/tests/unit/cli/docker_client_test.py
index be91ea31..307e47f1 100644
--- a/tests/unit/cli/docker_client_test.py
+++ b/tests/unit/cli/docker_client_test.py
@@ -1,12 +1,10 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import os
import platform
import ssl
import docker
import pytest
+from docker.constants import DEFAULT_DOCKER_API_VERSION
import compose
from compose.cli import errors
@@ -26,18 +24,18 @@ class DockerClientTestCase(unittest.TestCase):
del os.environ['HOME']
except KeyError:
pass
- docker_client(os.environ)
+ docker_client(os.environ, version=DEFAULT_DOCKER_API_VERSION)
@mock.patch.dict(os.environ)
def test_docker_client_with_custom_timeout(self):
os.environ['COMPOSE_HTTP_TIMEOUT'] = '123'
- client = docker_client(os.environ)
+ client = docker_client(os.environ, version=DEFAULT_DOCKER_API_VERSION)
assert client.timeout == 123
@mock.patch.dict(os.environ)
def test_custom_timeout_error(self):
os.environ['COMPOSE_HTTP_TIMEOUT'] = '123'
- client = docker_client(os.environ)
+ client = docker_client(os.environ, version=DEFAULT_DOCKER_API_VERSION)
with mock.patch('compose.cli.errors.log') as fake_log:
with pytest.raises(errors.ConnectionError):
@@ -57,8 +55,8 @@ class DockerClientTestCase(unittest.TestCase):
assert '123' in fake_log.error.call_args[0][0]
def test_user_agent(self):
- client = docker_client(os.environ)
- expected = "docker-compose/{0} docker-py/{1} {2}/{3}".format(
+ client = docker_client(os.environ, version=DEFAULT_DOCKER_API_VERSION)
+ expected = "docker-compose/{} docker-py/{} {}/{}".format(
compose.__version__,
docker.__version__,
platform.system(),
@@ -154,9 +152,9 @@ class TLSConfigTestCase(unittest.TestCase):
def test_tls_client_and_ca_quoted_paths(self):
options = {
- '--tlscacert': '"{0}"'.format(self.ca_cert),
- '--tlscert': '"{0}"'.format(self.client_cert),
- '--tlskey': '"{0}"'.format(self.key),
+ '--tlscacert': '"{}"'.format(self.ca_cert),
+ '--tlscert': '"{}"'.format(self.client_cert),
+ '--tlskey': '"{}"'.format(self.key),
'--tlsverify': True
}
result = tls_config_from_options(options)
@@ -188,9 +186,9 @@ class TLSConfigTestCase(unittest.TestCase):
'DOCKER_TLS_VERIFY': 'false'
})
options = {
- '--tlscacert': '"{0}"'.format(self.ca_cert),
- '--tlscert': '"{0}"'.format(self.client_cert),
- '--tlskey': '"{0}"'.format(self.key),
+ '--tlscacert': '"{}"'.format(self.ca_cert),
+ '--tlscert': '"{}"'.format(self.client_cert),
+ '--tlskey': '"{}"'.format(self.key),
'--tlsverify': True
}
@@ -233,7 +231,7 @@ class TLSConfigTestCase(unittest.TestCase):
assert result.cert == (self.client_cert, self.key)
-class TestGetTlsVersion(object):
+class TestGetTlsVersion:
def test_get_tls_version_default(self):
environment = {}
assert get_tls_version(environment) is None
@@ -247,5 +245,5 @@ class TestGetTlsVersion(object):
environment = {'COMPOSE_TLS_VERSION': 'TLSv5_5'}
with mock.patch('compose.cli.docker_client.log') as mock_log:
tls_version = get_tls_version(environment)
- mock_log.warn.assert_called_once_with(mock.ANY)
+ mock_log.warning.assert_called_once_with(mock.ANY)
assert tls_version is None
diff --git a/tests/unit/cli/errors_test.py b/tests/unit/cli/errors_test.py
index 7b53ed2b..3b70ffe7 100644
--- a/tests/unit/cli/errors_test.py
+++ b/tests/unit/cli/errors_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import pytest
from docker.errors import APIError
from requests.exceptions import ConnectionError
@@ -24,7 +21,7 @@ def patch_find_executable(side_effect):
side_effect=side_effect)
-class TestHandleConnectionErrors(object):
+class TestHandleConnectionErrors:
def test_generic_connection_error(self, mock_logging):
with pytest.raises(errors.ConnectionError):
@@ -37,19 +34,19 @@ class TestHandleConnectionErrors(object):
def test_api_error_version_mismatch(self, mock_logging):
with pytest.raises(errors.ConnectionError):
- with handle_connection_errors(mock.Mock(api_version='1.22')):
+ with handle_connection_errors(mock.Mock(api_version='1.38')):
raise APIError(None, None, b"client is newer than server")
_, args, _ = mock_logging.error.mock_calls[0]
- assert "Docker Engine of version 1.10.0 or greater" in args[0]
+ assert "Docker Engine of version 18.06.0 or greater" in args[0]
def test_api_error_version_mismatch_unicode_explanation(self, mock_logging):
with pytest.raises(errors.ConnectionError):
- with handle_connection_errors(mock.Mock(api_version='1.22')):
- raise APIError(None, None, u"client is newer than server")
+ with handle_connection_errors(mock.Mock(api_version='1.38')):
+ raise APIError(None, None, "client is newer than server")
_, args, _ = mock_logging.error.mock_calls[0]
- assert "Docker Engine of version 1.10.0 or greater" in args[0]
+ assert "Docker Engine of version 18.06.0 or greater" in args[0]
def test_api_error_version_other(self, mock_logging):
msg = b"Something broke!"
@@ -60,7 +57,7 @@ class TestHandleConnectionErrors(object):
mock_logging.error.assert_called_once_with(msg.decode('utf-8'))
def test_api_error_version_other_unicode_explanation(self, mock_logging):
- msg = u"Something broke!"
+ msg = "Something broke!"
with pytest.raises(errors.ConnectionError):
with handle_connection_errors(mock.Mock(api_version='1.22')):
raise APIError(None, None, msg)
diff --git a/tests/unit/cli/formatter_test.py b/tests/unit/cli/formatter_test.py
index e6857251..08752a62 100644
--- a/tests/unit/cli/formatter_test.py
+++ b/tests/unit/cli/formatter_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import logging
from compose.cli import colors
@@ -43,10 +40,10 @@ class ConsoleWarningFormatterTestCase(unittest.TestCase):
message = b'\xec\xa0\x95\xec\x88\x98\xec\xa0\x95'
output = self.formatter.format(make_log_record(logging.WARN, message))
expected = colors.yellow('WARNING') + ': '
- assert output == '{0}{1}'.format(expected, message.decode('utf-8'))
+ assert output == '{}{}'.format(expected, message.decode('utf-8'))
def test_format_unicode_error(self):
message = b'\xec\xa0\x95\xec\x88\x98\xec\xa0\x95'
output = self.formatter.format(make_log_record(logging.ERROR, message))
expected = colors.red('ERROR') + ': '
- assert output == '{0}{1}'.format(expected, message.decode('utf-8'))
+ assert output == '{}{}'.format(expected, message.decode('utf-8'))
diff --git a/tests/unit/cli/log_printer_test.py b/tests/unit/cli/log_printer_test.py
index d0c4b56b..f76e2d62 100644
--- a/tests/unit/cli/log_printer_test.py
+++ b/tests/unit/cli/log_printer_test.py
@@ -1,17 +1,13 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import itertools
+from io import StringIO
+from queue import Queue
import pytest
import requests
-import six
from docker.errors import APIError
-from six.moves.queue import Queue
from compose.cli.log_printer import build_log_generator
from compose.cli.log_printer import build_log_presenters
-from compose.cli.log_printer import build_no_log_generator
from compose.cli.log_printer import consume_queue
from compose.cli.log_printer import QueueItem
from compose.cli.log_printer import wait_on_exit
@@ -22,7 +18,7 @@ from tests import mock
@pytest.fixture
def output_stream():
- output = six.StringIO()
+ output = StringIO()
output.flush = mock.Mock()
return output
@@ -32,7 +28,7 @@ def mock_container():
return mock.Mock(spec=Container, name_without_project='web_1')
-class TestLogPresenter(object):
+class TestLogPresenter:
def test_monochrome(self, mock_container):
presenters = build_log_presenters(['foo', 'bar'], True)
@@ -78,15 +74,7 @@ def test_wait_on_exit_raises():
assert expected in wait_on_exit(mock_container)
-def test_build_no_log_generator(mock_container):
- mock_container.has_api_logs = False
- mock_container.log_driver = 'none'
- output, = build_no_log_generator(mock_container, None)
- assert "WARNING: no logs are available with the 'none' log driver\n" in output
- assert "exited with code" not in output
-
-
-class TestBuildLogGenerator(object):
+class TestBuildLogGenerator:
def test_no_log_stream(self, mock_container):
mock_container.log_stream = None
@@ -111,7 +99,7 @@ class TestBuildLogGenerator(object):
assert next(generator) == "world"
def test_unicode(self, output_stream):
- glyph = u'\u2022\n'
+ glyph = '\u2022\n'
mock_container.log_stream = iter([glyph.encode('utf-8')])
generator = build_log_generator(mock_container, {})
@@ -128,7 +116,7 @@ def mock_presenters():
return itertools.cycle([mock.Mock()])
-class TestWatchEvents(object):
+class TestWatchEvents:
def test_stop_event(self, thread_map, mock_presenters):
event_stream = [{'action': 'stop', 'id': 'cid'}]
@@ -152,6 +140,17 @@ class TestWatchEvents(object):
*thread_args)
assert container_id in thread_map
+ def test_container_attach_event(self, thread_map, mock_presenters):
+ container_id = 'abcd'
+ mock_container = mock.Mock(is_restarting=False)
+ mock_container.attach_log_stream.side_effect = APIError("race condition")
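+ # the container dies before its 'start' event is handled; the attach raises,
+ # and watch_events is expected to attempt it anyway without crashing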
+ event_die = {'action': 'die', 'id': container_id}
+ event_start = {'action': 'start', 'id': container_id, 'container': mock_container}
+ event_stream = [event_die, event_start]
+ thread_args = 'foo', 'bar'
+ watch_events(thread_map, event_stream, mock_presenters, thread_args)
+ assert mock_container.attach_log_stream.called
+
def test_other_event(self, thread_map, mock_presenters):
container_id = 'abcd'
event_stream = [{'action': 'create', 'id': container_id}]
@@ -159,7 +158,7 @@ class TestWatchEvents(object):
assert container_id not in thread_map
-class TestConsumeQueue(object):
+class TestConsumeQueue:
def test_item_is_an_exception(self):
@@ -193,7 +192,7 @@ class TestConsumeQueue(object):
queue.put(item)
generator = consume_queue(queue, True)
- assert next(generator) is 'foobar-1'
+ assert next(generator) == 'foobar-1'
def test_item_is_none_when_timeout_is_hit(self):
queue = Queue()
diff --git a/tests/unit/cli/main_test.py b/tests/unit/cli/main_test.py
index 1a2dfbcf..12b5c3dc 100644
--- a/tests/unit/cli/main_test.py
+++ b/tests/unit/cli/main_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import logging
import docker
@@ -9,9 +6,11 @@ import pytest
from compose import container
from compose.cli.errors import UserError
from compose.cli.formatter import ConsoleWarningFormatter
+from compose.cli.main import build_one_off_container_options
from compose.cli.main import call_docker
from compose.cli.main import convergence_strategy_from_opts
-from compose.cli.main import filter_containers_to_service_names
+from compose.cli.main import filter_attached_containers
+from compose.cli.main import get_docker_start_call
from compose.cli.main import setup_console_handler
from compose.cli.main import warn_for_swarm_mode
from compose.service import ConvergenceStrategy
@@ -23,7 +22,7 @@ def mock_container(service, number):
container.Container,
service=service,
number=number,
- name_without_project='{0}_{1}'.format(service, number))
+ name_without_project='{}_{}'.format(service, number))
@pytest.fixture
@@ -33,9 +32,9 @@ def logging_handler():
return logging.StreamHandler(stream=stream)
-class TestCLIMainTestCase(object):
+class TestCLIMainTestCase:
- def test_filter_containers_to_service_names(self):
+ def test_filter_attached_containers(self):
containers = [
mock_container('web', 1),
mock_container('web', 2),
@@ -44,17 +43,29 @@ class TestCLIMainTestCase(object):
mock_container('another', 1),
]
service_names = ['web', 'db']
- actual = filter_containers_to_service_names(containers, service_names)
+ actual = filter_attached_containers(containers, service_names)
assert actual == containers[:3]
- def test_filter_containers_to_service_names_all(self):
+ def test_filter_attached_containers_with_dependencies(self):
+ containers = [
+ mock_container('web', 1),
+ mock_container('web', 2),
+ mock_container('db', 1),
+ mock_container('other', 1),
+ mock_container('another', 1),
+ ]
+ service_names = ['web', 'db']
+ actual = filter_attached_containers(containers, service_names, attach_dependencies=True)
+ assert actual == containers
+
+ def test_filter_attached_containers_all(self):
containers = [
mock_container('web', 1),
mock_container('db', 1),
mock_container('other', 1),
]
service_names = []
- actual = filter_containers_to_service_names(containers, service_names)
+ actual = filter_attached_containers(containers, service_names)
assert actual == containers
def test_warning_in_swarm_mode(self):
@@ -63,30 +74,87 @@ class TestCLIMainTestCase(object):
with mock.patch('compose.cli.main.log') as fake_log:
warn_for_swarm_mode(mock_client)
- assert fake_log.warn.call_count == 1
-
-
-class TestSetupConsoleHandlerTestCase(object):
-
- def test_with_tty_verbose(self, logging_handler):
+ assert fake_log.warning.call_count == 1
+
+ def test_build_one_off_container_options(self):
+ command = 'build myservice'
+ detach = False
+ options = {
+ '-e': ['MYVAR=MYVALUE'],
+ '-T': True,
+ '--label': ['MYLABEL'],
+ '--entrypoint': 'bash',
+ '--user': 'MYUSER',
+ '--service-ports': [],
+ '--publish': '',
+ '--name': 'MYNAME',
+ '--workdir': '.',
+ '--volume': [],
+ 'stdin_open': False,
+ }
+
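+ # the docopt-style CLI options above should map onto these container options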
+ expected_container_options = {
+ 'command': command,
+ 'tty': False,
+ 'stdin_open': False,
+ 'detach': detach,
+ 'entrypoint': 'bash',
+ 'environment': {'MYVAR': 'MYVALUE'},
+ 'labels': {'MYLABEL': ''},
+ 'name': 'MYNAME',
+ 'ports': [],
+ 'restart': None,
+ 'user': 'MYUSER',
+ 'working_dir': '.',
+ }
+
+ container_options = build_one_off_container_options(options, detach, command)
+ assert container_options == expected_container_options
+
+ def test_get_docker_start_call(self):
+ container_id = 'my_container_id'
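+ # 'detach': False maps to --attach, 'stdin_open': True maps to --interactive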
+
+ mock_container_options = {'detach': False, 'stdin_open': True}
+ expected_docker_start_call = ['start', '--attach', '--interactive', container_id]
+ docker_start_call = get_docker_start_call(mock_container_options, container_id)
+ assert expected_docker_start_call == docker_start_call
+
+ mock_container_options = {'detach': False, 'stdin_open': False}
+ expected_docker_start_call = ['start', '--attach', container_id]
+ docker_start_call = get_docker_start_call(mock_container_options, container_id)
+ assert expected_docker_start_call == docker_start_call
+
+ mock_container_options = {'detach': True, 'stdin_open': True}
+ expected_docker_start_call = ['start', '--interactive', container_id]
+ docker_start_call = get_docker_start_call(mock_container_options, container_id)
+ assert expected_docker_start_call == docker_start_call
+
+ mock_container_options = {'detach': True, 'stdin_open': False}
+ expected_docker_start_call = ['start', container_id]
+ docker_start_call = get_docker_start_call(mock_container_options, container_id)
+ assert expected_docker_start_call == docker_start_call
+
+
+class TestSetupConsoleHandlerTestCase:
+
+ def test_with_console_formatter_verbose(self, logging_handler):
setup_console_handler(logging_handler, True)
assert type(logging_handler.formatter) == ConsoleWarningFormatter
assert '%(name)s' in logging_handler.formatter._fmt
assert '%(funcName)s' in logging_handler.formatter._fmt
- def test_with_tty_not_verbose(self, logging_handler):
+ def test_with_console_formatter_not_verbose(self, logging_handler):
setup_console_handler(logging_handler, False)
assert type(logging_handler.formatter) == ConsoleWarningFormatter
assert '%(name)s' not in logging_handler.formatter._fmt
assert '%(funcName)s' not in logging_handler.formatter._fmt
- def test_with_not_a_tty(self, logging_handler):
- logging_handler.stream.isatty.return_value = False
- setup_console_handler(logging_handler, False)
+ def test_without_console_formatter(self, logging_handler):
+ setup_console_handler(logging_handler, False, use_console_formatter=False)
assert type(logging_handler.formatter) == logging.Formatter
-class TestConvergeStrategyFromOptsTestCase(object):
+class TestConvergeStrategyFromOptsTestCase:
def test_invalid_opts(self):
options = {'--force-recreate': True, '--no-recreate': True}
@@ -120,16 +188,16 @@ def mock_find_executable(exe):
@mock.patch('compose.cli.main.find_executable', mock_find_executable)
-class TestCallDocker(object):
+class TestCallDocker:
def test_simple_no_options(self):
with mock.patch('subprocess.call') as fake_call:
- call_docker(['ps'], {})
+ call_docker(['ps'], {}, {})
assert fake_call.call_args[0][0] == ['docker', 'ps']
def test_simple_tls_option(self):
with mock.patch('subprocess.call') as fake_call:
- call_docker(['ps'], {'--tls': True})
+ call_docker(['ps'], {'--tls': True}, {})
assert fake_call.call_args[0][0] == ['docker', '--tls', 'ps']
@@ -140,7 +208,7 @@ class TestCallDocker(object):
'--tlscacert': './ca.pem',
'--tlscert': './cert.pem',
'--tlskey': './key.pem',
- })
+ }, {})
assert fake_call.call_args[0][0] == [
'docker', '--tls', '--tlscacert', './ca.pem', '--tlscert',
@@ -149,16 +217,33 @@ class TestCallDocker(object):
def test_with_host_option(self):
with mock.patch('subprocess.call') as fake_call:
- call_docker(['ps'], {'--host': 'tcp://mydocker.net:2333'})
+ call_docker(['ps'], {'--host': 'tcp://mydocker.net:2333'}, {})
assert fake_call.call_args[0][0] == [
'docker', '--host', 'tcp://mydocker.net:2333', 'ps'
]
+ def test_with_http_host(self):
+ with mock.patch('subprocess.call') as fake_call:
+ call_docker(['ps'], {'--host': 'http://mydocker.net:2333'}, {})
+
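+ # the http:// scheme should be rewritten to tcp:// before the docker CLI is invoked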
+ assert fake_call.call_args[0][0] == [
+ 'docker', '--host', 'tcp://mydocker.net:2333', 'ps',
+ ]
+
def test_with_host_option_shorthand_equal(self):
with mock.patch('subprocess.call') as fake_call:
- call_docker(['ps'], {'--host': '=tcp://mydocker.net:2333'})
+ call_docker(['ps'], {'--host': '=tcp://mydocker.net:2333'}, {})
assert fake_call.call_args[0][0] == [
'docker', '--host', 'tcp://mydocker.net:2333', 'ps'
]
+
+ def test_with_env(self):
+ with mock.patch('subprocess.call') as fake_call:
+ call_docker(['ps'], {}, {'DOCKER_HOST': 'tcp://mydocker.net:2333'})
+
+ assert fake_call.call_args[0][0] == [
+ 'docker', 'ps'
+ ]
+ assert fake_call.call_args[1]['env'] == {'DOCKER_HOST': 'tcp://mydocker.net:2333'}
diff --git a/tests/unit/cli/utils_test.py b/tests/unit/cli/utils_test.py
index 26524ff3..d67c8ba8 100644
--- a/tests/unit/cli/utils_test.py
+++ b/tests/unit/cli/utils_test.py
@@ -1,8 +1,6 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import unittest
+from compose.cli.utils import human_readable_file_size
from compose.utils import unquote_path
@@ -21,3 +19,27 @@ class UnquotePathTest(unittest.TestCase):
assert unquote_path('""hello""') == '"hello"'
assert unquote_path('"hel"lo"') == 'hel"lo'
assert unquote_path('"hello""') == 'hello"'
+
+
+class HumanReadableFileSizeTest(unittest.TestCase):
+ def test_100b(self):
+ assert human_readable_file_size(100) == '100 B'
+
+ def test_1kb(self):
+ assert human_readable_file_size(1000) == '1 kB'
+ assert human_readable_file_size(1024) == '1.024 kB'
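+ # decimal (SI) units: 1 kB is 1000 B, so 1024 B renders as 1.024 kB, not 1 KiB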
+
+ def test_1023b(self):
+ assert human_readable_file_size(1023) == '1.023 kB'
+
+ def test_999b(self):
+ assert human_readable_file_size(999) == '999 B'
+
+ def test_units(self):
+ assert human_readable_file_size((10 ** 3) ** 0) == '1 B'
+ assert human_readable_file_size((10 ** 3) ** 1) == '1 kB'
+ assert human_readable_file_size((10 ** 3) ** 2) == '1 MB'
+ assert human_readable_file_size((10 ** 3) ** 3) == '1 GB'
+ assert human_readable_file_size((10 ** 3) ** 4) == '1 TB'
+ assert human_readable_file_size((10 ** 3) ** 5) == '1 PB'
+ assert human_readable_file_size((10 ** 3) ** 6) == '1 EB'
diff --git a/tests/unit/cli/verbose_proxy_test.py b/tests/unit/cli/verbose_proxy_test.py
index f111f8cd..0da662fd 100644
--- a/tests/unit/cli/verbose_proxy_test.py
+++ b/tests/unit/cli/verbose_proxy_test.py
@@ -1,8 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import six
-
from compose.cli import verbose_proxy
from tests import unittest
@@ -10,7 +5,7 @@ from tests import unittest
class VerboseProxyTestCase(unittest.TestCase):
def test_format_call(self):
- prefix = '' if six.PY3 else 'u'
+ prefix = ''
expected = "(%(p)s'arg1', True, key=%(p)s'value')" % dict(p=prefix)
actual = verbose_proxy.format_call(
("arg1", True),
diff --git a/tests/unit/cli_test.py b/tests/unit/cli_test.py
index 7c8a1423..fa6e7674 100644
--- a/tests/unit/cli_test.py
+++ b/tests/unit/cli_test.py
@@ -1,7 +1,3 @@
-# encoding: utf-8
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import os
import shutil
import tempfile
@@ -20,7 +16,10 @@ from compose.cli.command import get_project_name
from compose.cli.docopt_command import NoSuchCommand
from compose.cli.errors import UserError
from compose.cli.main import TopLevelCommand
+from compose.config.environment import Environment
from compose.const import IS_WINDOWS_PLATFORM
+from compose.const import LABEL_SERVICE
+from compose.container import Container
from compose.project import Project
@@ -79,7 +78,9 @@ class CLITestCase(unittest.TestCase):
def test_get_project(self):
base_dir = 'tests/fixtures/longer-filename-composefile'
- project = get_project(base_dir)
+ env = Environment.from_env_file(base_dir)
+ env['COMPOSE_API_VERSION'] = DEFAULT_DOCKER_API_VERSION
+ project = get_project(base_dir, environment=env)
assert project.name == 'longer-filename-composefile'
assert project.client
assert project.services
@@ -97,12 +98,26 @@ class CLITestCase(unittest.TestCase):
@pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason="requires dockerpty")
@mock.patch('compose.cli.main.RunOperation', autospec=True)
@mock.patch('compose.cli.main.PseudoTerminal', autospec=True)
+ @mock.patch('compose.service.Container.create')
@mock.patch.dict(os.environ)
- def test_run_interactive_passes_logs_false(self, mock_pseudo_terminal, mock_run_operation):
+ def test_run_interactive_passes_logs_false(
+ self,
+ mock_container_create,
+ mock_pseudo_terminal,
+ mock_run_operation,
+ ):
os.environ['COMPOSE_INTERACTIVE_NO_CLI'] = 'true'
mock_client = mock.create_autospec(docker.APIClient)
mock_client.api_version = DEFAULT_DOCKER_API_VERSION
mock_client._general_configs = {}
+ mock_container_create.return_value = Container(mock_client, {
+ 'Id': '37b35e0ba80d91009d37e16f249b32b84f72bda269985578ed6c75a0a13fcaa8',
+ 'Config': {
+ 'Labels': {
+ LABEL_SERVICE: 'service',
+ }
+ },
+ }, has_been_inspected=True)
project = Project.from_config(
name='composetest',
client=mock_client,
@@ -135,10 +150,20 @@ class CLITestCase(unittest.TestCase):
_, _, call_kwargs = mock_run_operation.mock_calls[0]
assert call_kwargs['logs'] is False
- def test_run_service_with_restart_always(self):
+ @mock.patch('compose.service.Container.create')
+ def test_run_service_with_restart_always(self, mock_container_create):
mock_client = mock.create_autospec(docker.APIClient)
mock_client.api_version = DEFAULT_DOCKER_API_VERSION
mock_client._general_configs = {}
+ mock_container_create.return_value = Container(mock_client, {
+ 'Id': '37b35e0ba80d91009d37e16f249b32b84f72bda269985578ed6c75a0a13fcaa8',
+ 'Name': 'composetest_service_37b35',
+ 'Config': {
+ 'Labels': {
+ LABEL_SERVICE: 'service',
+ }
+ },
+ }, has_been_inspected=True)
project = Project.from_config(
name='composetest',
@@ -171,7 +196,10 @@ class CLITestCase(unittest.TestCase):
'--workdir': None,
})
- assert mock_client.create_host_config.call_args[1]['restart_policy']['Name'] == 'always'
+ # NOTE: The "run" command is supposed to be a one-off tool; therefore restart policy "no"
+ # (the default) is enforced despite explicit wish for "always" in the project
+ # configuration file
+ assert not mock_client.create_host_config.call_args[1].get('restart_policy')
command = TopLevelCommand(project)
command.run({
@@ -195,6 +223,55 @@ class CLITestCase(unittest.TestCase):
assert not mock_client.create_host_config.call_args[1].get('restart_policy')
+ @mock.patch('compose.project.Project.up')
+ @mock.patch.dict(os.environ)
+ def test_run_up_with_docker_cli_build(self, mock_project_up):
+ os.environ['COMPOSE_DOCKER_CLI_BUILD'] = '1'
+ mock_client = mock.create_autospec(docker.APIClient)
+ mock_client.api_version = DEFAULT_DOCKER_API_VERSION
+ mock_client._general_configs = {}
+ container = Container(mock_client, {
+ 'Id': '37b35e0ba80d91009d37e16f249b32b84f72bda269985578ed6c75a0a13fcaa8',
+ 'Name': 'composetest_service_37b35',
+ 'Config': {
+ 'Labels': {
+ LABEL_SERVICE: 'service',
+ }
+ },
+ }, has_been_inspected=True)
+ mock_project_up.return_value = [container]
+
+ project = Project.from_config(
+ name='composetest',
+ config_data=build_config({
+ 'service': {'image': 'busybox'}
+ }),
+ client=mock_client,
+ )
+
+ command = TopLevelCommand(project)
+ command.run({
+ 'SERVICE': 'service',
+ 'COMMAND': None,
+ '-e': [],
+ '--label': [],
+ '--user': None,
+ '--no-deps': None,
+ '--detach': True,
+ '-T': None,
+ '--entrypoint': None,
+ '--service-ports': None,
+ '--use-aliases': None,
+ '--publish': [],
+ '--volume': [],
+ '--rm': None,
+ '--name': None,
+ '--workdir': None,
+ })
+
+ _, _, call_kwargs = mock_project_up.mock_calls[0]
+ assert call_kwargs.get('cli')
+
def test_command_manual_and_service_ports_together(self):
project = Project.from_config(
name='composetest',
diff --git a/tests/unit/config/config_test.py b/tests/unit/config/config_test.py
index 8a75648a..72e39e4f 100644
--- a/tests/unit/config/config_test.py
+++ b/tests/unit/config/config_test.py
@@ -1,21 +1,21 @@
-# encoding: utf-8
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
import codecs
import os
import shutil
import tempfile
from operator import itemgetter
+from random import shuffle
-import py
import pytest
import yaml
+from ddt import data
+from ddt import ddt
from ...helpers import build_config_details
+from ...helpers import BUSYBOX_IMAGE_WITH_TAG
+from ...helpers import cd
from compose.config import config
from compose.config import types
+from compose.config.config import ConfigFile
from compose.config.config import resolve_build_args
from compose.config.config import resolve_environment
from compose.config.environment import Environment
@@ -25,24 +25,16 @@ from compose.config.serialize import denormalize_service_dict
from compose.config.serialize import serialize_config
from compose.config.serialize import serialize_ns_time_value
from compose.config.types import VolumeSpec
+from compose.const import COMPOSE_SPEC as VERSION
from compose.const import COMPOSEFILE_V1 as V1
-from compose.const import COMPOSEFILE_V2_0 as V2_0
-from compose.const import COMPOSEFILE_V2_1 as V2_1
-from compose.const import COMPOSEFILE_V2_2 as V2_2
-from compose.const import COMPOSEFILE_V2_3 as V2_3
-from compose.const import COMPOSEFILE_V3_0 as V3_0
-from compose.const import COMPOSEFILE_V3_1 as V3_1
-from compose.const import COMPOSEFILE_V3_2 as V3_2
-from compose.const import COMPOSEFILE_V3_3 as V3_3
-from compose.const import COMPOSEFILE_V3_5 as V3_5
from compose.const import IS_WINDOWS_PLATFORM
from tests import mock
from tests import unittest
-DEFAULT_VERSION = V2_0
+DEFAULT_VERSION = VERSION
-def make_service_dict(name, service_dict, working_dir, filename=None):
+def make_service_dict(name, service_dict, working_dir='.', filename=None):
"""Test helper function to construct a ServiceExtendsResolver
"""
resolver = config.ServiceExtendsResolver(
@@ -65,14 +57,17 @@ def secret_sort(secrets):
return sorted(secrets, key=itemgetter('source'))
+@ddt
class ConfigTest(unittest.TestCase):
def test_load(self):
service_dicts = config.load(
build_config_details(
{
- 'foo': {'image': 'busybox'},
- 'bar': {'image': 'busybox', 'environment': ['FOO=1']},
+ 'services': {
+ 'foo': {'image': 'busybox'},
+ 'bar': {'image': 'busybox', 'environment': ['FOO=1']},
+ }
},
'tests/fixtures/extends',
'common.yml'
@@ -165,25 +160,22 @@ class ConfigTest(unittest.TestCase):
}
def test_valid_versions(self):
- for version in ['2', '2.0']:
- cfg = config.load(build_config_details({'version': version}))
- assert cfg.version == V2_0
-
- cfg = config.load(build_config_details({'version': '2.1'}))
- assert cfg.version == V2_1
-
- cfg = config.load(build_config_details({'version': '2.2'}))
- assert cfg.version == V2_2
-
- cfg = config.load(build_config_details({'version': '2.3'}))
- assert cfg.version == V2_3
+ cfg = config.load(
+ build_config_details({
+ 'services': {
+ 'foo': {'image': 'busybox'},
+ 'bar': {'image': 'busybox', 'environment': ['FOO=1']},
+ }
+ })
+ )
+ assert cfg.config_version == VERSION
+ assert cfg.version == VERSION
- for version in ['3', '3.0']:
+ for version in ['2', '2.0', '2.1', '2.2', '2.3',
+ '3', '3.0', '3.1', '3.2', '3.3', '3.4', '3.5', '3.6', '3.7', '3.8']:
cfg = config.load(build_config_details({'version': version}))
- assert cfg.version == V3_0
-
- cfg = config.load(build_config_details({'version': '3.1'}))
- assert cfg.version == V3_1
+ assert cfg.config_version == version
+ assert cfg.version == VERSION
def test_v1_file_version(self):
cfg = config.load(build_config_details({'web': {'image': 'busybox'}}))
@@ -195,7 +187,7 @@ class ConfigTest(unittest.TestCase):
assert list(s['name'] for s in cfg.services) == ['version']
def test_wrong_version_type(self):
- for version in [None, 1, 2, 2.0]:
+ for version in [1, 2, 2.0]:
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
@@ -211,12 +203,12 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
- {'version': '2.18'},
+ {'version': '1'},
filename='filename.yml',
)
)
- assert 'Version in "filename.yml" is unsupported' in excinfo.exconly()
+ assert 'Version in "filename.yml" is invalid' in excinfo.exconly()
assert VERSION_EXPLANATION in excinfo.exconly()
def test_version_1_is_invalid(self):
@@ -246,7 +238,9 @@ class ConfigTest(unittest.TestCase):
)
)
- assert 'Invalid top-level property "web"' in excinfo.exconly()
+ assert "compose.config.errors.ConfigurationError: " \
+ "The Compose file 'filename.yml' is invalid because:\n" \
+ "'web' does not match any of the regexes: '^x-'" in excinfo.exconly()
assert VERSION_EXPLANATION in excinfo.exconly()
def test_named_volume_config_empty(self):
@@ -326,9 +320,8 @@ class ConfigTest(unittest.TestCase):
}
}, 'working_dir', 'filename.yml')
)
-
assert 'Unexpected type for "version" key in "filename.yml"' \
- in mock_logging.warn.call_args[0][0]
+ in mock_logging.warning.call_args[0][0]
service_dicts = config_data.services
assert service_sort(service_dicts) == service_sort([
@@ -342,7 +335,7 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError):
config.load(
build_config_details(
- {'web': 'busybox:latest'},
+ {'web': BUSYBOX_IMAGE_WITH_TAG},
'working_dir',
'filename.yml'
)
@@ -352,7 +345,7 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError):
config.load(
build_config_details(
- {'version': '2', 'services': {'web': 'busybox:latest'}},
+ {'version': '2', 'services': {'web': BUSYBOX_IMAGE_WITH_TAG}},
'working_dir',
'filename.yml'
)
@@ -363,7 +356,7 @@ class ConfigTest(unittest.TestCase):
config.load(
build_config_details({
'version': '2',
- 'services': {'web': 'busybox:latest'},
+ 'services': {'web': BUSYBOX_IMAGE_WITH_TAG},
'networks': {
'invalid': {'foo', 'bar'}
}
@@ -374,7 +367,7 @@ class ConfigTest(unittest.TestCase):
base_file = config.ConfigFile(
'base.yaml',
{
- 'version': str(V2_1),
+ 'version': '2',
'services': {
'web': {
'image': 'example/web',
@@ -509,7 +502,15 @@ class ConfigTest(unittest.TestCase):
for invalid_name in ['?not?allowed', ' ', '', '!', '/', '\xe2']:
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
- {invalid_name: {'image': 'busybox'}}))
+ {
+ 'version': '2',
+ 'services': {
+ invalid_name:
+ {
+ 'image': 'busybox'
+ }
+ }
+ }))
assert 'Invalid service name \'%s\'' % invalid_name in exc.exconly()
def test_load_config_invalid_service_names_v2(self):
@@ -541,17 +542,24 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
- 'web': {'image': 'busybox', 'name': 'bogus'},
+ 'version': '2',
+ 'services': {
+ 'web': {'image': 'busybox', 'name': 'bogus'}
+ }
},
'working_dir',
'filename.yml',
))
-
- assert "Unsupported config option for web: 'name'" in exc.exconly()
+ assert "Unsupported config option for services.web: 'name'" in exc.exconly()
def test_load_invalid_service_definition(self):
config_details = build_config_details(
- {'web': 'wrong'},
+ {
+ 'version': '2',
+ 'services': {
+ 'web': 'wrong'
+ }
+ },
'working_dir',
'filename.yml')
with pytest.raises(ConfigurationError) as exc:
@@ -583,7 +591,10 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
- {1: {'image': 'busybox'}},
+ {
+ 'version': '2',
+ 'services': {1: {'image': 'busybox'}}
+ },
'working_dir',
'filename.yml'
)
@@ -612,6 +623,38 @@ class ConfigTest(unittest.TestCase):
excinfo.exconly()
)
+ def test_config_integer_service_name_raise_validation_error_v2_when_no_interpolate(self):
+ with pytest.raises(ConfigurationError) as excinfo:
+ config.load(
+ build_config_details(
+ {
+ 'version': '2',
+ 'services': {1: {'image': 'busybox'}}
+ },
+ 'working_dir',
+ 'filename.yml'
+ ),
+ interpolate=False
+ )
+
+ assert (
+ "In file 'filename.yml', the service name 1 must be a quoted string, i.e. '1'." in
+ excinfo.exconly()
+ )
+
+ def test_config_integer_service_property_raise_validation_error(self):
+ with pytest.raises(ConfigurationError) as excinfo:
+ config.load(
+ build_config_details({
+ 'version': '2.1',
+ 'services': {'foobar': {'image': 'busybox', 1234: 'hah'}}
+ }, 'working_dir', 'filename.yml')
+ )
+
+ assert (
+ "Unsupported config option for services.foobar: '1234'" in excinfo.exconly()
+ )
+
def test_config_invalid_service_name_raise_validation_error(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
@@ -626,7 +669,7 @@ class ConfigTest(unittest.TestCase):
assert 'Invalid service name \'mong\\o\'' in excinfo.exconly()
- def test_config_duplicate_cache_from_values_validation_error(self):
+ def test_config_duplicate_cache_from_values_no_validation_error(self):
with pytest.raises(ConfigurationError) as exc:
config.load(
build_config_details({
@@ -638,7 +681,7 @@ class ConfigTest(unittest.TestCase):
})
)
- assert 'build.cache_from contains non-unique items' in exc.exconly()
+ assert 'build.cache_from contains non-unique items' not in exc.exconly()
def test_load_with_multiple_files_v1(self):
base_file = config.ConfigFile(
@@ -742,13 +785,14 @@ class ConfigTest(unittest.TestCase):
})
details = config.ConfigDetails('.', [base_file, override_file])
- tmpdir = py.test.ensuretemp('config_test')
- self.addCleanup(tmpdir.remove)
- tmpdir.join('common.yml').write("""
- base:
- labels: ['label=one']
- """)
- with tmpdir.as_cwd():
+ tmpdir = tempfile.mkdtemp('config_test')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ with open(os.path.join(tmpdir, 'common.yml'), mode="w") as common_fh:
+ common_fh.write("""
+ base:
+ labels: ['label=one']
+ """)
+ with cd(tmpdir):
service_dicts = config.load(details).services
expected = [
@@ -777,19 +821,20 @@ class ConfigTest(unittest.TestCase):
}
)
- tmpdir = pytest.ensuretemp('config_test')
- self.addCleanup(tmpdir.remove)
- tmpdir.join('base.yml').write("""
- version: '2.2'
- services:
- base:
- image: base
- web:
- extends: base
- """)
+ tmpdir = tempfile.mkdtemp('config_test')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
+ base_fh.write("""
+ version: '2.2'
+ services:
+ base:
+ image: base
+ web:
+ extends: base
+ """)
details = config.ConfigDetails('.', [main_file])
- with tmpdir.as_cwd():
+ with cd(tmpdir):
service_dicts = config.load(details).services
assert service_dicts[0] == {
'name': 'prodweb',
@@ -800,10 +845,10 @@ class ConfigTest(unittest.TestCase):
def test_load_with_multiple_files_and_invalid_override(self):
base_file = config.ConfigFile(
'base.yaml',
- {'web': {'image': 'example/web'}})
+ {'version': '2', 'services': {'web': {'image': 'example/web'}}})
override_file = config.ConfigFile(
'override.yaml',
- {'bogus': 'thing'})
+ {'version': '2', 'services': {'bogus': 'thing'}})
details = config.ConfigDetails('.', [base_file, override_file])
with pytest.raises(ConfigurationError) as exc:
@@ -814,15 +859,15 @@ class ConfigTest(unittest.TestCase):
def test_load_sorts_in_dependency_order(self):
config_details = build_config_details({
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'links': ['db'],
},
'db': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes_from': ['volume:ro']
},
'volume': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['/tmp'],
}
})
@@ -941,7 +986,6 @@ class ConfigTest(unittest.TestCase):
service = config.load(
build_config_details(
{
- 'version': str(V3_3),
'services': {
'web': {
'build': {
@@ -1071,8 +1115,43 @@ class ConfigTest(unittest.TestCase):
details = config.ConfigDetails('.', [base_file, override_file])
web_service = config.load(details).services[0]
assert web_service['networks'] == {
- 'foobar': {'aliases': ['foo', 'bar']},
- 'baz': None
+ 'foobar': {'aliases': ['bar', 'foo']},
+ 'baz': {}
+ }
+
+ def test_load_with_multiple_files_mismatched_networks_format_inverse_order(self):
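+ # Same merge as the previous test, but the list-form and dict-form networks arrive in the opposite file order.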
+ base_file = config.ConfigFile(
+ 'override.yaml',
+ {
+ 'version': '2',
+ 'services': {
+ 'web': {
+ 'networks': ['baz']
+ }
+ }
+ }
+ )
+ override_file = config.ConfigFile(
+ 'base.yaml',
+ {
+ 'version': '2',
+ 'services': {
+ 'web': {
+ 'image': 'example/web',
+ 'networks': {
+ 'foobar': {'aliases': ['foo', 'bar']}
+ }
+ }
+ },
+ 'networks': {'foobar': {}, 'baz': {}}
+ }
+ )
+
+ details = config.ConfigDetails('.', [base_file, override_file])
+ web_service = config.load(details).services[0]
+ assert web_service['networks'] == {
+ 'foobar': {'aliases': ['bar', 'foo']},
+ 'baz': {}
}
def test_load_with_multiple_files_v2(self):
@@ -1212,7 +1291,7 @@ class ConfigTest(unittest.TestCase):
'version': '2',
'services': {
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['data0028:/data:ro'],
},
},
@@ -1228,7 +1307,7 @@ class ConfigTest(unittest.TestCase):
'version': '2',
'services': {
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['./data0028:/data:ro'],
},
},
@@ -1244,7 +1323,7 @@ class ConfigTest(unittest.TestCase):
'base.yaml',
{
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['data0028:/data:ro'],
},
}
@@ -1261,7 +1340,7 @@ class ConfigTest(unittest.TestCase):
'version': '2.3',
'services': {
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': [
{
'target': '/anonymous', 'type': 'volume'
@@ -1291,7 +1370,7 @@ class ConfigTest(unittest.TestCase):
assert tmpfs_mount.target == '/tmpfs'
assert not tmpfs_mount.is_named_volume
- assert host_mount.source == os.path.normpath('/abc')
+ assert host_mount.source == '/abc'
assert host_mount.target == '/xyz'
assert not host_mount.is_named_volume
@@ -1306,7 +1385,7 @@ class ConfigTest(unittest.TestCase):
'version': '3.4',
'services': {
'web': {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': [
{'type': 'bind', 'source': './web', 'target': '/web'},
],
@@ -1322,6 +1401,85 @@ class ConfigTest(unittest.TestCase):
assert mount.type == 'bind'
assert mount.source == expected_source
+ def test_load_bind_mount_relative_path_with_tilde(self):
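+ # A '~' prefix in a bind-mount source should be expanded to the user's home directory.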
+ base_file = config.ConfigFile(
+ 'base.yaml', {
+ 'version': '3.4',
+ 'services': {
+ 'web': {
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
+ 'volumes': [
+ {'type': 'bind', 'source': '~/web', 'target': '/web'},
+ ],
+ },
+ },
+ },
+ )
+
+ details = config.ConfigDetails('.', [base_file])
+ config_data = config.load(details)
+ mount = config_data.services[0].get('volumes')[0]
+ assert mount.target == '/web'
+ assert mount.type == 'bind'
+ assert (
+ not mount.source.startswith('~') and mount.source.endswith(
+ '{}web'.format(os.path.sep)
+ )
+ )
+
+ def test_config_invalid_ipam_config(self):
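+ # Entries under ipam.config must be mappings; a bare subnet string is invalid.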
+ with pytest.raises(ConfigurationError) as excinfo:
+ config.load(
+ build_config_details(
+ {
+ 'version': str(VERSION),
+ 'networks': {
+ 'foo': {
+ 'driver': 'default',
+ 'ipam': {
+ 'driver': 'default',
+ 'config': ['172.18.0.0/16'],
+ }
+ }
+ }
+ },
+ filename='filename.yml',
+ )
+ )
+ assert ('networks.foo.ipam.config contains an invalid type,'
+ ' it should be an object') in excinfo.exconly()
+
+ def test_config_valid_ipam_config(self):
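+ # A fully specified ipam.config mapping should load back unchanged.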
+ ipam_config = {
+ 'subnet': '172.28.0.0/16',
+ 'ip_range': '172.28.5.0/24',
+ 'gateway': '172.28.5.254',
+ 'aux_addresses': {
+ 'host1': '172.28.1.5',
+ 'host2': '172.28.1.6',
+ 'host3': '172.28.1.7',
+ },
+ }
+ networks = config.load(
+ build_config_details(
+ {
+ 'networks': {
+ 'foo': {
+ 'driver': 'default',
+ 'ipam': {
+ 'driver': 'default',
+ 'config': [ipam_config],
+ }
+ }
+ }
+ },
+ filename='filename.yml',
+ )
+ ).networks
+
+ assert 'foo' in networks
+ assert networks['foo']['ipam']['config'] == [ipam_config]
+
def test_config_valid_service_names(self):
for valid_name in ['_', '-', '.__.', '_what-up.', 'what_.up----', 'whatup']:
services = config.load(
@@ -1336,7 +1494,10 @@ class ConfigTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'foo': {'image': 'busybox', 'privilige': 'something'},
+ 'version': str(VERSION),
+ 'services': {
+ 'foo': {'image': 'busybox', 'privilige': 'something'},
+ }
},
'tests/fixtures/extends',
'filename.yml'
@@ -1357,7 +1518,10 @@ class ConfigTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'foo': {'image': 1},
+ 'version': str(VERSION),
+ 'services': {
+ 'foo': {'image': 1},
+ }
},
'tests/fixtures/extends',
'filename.yml'
@@ -1404,7 +1568,10 @@ class ConfigTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'foo': {'image': 'busybox', 'links': 'an_link'},
+ 'version': str(VERSION),
+ 'services': {
+ 'foo': {'image': 'busybox', 'links': 'an_link'},
+ }
},
'tests/fixtures/extends',
'filename.yml'
@@ -1432,7 +1599,10 @@ class ConfigTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'web': {'build': '.', 'devices': ['/dev/foo:/dev/foo', '/dev/foo:/dev/foo']}
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {'build': '.', 'devices': ['/dev/foo:/dev/foo', '/dev/foo:/dev/foo']}
+ }
},
'tests/fixtures/extends',
'filename.yml'
@@ -1446,7 +1616,10 @@ class ConfigTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'web': {'build': '.', 'command': [1]}
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {'build': '.', 'command': [1]}
+ }
},
'tests/fixtures/extends',
'filename.yml'
@@ -1471,10 +1644,13 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
- {'web': {
- 'image': 'busybox',
- 'extra_hosts': 'somehost:162.242.195.82'
- }},
+ {
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'extra_hosts': 'somehost:162.242.195.82'}}
+ },
'working_dir',
'filename.yml'
)
@@ -1487,13 +1663,16 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
- {'web': {
- 'image': 'busybox',
- 'extra_hosts': [
- {'somehost': '162.242.195.82'},
- {'otherhost': '50.31.209.229'}
- ]
- }},
+ {
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'extra_hosts': [
+ {'somehost': '162.242.195.82'},
+ {'otherhost': '50.31.209.229'}
+ ]}}
+ },
'working_dir',
'filename.yml'
)
@@ -1507,13 +1686,16 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
- 'web': {
- 'image': 'busybox',
- 'ulimits': {
- 'nofile': {
- "not_soft_or_hard": 100,
- "soft": 10000,
- "hard": 20000,
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'ulimits': {
+ 'nofile': {
+ "not_soft_or_hard": 100,
+ "soft": 10000,
+ "hard": 20000,
+ }
}
}
}
@@ -1528,9 +1710,12 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
- 'web': {
- 'image': 'busybox',
- 'ulimits': {'nofile': {"soft": 10000}}
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'ulimits': {'nofile': {"soft": 10000}}
+ }
}
},
'working_dir',
@@ -1544,10 +1729,13 @@ class ConfigTest(unittest.TestCase):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
- 'web': {
- 'image': 'busybox',
- 'ulimits': {
- 'nofile': {"soft": 10000, "hard": 1000}
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'ulimits': {
+ 'nofile': {"soft": 10000, "hard": 1000}
+ }
}
}
},
@@ -1560,10 +1748,12 @@ class ConfigTest(unittest.TestCase):
for expose in expose_values:
service = config.load(
build_config_details(
- {'web': {
- 'image': 'busybox',
- 'expose': expose
- }},
+ {
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'expose': expose}}},
'working_dir',
'filename.yml'
)
@@ -1575,10 +1765,12 @@ class ConfigTest(unittest.TestCase):
for entrypoint in entrypoint_values:
service = config.load(
build_config_details(
- {'web': {
- 'image': 'busybox',
- 'entrypoint': entrypoint
- }},
+ {
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'entrypoint': entrypoint}}},
'working_dir',
'filename.yml'
)
@@ -1587,9 +1779,12 @@ class ConfigTest(unittest.TestCase):
def test_logs_warning_for_boolean_in_environment(self):
config_details = build_config_details({
- 'web': {
- 'image': 'busybox',
- 'environment': {'SHOW_STUFF': True}
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'environment': {'SHOW_STUFF': True}
+ }
}
})
@@ -1601,10 +1796,12 @@ class ConfigTest(unittest.TestCase):
def test_config_valid_environment_dict_key_contains_dashes(self):
services = config.load(
build_config_details(
- {'web': {
- 'image': 'busybox',
- 'environment': {'SPRING_JPA_HIBERNATE_DDL-AUTO': 'none'}
- }},
+ {
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'environment': {'SPRING_JPA_HIBERNATE_DDL-AUTO': 'none'}}}},
'working_dir',
'filename.yml'
)
@@ -1612,22 +1809,23 @@ class ConfigTest(unittest.TestCase):
assert services[0]['environment']['SPRING_JPA_HIBERNATE_DDL-AUTO'] == 'none'
def test_load_yaml_with_yaml_error(self):
- tmpdir = py.test.ensuretemp('invalid_yaml_test')
- self.addCleanup(tmpdir.remove)
- invalid_yaml_file = tmpdir.join('docker-compose.yml')
- invalid_yaml_file.write("""
- web:
- this is bogus: ok: what
- """)
+ tmpdir = tempfile.mkdtemp('invalid_yaml_test')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ invalid_yaml_file = os.path.join(tmpdir, 'docker-compose.yml')
+ with open(invalid_yaml_file, mode="w") as invalid_yaml_file_fh:
+ invalid_yaml_file_fh.write("""
+web:
+ this is bogus: ok: what
+ """)
with pytest.raises(ConfigurationError) as exc:
config.load_yaml(str(invalid_yaml_file))
- assert 'line 3, column 32' in exc.exconly()
+ assert 'line 3, column 22' in exc.exconly()
def test_load_yaml_with_bom(self):
- tmpdir = py.test.ensuretemp('bom_yaml')
- self.addCleanup(tmpdir.remove)
- bom_yaml = tmpdir.join('docker-compose.yml')
+ tmpdir = tempfile.mkdtemp('bom_yaml')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ bom_yaml = os.path.join(tmpdir, 'docker-compose.yml')
with codecs.open(str(bom_yaml), 'w', encoding='utf-8') as f:
f.write('''\ufeff
version: '2.3'
@@ -1642,9 +1840,12 @@ class ConfigTest(unittest.TestCase):
def test_validate_extra_hosts_invalid(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details({
- 'web': {
- 'image': 'alpine',
- 'extra_hosts': "www.example.com: 192.168.0.17",
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'alpine',
+ 'extra_hosts': "www.example.com: 192.168.0.17",
+ }
}
}))
assert "web.extra_hosts contains an invalid type" in exc.exconly()
@@ -1652,22 +1853,28 @@ class ConfigTest(unittest.TestCase):
def test_validate_extra_hosts_invalid_list(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details({
- 'web': {
- 'image': 'alpine',
- 'extra_hosts': [
- {'www.example.com': '192.168.0.17'},
- {'api.example.com': '192.168.0.18'}
- ],
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'alpine',
+ 'extra_hosts': [
+ {'www.example.com': '192.168.0.17'},
+ {'api.example.com': '192.168.0.18'}
+ ],
+ }
}
}))
assert "which is an invalid type" in exc.exconly()
def test_normalize_dns_options(self):
actual = config.load(build_config_details({
- 'web': {
- 'image': 'alpine',
- 'dns': '8.8.8.8',
- 'dns_search': 'domain.local',
+ 'version': str(VERSION),
+ 'services': {
+ 'web': {
+ 'image': 'alpine',
+ 'dns': '8.8.8.8',
+ 'dns_search': 'domain.local',
+ }
}
}))
assert actual.services == [
@@ -1735,6 +1942,26 @@ class ConfigTest(unittest.TestCase):
}
]
+ @data(
+ '2 ',
+ '3.',
+ '3.0.0',
+ '3.0.a',
+ '3.a',
+ '3a')
+ def test_invalid_version_formats(self, version):
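+ # Each malformed version string above should fail validation with a clear message.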
+ content = {
+ 'version': version,
+ 'services': {
+ 'web': {
+ 'image': 'alpine',
+ }
+ }
+ }
+ with pytest.raises(ConfigurationError) as exc:
+ config.load(build_config_details(content))
+ assert 'Version "{}" in "filename.yml" is invalid.'.format(version) in exc.exconly()
+
def test_group_add_option(self):
actual = config.load(build_config_details({
'version': '2',
@@ -1775,7 +2002,6 @@ class ConfigTest(unittest.TestCase):
def test_isolation_option(self):
actual = config.load(build_config_details({
- 'version': str(V2_1),
'services': {
'web': {
'image': 'win10',
@@ -1794,7 +2020,6 @@ class ConfigTest(unittest.TestCase):
def test_runtime_option(self):
actual = config.load(build_config_details({
- 'version': str(V2_3),
'services': {
'web': {
'image': 'nvidia/cuda',
@@ -1916,7 +2141,7 @@ class ConfigTest(unittest.TestCase):
}
actual = config.merge_service_dicts_from_files(
- base, override, V3_2
+ base, override, VERSION
)
assert actual['volumes'] == [
@@ -1963,7 +2188,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
@@ -1997,7 +2222,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
@@ -2029,7 +2254,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
@@ -2061,7 +2286,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
@@ -2090,7 +2315,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
@@ -2110,7 +2335,7 @@ class ConfigTest(unittest.TestCase):
}
}
}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
@@ -2132,7 +2357,7 @@ class ConfigTest(unittest.TestCase):
}
}
override = {}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
@@ -2145,7 +2370,7 @@ class ConfigTest(unittest.TestCase):
def test_merge_mixed_ports(self):
base = {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'ports': [
{
@@ -2160,9 +2385,9 @@ class ConfigTest(unittest.TestCase):
'ports': ['1245:1245/udp']
}
- actual = config.merge_service_dicts(base, override, V3_1)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'ports': [types.ServicePort('1245', '1245', 'udp', None, None)]
}
@@ -2172,11 +2397,12 @@ class ConfigTest(unittest.TestCase):
'image': 'busybox',
'depends_on': {
'app1': {'condition': 'service_started'},
- 'app2': {'condition': 'service_healthy'}
+ 'app2': {'condition': 'service_healthy'},
+ 'app3': {'condition': 'service_completed_successfully'}
}
}
override = {}
- actual = config.merge_service_dicts(base, override, V2_1)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == base
def test_merge_depends_on_mixed_syntax(self):
@@ -2184,20 +2410,22 @@ class ConfigTest(unittest.TestCase):
'image': 'busybox',
'depends_on': {
'app1': {'condition': 'service_started'},
- 'app2': {'condition': 'service_healthy'}
+ 'app2': {'condition': 'service_healthy'},
+ 'app3': {'condition': 'service_completed_successfully'}
}
}
override = {
- 'depends_on': ['app3']
+ 'depends_on': ['app4']
}
- actual = config.merge_service_dicts(base, override, V2_1)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'busybox',
'depends_on': {
'app1': {'condition': 'service_started'},
'app2': {'condition': 'service_healthy'},
- 'app3': {'condition': 'service_started'}
+ 'app3': {'condition': 'service_completed_successfully'},
+ 'app4': {'condition': 'service_started'},
}
}
@@ -2229,7 +2457,7 @@ class ConfigTest(unittest.TestCase):
'labels': {'com.docker.compose.test': 'yes'}
}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'busybox',
'pid': 'host',
@@ -2245,7 +2473,7 @@ class ConfigTest(unittest.TestCase):
}
override = {'secrets': ['other-src.txt']}
- actual = config.merge_service_dicts(base, override, V3_1)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert secret_sort(actual['secrets']) == secret_sort([
{'source': 'src.txt'},
{'source': 'other-src.txt'}
@@ -2265,7 +2493,7 @@ class ConfigTest(unittest.TestCase):
}
]
}
- actual = config.merge_service_dicts(base, override, V3_1)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['secrets'] == override['secrets']
def test_merge_different_configs(self):
@@ -2277,7 +2505,7 @@ class ConfigTest(unittest.TestCase):
}
override = {'configs': ['other-src.txt']}
- actual = config.merge_service_dicts(base, override, V3_3)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert secret_sort(actual['configs']) == secret_sort([
{'source': 'src.txt'},
{'source': 'other-src.txt'}
@@ -2297,7 +2525,7 @@ class ConfigTest(unittest.TestCase):
}
]
}
- actual = config.merge_service_dicts(base, override, V3_3)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['configs'] == override['configs']
def test_merge_deploy(self):
@@ -2312,7 +2540,7 @@ class ConfigTest(unittest.TestCase):
}
}
}
- actual = config.merge_service_dicts(base, override, V3_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['deploy'] == override['deploy']
def test_merge_deploy_override(self):
@@ -2322,6 +2550,7 @@ class ConfigTest(unittest.TestCase):
'labels': ['com.docker.compose.a=1', 'com.docker.compose.b=2'],
'mode': 'replicated',
'placement': {
+ 'max_replicas_per_node': 1,
'constraints': [
'node.role == manager', 'engine.labels.aws == true'
],
@@ -2368,7 +2597,7 @@ class ConfigTest(unittest.TestCase):
'update_config': {'max_failure_ratio': 0.712, 'parallelism': 4}
}
}
- actual = config.merge_service_dicts(base, override, V3_5)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['deploy'] == {
'mode': 'replicated',
'endpoint_mode': 'vip',
@@ -2378,6 +2607,7 @@ class ConfigTest(unittest.TestCase):
'com.docker.compose.c': '3'
},
'placement': {
+ 'max_replicas_per_node': 1,
'constraints': [
'engine.labels.aws == true', 'engine.labels.dev == true',
'node.role == manager', 'node.role == worker'
@@ -2424,7 +2654,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V3_3)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['credential_spec'] == override['credential_spec']
def test_merge_scale(self):
@@ -2437,7 +2667,7 @@ class ConfigTest(unittest.TestCase):
'scale': 4,
}
- actual = config.merge_service_dicts(base, override, V2_2)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {'image': 'bar', 'scale': 4}
def test_merge_blkio_config(self):
@@ -2472,7 +2702,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_2)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'bar',
'blkio_config': {
@@ -2499,7 +2729,7 @@ class ConfigTest(unittest.TestCase):
'extra_hosts': ['bar:5.6.7.8', 'foo:127.0.0.1']
}
- actual = config.merge_service_dicts(base, override, V2_0)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['extra_hosts'] == {
'foo': '127.0.0.1',
'bar': '5.6.7.8',
@@ -2523,7 +2753,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_3)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['healthcheck'] == {
'start_period': base['healthcheck']['start_period'],
'test': override['healthcheck']['test'],
@@ -2549,7 +2779,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_3)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['healthcheck'] == {'disabled': True}
def test_merge_healthcheck_override_enables(self):
@@ -2571,7 +2801,7 @@ class ConfigTest(unittest.TestCase):
}
}
- actual = config.merge_service_dicts(base, override, V2_3)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert actual['healthcheck'] == override['healthcheck']
def test_merge_device_cgroup_rules(self):
@@ -2584,11 +2814,50 @@ class ConfigTest(unittest.TestCase):
'device_cgroup_rules': ['c 7:128 rwm', 'f 0:128 n']
}
- actual = config.merge_service_dicts(base, override, V2_3)
+ actual = config.merge_service_dicts(base, override, VERSION)
assert sorted(actual['device_cgroup_rules']) == sorted(
['c 7:128 rwm', 'x 3:244 rw', 'f 0:128 n']
)
+ def test_merge_isolation(self):
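+ # A scalar option like isolation is simply replaced by the override value.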
+ base = {
+ 'image': 'bar',
+ 'isolation': 'default',
+ }
+
+ override = {
+ 'isolation': 'hyperv',
+ }
+
+ actual = config.merge_service_dicts(base, override, VERSION)
+ assert actual == {
+ 'image': 'bar',
+ 'isolation': 'hyperv',
+ }
+
+ def test_merge_storage_opt(self):
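+ # storage_opt mappings merge key by key, with the override winning on conflicts.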
+ base = {
+ 'image': 'bar',
+ 'storage_opt': {
+ 'size': '1G',
+ 'readonly': 'false',
+ }
+ }
+
+ override = {
+ 'storage_opt': {
+ 'size': '2G',
+ 'encryption': 'aes',
+ }
+ }
+
+ actual = config.merge_service_dicts(base, override, VERSION)
+ assert actual['storage_opt'] == {
+ 'size': '2G',
+ 'readonly': 'false',
+ 'encryption': 'aes',
+ }
+
def test_external_volume_config(self):
config_details = build_config_details({
'version': '2',
@@ -2938,6 +3207,41 @@ class ConfigTest(unittest.TestCase):
)
config.load(config_details)
+ def test_config_duplicate_mount_points(self):
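+ # Two volume entries targeting the same container path must raise an error.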
+ config1 = build_config_details(
+ {
+ 'version': '3.5',
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'volumes': ['/tmp/foo:/tmp/foo', '/tmp/foo:/tmp/foo:rw']
+ }
+ }
+ }
+ )
+
+ config2 = build_config_details(
+ {
+ 'version': '3.5',
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'volumes': ['/x:/y', '/z:/y']
+ }
+ }
+ }
+ )
+
+ with self.assertRaises(ConfigurationError) as e:
+ config.load(config1)
+ self.assertEqual(str(e.exception), 'Duplicate mount points: [%s]' % (
+ ', '.join(['/tmp/foo:/tmp/foo:rw']*2)))
+
+ with self.assertRaises(ConfigurationError) as e:
+ config.load(config2)
+ self.assertEqual(str(e.exception), 'Duplicate mount points: [%s]' % (
+ ', '.join(['/x:/y:rw', '/z:/y:rw'])))
+
class NetworkModeTest(unittest.TestCase):
@@ -3104,6 +3408,7 @@ class PortsTest(unittest.TestCase):
assert "non-unique" in exc.value.msg
+ @pytest.mark.skip(reason="Validator uses a oneOf schema (generic error)")
def test_config_invalid_ports_format_validation(self):
for invalid_ports in self.INVALID_PORT_MAPPINGS:
with pytest.raises(ConfigurationError) as exc:
@@ -3263,6 +3568,27 @@ class InterpolationTest(unittest.TestCase):
}
@mock.patch.dict(os.environ)
+ def test_config_file_with_options_environment_file(self):
+ project_dir = 'tests/fixtures/default-env-file'
+ # env-file is relative to current working dir
+ env = Environment.from_env_file(project_dir, project_dir + '/.env2')
+ service_dicts = config.load(
+ config.find(
+ project_dir, None, env
+ )
+ ).services
+
+ assert service_dicts[0] == {
+ 'name': 'web',
+ 'image': 'alpine:latest',
+ 'ports': [
+ types.ServicePort.parse('5644')[0],
+ types.ServicePort.parse('9998')[0]
+ ],
+ 'command': 'false'
+ }
+
+ @mock.patch.dict(os.environ)
def test_config_file_with_environment_variable(self):
project_dir = 'tests/fixtures/environment-interpolation'
os.environ.update(
@@ -3329,11 +3655,12 @@ class InterpolationTest(unittest.TestCase):
with mock.patch('compose.config.environment.log') as log:
config.load(config_details)
- assert 2 == log.warn.call_count
- warnings = sorted(args[0][0] for args in log.warn.call_args_list)
+ assert 2 == log.warning.call_count
+ warnings = sorted(args[0][0] for args in log.warning.call_args_list)
assert 'BAR' in warnings[0]
assert 'FOO' in warnings[1]
+ @pytest.mark.skip(reason='compatibility mode was removed internally')
def test_compatibility_mode_warnings(self):
config_details = build_config_details({
'version': '3.5',
@@ -3360,25 +3687,25 @@ class InterpolationTest(unittest.TestCase):
with mock.patch('compose.config.config.log') as log:
config.load(config_details, compatibility=True)
- assert log.warn.call_count == 1
- warn_message = log.warn.call_args[0][0]
+ assert log.warning.call_count == 1
+ warn_message = log.warning.call_args[0][0]
assert warn_message.startswith(
'The following deploy sub-keys are not supported in compatibility mode'
)
assert 'labels' in warn_message
assert 'endpoint_mode' in warn_message
assert 'update_config' in warn_message
- assert 'placement' in warn_message
assert 'resources.reservations.cpus' in warn_message
assert 'restart_policy.delay' in warn_message
assert 'restart_policy.window' in warn_message
+ @pytest.mark.skip(reason='compatibility mode was removed internally')
def test_compatibility_mode_load(self):
config_details = build_config_details({
'version': '3.5',
'services': {
'foo': {
- 'image': 'alpine:3.7',
+ 'image': 'alpine:3.10.1',
'deploy': {
'replicas': 3,
'restart_policy': {
@@ -3390,6 +3717,9 @@ class InterpolationTest(unittest.TestCase):
'reservations': {'memory': '100M'},
},
},
+ 'credential_spec': {
+ 'file': 'spec.json'
+ },
},
},
})
@@ -3397,17 +3727,18 @@ class InterpolationTest(unittest.TestCase):
with mock.patch('compose.config.config.log') as log:
cfg = config.load(config_details, compatibility=True)
- assert log.warn.call_count == 0
+ assert log.warning.call_count == 0
service_dict = cfg.services[0]
assert service_dict == {
- 'image': 'alpine:3.7',
+ 'image': 'alpine:3.10.1',
'scale': 3,
'restart': {'MaximumRetryCount': 7, 'Name': 'always'},
'mem_limit': '300M',
'mem_reservation': '100M',
'cpus': 0.7,
- 'name': 'foo'
+ 'name': 'foo',
+ 'security_opt': ['credentialspec=file://spec.json'],
}
@mock.patch.dict(os.environ)
@@ -3483,6 +3814,13 @@ class VolumeConfigTest(unittest.TestCase):
assert d['volumes'] == [VolumeSpec.parse('/host/path:/container/path')]
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix paths')
+ def test_volumes_order_is_preserved(self):
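+ # Volumes should come back in the declared order, even when that order is random.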
+ volumes = ['/{0}:/{0}'.format(i) for i in range(0, 6)]
+ shuffle(volumes)
+ cfg = make_service_dict('foo', {'build': '.', 'volumes': volumes})
+ assert cfg['volumes'] == volumes
+
+ @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix paths')
@mock.patch.dict(os.environ)
def test_volume_binding_with_home(self):
os.environ['HOME'] = '/home/user'
@@ -3552,12 +3890,12 @@ class VolumeConfigTest(unittest.TestCase):
assert d['volumes'] == ['~:/data']
def test_volume_path_with_non_ascii_directory(self):
- volume = u'/Füü/data:/data'
+ volume = '/Füü/data:/data'
container_path = config.resolve_volume_path(".", volume)
assert container_path == volume
-class MergePathMappingTest(object):
+class MergePathMappingTest:
config_name = ""
def test_empty(self):
@@ -3569,35 +3907,35 @@ class MergePathMappingTest(object):
{self.config_name: ['/foo:/code', '/data']},
{},
DEFAULT_VERSION)
- assert set(service_dict[self.config_name]) == set(['/foo:/code', '/data'])
+ assert set(service_dict[self.config_name]) == {'/foo:/code', '/data'}
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{self.config_name: ['/bar:/code']},
DEFAULT_VERSION)
- assert set(service_dict[self.config_name]) == set(['/bar:/code'])
+ assert set(service_dict[self.config_name]) == {'/bar:/code'}
def test_override_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name: ['/foo:/code', '/data']},
{self.config_name: ['/bar:/code']},
DEFAULT_VERSION)
- assert set(service_dict[self.config_name]) == set(['/bar:/code', '/data'])
+ assert set(service_dict[self.config_name]) == {'/bar:/code', '/data'}
def test_add_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name: ['/foo:/code', '/data']},
{self.config_name: ['/bar:/code', '/quux:/data']},
DEFAULT_VERSION)
- assert set(service_dict[self.config_name]) == set(['/bar:/code', '/quux:/data'])
+ assert set(service_dict[self.config_name]) == {'/bar:/code', '/quux:/data'}
def test_remove_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name: ['/foo:/code', '/quux:/data']},
{self.config_name: ['/bar:/code', '/data']},
DEFAULT_VERSION)
- assert set(service_dict[self.config_name]) == set(['/bar:/code', '/data'])
+ assert set(service_dict[self.config_name]) == {'/bar:/code', '/data'}
class MergeVolumesTest(unittest.TestCase, MergePathMappingTest):
@@ -3630,7 +3968,7 @@ class BuildOrImageMergeTest(unittest.TestCase):
assert config.merge_service_dicts({'image': 'redis'}, {'build': '.'}, V1) == {'build': '.'}
-class MergeListsTest(object):
+class MergeListsTest:
config_name = ""
base_config = []
override_config = []
@@ -3703,8 +4041,95 @@ class MergePortsTest(unittest.TestCase, MergeListsTest):
class MergeNetworksTest(unittest.TestCase, MergeListsTest):
config_name = 'networks'
- base_config = ['frontend', 'backend']
- override_config = ['monitoring']
+ base_config = {'default': {'aliases': ['foo.bar', 'foo.baz']}}
+ override_config = {'default': {'ipv4_address': '123.234.123.234'}}
+
+ def test_no_network_overrides(self):
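+ # Settings for the same network in base and override are merged together.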
+ service_dict = config.merge_service_dicts(
+ {self.config_name: self.base_config},
+ {self.config_name: self.override_config},
+ DEFAULT_VERSION)
+ assert service_dict[self.config_name] == {
+ 'default': {
+ 'aliases': ['foo.bar', 'foo.baz'],
+ 'ipv4_address': '123.234.123.234'
+ }
+ }
+
+ def test_network_has_none_value(self):
+ service_dict = config.merge_service_dicts(
+ {self.config_name: {
+ 'default': None
+ }},
+ {self.config_name: {
+ 'default': {
+ 'aliases': []
+ }
+ }},
+ DEFAULT_VERSION)
+
+ assert service_dict[self.config_name] == {
+ 'default': {
+ 'aliases': []
+ }
+ }
+
+ def test_all_properties(self):
+ service_dict = config.merge_service_dicts(
+ {self.config_name: {
+ 'default': {
+ 'aliases': ['foo.bar', 'foo.baz'],
+ 'link_local_ips': ['192.168.1.10', '192.168.1.11'],
+ 'ipv4_address': '111.111.111.111',
+ 'ipv6_address': 'FE80:CD00:0000:0CDE:1257:0000:211E:729C-first'
+ }
+ }},
+ {self.config_name: {
+ 'default': {
+ 'aliases': ['foo.baz', 'foo.baz2'],
+ 'link_local_ips': ['192.168.1.11', '192.168.1.12'],
+ 'ipv4_address': '123.234.123.234',
+ 'ipv6_address': 'FE80:CD00:0000:0CDE:1257:0000:211E:729C-second'
+ }
+ }},
+ DEFAULT_VERSION)
+
+ assert service_dict[self.config_name] == {
+ 'default': {
+ 'aliases': ['foo.bar', 'foo.baz', 'foo.baz2'],
+ 'link_local_ips': ['192.168.1.10', '192.168.1.11', '192.168.1.12'],
+ 'ipv4_address': '123.234.123.234',
+ 'ipv6_address': 'FE80:CD00:0000:0CDE:1257:0000:211E:729C-second'
+ }
+ }
+
+ def test_no_network_name_overrides(self):
+ service_dict = config.merge_service_dicts(
+ {
+ self.config_name: {
+ 'default': {
+ 'aliases': ['foo.bar', 'foo.baz'],
+ 'ipv4_address': '123.234.123.234'
+ }
+ }
+ },
+ {
+ self.config_name: {
+ 'another_network': {
+ 'ipv4_address': '123.234.123.234'
+ }
+ }
+ },
+ DEFAULT_VERSION)
+ assert service_dict[self.config_name] == {
+ 'default': {
+ 'aliases': ['foo.bar', 'foo.baz'],
+ 'ipv4_address': '123.234.123.234'
+ },
+ 'another_network': {
+ 'ipv4_address': '123.234.123.234'
+ }
+ }
class MergeStringsOrListsTest(unittest.TestCase):
@@ -3714,28 +4139,28 @@ class MergeStringsOrListsTest(unittest.TestCase):
{'dns': '8.8.8.8'},
{},
DEFAULT_VERSION)
- assert set(service_dict['dns']) == set(['8.8.8.8'])
+ assert set(service_dict['dns']) == {'8.8.8.8'}
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{'dns': '8.8.8.8'},
DEFAULT_VERSION)
- assert set(service_dict['dns']) == set(['8.8.8.8'])
+ assert set(service_dict['dns']) == {'8.8.8.8'}
def test_add_string(self):
service_dict = config.merge_service_dicts(
{'dns': ['8.8.8.8']},
{'dns': '9.9.9.9'},
DEFAULT_VERSION)
- assert set(service_dict['dns']) == set(['8.8.8.8', '9.9.9.9'])
+ assert set(service_dict['dns']) == {'8.8.8.8', '9.9.9.9'}
def test_add_list(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{'dns': ['9.9.9.9']},
DEFAULT_VERSION)
- assert set(service_dict['dns']) == set(['8.8.8.8', '9.9.9.9'])
+ assert set(service_dict['dns']) == {'8.8.8.8', '9.9.9.9'}
class MergeLabelsTest(unittest.TestCase):
@@ -3807,7 +4232,7 @@ class MergeBuildTest(unittest.TestCase):
assert result['context'] == override['context']
assert result['dockerfile'] == override['dockerfile']
assert result['args'] == {'x': '12', 'y': '2'}
- assert set(result['cache_from']) == set(['ubuntu', 'debian'])
+ assert set(result['cache_from']) == {'ubuntu', 'debian'}
assert result['labels'] == override['labels']
def test_empty_override(self):
@@ -3976,7 +4401,7 @@ class EnvTest(unittest.TestCase):
{'env_file': ['tests/fixtures/env/resolve.env']},
Environment.from_env_file(None)
) == {
- 'FILE_DEF': u'bär',
+ 'FILE_DEF': 'bär',
'FILE_DEF_EMPTY': '',
'ENV_DEF': 'E3',
'NO_DEF': None
@@ -4007,19 +4432,21 @@ class EnvTest(unittest.TestCase):
service_dict = config.load(
build_config_details(
- {'foo': {'build': '.', 'volumes': ['$HOSTENV:$CONTAINERENV']}},
+ {'services': {
+ 'foo': {'build': '.', 'volumes': ['$HOSTENV:$CONTAINERENV']}}},
"tests/fixtures/env",
)
).services[0]
- assert set(service_dict['volumes']) == set([VolumeSpec.parse('/tmp:/host/tmp')])
+ assert set(service_dict['volumes']) == {VolumeSpec.parse('/tmp:/host/tmp')}
service_dict = config.load(
build_config_details(
- {'foo': {'build': '.', 'volumes': ['/opt${HOSTENV}:/opt${CONTAINERENV}']}},
+ {'services': {
+ 'foo': {'build': '.', 'volumes': ['/opt${HOSTENV}:/opt${CONTAINERENV}']}}},
"tests/fixtures/env",
)
).services[0]
- assert set(service_dict['volumes']) == set([VolumeSpec.parse('/opt/tmp:/opt/host/tmp')])
+ assert set(service_dict['volumes']) == {VolumeSpec.parse('/opt/tmp:/opt/host/tmp')}
def load_from_filename(filename, override_dir=None):
@@ -4136,7 +4563,11 @@ class ExtendsTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'web': {'image': 'busybox', 'extends': {}},
+ 'version': '3',
+ 'services':
+ {
+ 'web': {'image': 'busybox', 'extends': {}},
+ }
},
'tests/fixtures/extends',
'filename.yml'
@@ -4150,7 +4581,14 @@ class ExtendsTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'web': {'image': 'busybox', 'extends': {'file': 'common.yml'}},
+ 'version': '3',
+ 'services':
+ {
+ 'web': {
+ 'image': 'busybox',
+ 'extends': {'file': 'common.yml'}
+ }
+ }
},
'tests/fixtures/extends',
'filename.yml'
@@ -4164,14 +4602,18 @@ class ExtendsTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'web': {
- 'image': 'busybox',
- 'extends': {
- 'file': 'common.yml',
- 'service': 'web',
- 'rogue_key': 'is not allowed'
- }
- },
+ 'version': '3',
+ 'services':
+ {
+ 'web': {
+ 'image': 'busybox',
+ 'extends': {
+ 'file': 'common.yml',
+ 'service': 'web',
+ 'rogue_key': 'is not allowed'
+ }
+ },
+ }
},
'tests/fixtures/extends',
'filename.yml'
@@ -4186,11 +4628,14 @@ class ExtendsTest(unittest.TestCase):
config.load(
build_config_details(
{
- 'web': {
- 'image': 'busybox',
- 'extends': {
- 'file': 1,
- 'service': 'web',
+ 'version': '3',
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'extends': {
+ 'file': 1,
+ 'service': 'web',
+ }
}
},
},
@@ -4360,43 +4805,48 @@ class ExtendsTest(unittest.TestCase):
@mock.patch.dict(os.environ)
def test_extends_with_environment_and_env_files(self):
- tmpdir = py.test.ensuretemp('test_extends_with_environment')
- self.addCleanup(tmpdir.remove)
- commondir = tmpdir.mkdir('common')
- commondir.join('base.yml').write("""
- app:
- image: 'example/app'
- env_file:
- - 'envs'
- environment:
- - SECRET
- - TEST_ONE=common
- - TEST_TWO=common
- """)
- tmpdir.join('docker-compose.yml').write("""
- ext:
- extends:
- file: common/base.yml
- service: app
- env_file:
- - 'envs'
- environment:
- - THING
- - TEST_ONE=top
- """)
- commondir.join('envs').write("""
- COMMON_ENV_FILE
- TEST_ONE=common-env-file
- TEST_TWO=common-env-file
- TEST_THREE=common-env-file
- TEST_FOUR=common-env-file
- """)
- tmpdir.join('envs').write("""
- TOP_ENV_FILE
- TEST_ONE=top-env-file
- TEST_TWO=top-env-file
- TEST_THREE=top-env-file
- """)
+ tmpdir = tempfile.mkdtemp('test_extends_with_environment')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ commondir = os.path.join(tmpdir, 'common')
+ os.mkdir(commondir)
+ with open(os.path.join(commondir, 'base.yml'), mode="w") as base_fh:
+ base_fh.write("""
+ app:
+ image: 'example/app'
+ env_file:
+ - 'envs'
+ environment:
+ - SECRET
+ - TEST_ONE=common
+ - TEST_TWO=common
+ """)
+ with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+ docker_compose_fh.write("""
+ ext:
+ extends:
+ file: common/base.yml
+ service: app
+ env_file:
+ - 'envs'
+ environment:
+ - THING
+ - TEST_ONE=top
+ """)
+ with open(os.path.join(commondir, 'envs'), mode="w") as envs_fh:
+ envs_fh.write("""
+ COMMON_ENV_FILE
+ TEST_ONE=common-env-file
+ TEST_TWO=common-env-file
+ TEST_THREE=common-env-file
+ TEST_FOUR=common-env-file
+ """)
+ with open(os.path.join(tmpdir, 'envs'), mode="w") as envs_fh:
+ envs_fh.write("""
+ TOP_ENV_FILE
+ TEST_ONE=top-env-file
+ TEST_TWO=top-env-file
+ TEST_THREE=top-env-file
+ """)
expected = [
{
@@ -4419,72 +4869,77 @@ class ExtendsTest(unittest.TestCase):
os.environ['THING'] = 'thing'
os.environ['COMMON_ENV_FILE'] = 'secret'
os.environ['TOP_ENV_FILE'] = 'secret'
- config = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+ config = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert config == expected
def test_extends_with_mixed_versions_is_error(self):
- tmpdir = py.test.ensuretemp('test_extends_with_mixed_version')
- self.addCleanup(tmpdir.remove)
- tmpdir.join('docker-compose.yml').write("""
- version: "2"
- services:
- web:
- extends:
- file: base.yml
- service: base
- image: busybox
- """)
- tmpdir.join('base.yml').write("""
- base:
- volumes: ['/foo']
- ports: ['3000:3000']
- """)
+ tmpdir = tempfile.mkdtemp('test_extends_with_mixed_version')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+ docker_compose_fh.write("""
+ version: "2"
+ services:
+ web:
+ extends:
+ file: base.yml
+ service: base
+ image: busybox
+ """)
+ with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
+ base_fh.write("""
+ base:
+ volumes: ['/foo']
+ ports: ['3000:3000']
+ """)
with pytest.raises(ConfigurationError) as exc:
- load_from_filename(str(tmpdir.join('docker-compose.yml')))
+ load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert 'Version mismatch' in exc.exconly()
def test_extends_with_defined_version_passes(self):
- tmpdir = py.test.ensuretemp('test_extends_with_defined_version')
- self.addCleanup(tmpdir.remove)
- tmpdir.join('docker-compose.yml').write("""
- version: "2"
- services:
- web:
- extends:
- file: base.yml
- service: base
- image: busybox
- """)
- tmpdir.join('base.yml').write("""
- version: "2"
- services:
- base:
- volumes: ['/foo']
- ports: ['3000:3000']
- command: top
- """)
-
- service = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+ tmpdir = tempfile.mkdtemp('test_extends_with_defined_version')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+ docker_compose_fh.write("""
+ version: "2"
+ services:
+ web:
+ extends:
+ file: base.yml
+ service: base
+ image: busybox
+ """)
+ with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
+ base_fh.write("""
+ version: "2"
+ services:
+ base:
+ volumes: ['/foo']
+ ports: ['3000:3000']
+ command: top
+ """)
+
+ service = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert service[0]['command'] == "top"
def test_extends_with_depends_on(self):
- tmpdir = py.test.ensuretemp('test_extends_with_depends_on')
- self.addCleanup(tmpdir.remove)
- tmpdir.join('docker-compose.yml').write("""
- version: "2"
- services:
- base:
- image: example
- web:
- extends: base
- image: busybox
- depends_on: ['other']
- other:
- image: example
- """)
- services = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+ tmpdir = tempfile.mkdtemp('test_extends_with_depends_on')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+ docker_compose_fh.write("""
+ version: "2"
+ services:
+ base:
+ image: example
+ web:
+ extends: base
+ image: busybox
+ depends_on: ['other']
+ other:
+ image: example
+ """)
+ services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert service_sort(services)[2]['depends_on'] == {
'other': {'condition': 'service_started'}
}
@@ -4503,50 +4958,57 @@ class ExtendsTest(unittest.TestCase):
}]
def test_extends_with_ports(self):
- tmpdir = py.test.ensuretemp('test_extends_with_ports')
- self.addCleanup(tmpdir.remove)
- tmpdir.join('docker-compose.yml').write("""
- version: '2'
-
- services:
- a:
- image: nginx
- ports:
- - 80
-
- b:
- extends:
- service: a
- """)
- services = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+ tmpdir = tempfile.mkdtemp('test_extends_with_ports')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+ docker_compose_fh.write("""
+ version: '2'
+
+ services:
+ a:
+ image: nginx
+ ports:
+ - 80
+
+ b:
+ extends:
+ service: a
+ """)
+ services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert len(services) == 2
for svc in services:
assert svc['ports'] == [types.ServicePort('80', None, None, None, None)]
def test_extends_with_security_opt(self):
- tmpdir = py.test.ensuretemp('test_extends_with_ports')
- self.addCleanup(tmpdir.remove)
- tmpdir.join('docker-compose.yml').write("""
- version: '2'
-
- services:
- a:
- image: nginx
- security_opt:
- - apparmor:unconfined
- - seccomp:unconfined
-
- b:
- extends:
- service: a
- """)
- services = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+ tmpdir = tempfile.mkdtemp('test_extends_with_ports')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+ docker_compose_fh.write("""
+ version: '2'
+
+ services:
+ a:
+ image: nginx
+ security_opt:
+ - apparmor:unconfined
+ - seccomp:unconfined
+
+ b:
+ extends:
+ service: a
+ """)
+ services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert len(services) == 2
for svc in services:
assert types.SecurityOpt.parse('apparmor:unconfined') in svc['security_opt']
assert types.SecurityOpt.parse('seccomp:unconfined') in svc['security_opt']
+ @mock.patch.object(ConfigFile, 'from_filename', wraps=ConfigFile.from_filename)
+ def test_extends_same_file_optimization(self, from_filename_mock):
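+ # Extending a service from the same file should not trigger a second file read.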
+ load_from_filename('tests/fixtures/extends/no-file-specified.yml')
+ from_filename_mock.assert_called_once()
+
@pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
class ExpandPathTest(unittest.TestCase):
@@ -4585,14 +5047,14 @@ class VolumePathTest(unittest.TestCase):
container_path = 'c:\\scarletdevil\\data'
expected_mapping = (container_path, (host_path, None))
- mapping = config.split_path_mapping('{0}:{1}'.format(host_path, container_path))
+ mapping = config.split_path_mapping('{}:{}'.format(host_path, container_path))
assert mapping == expected_mapping
def test_split_path_mapping_with_root_mount(self):
host_path = '/'
container_path = '/var/hostroot'
expected_mapping = (container_path, (host_path, None))
- mapping = config.split_path_mapping('{0}:{1}'.format(host_path, container_path))
+ mapping = config.split_path_mapping('{}:{}'.format(host_path, container_path))
assert mapping == expected_mapping
@@ -4691,7 +5153,7 @@ class HealthcheckTest(unittest.TestCase):
})
)
- serialized_config = yaml.load(serialize_config(config_dict))
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['test']
assert serialized_service['healthcheck'] == {
@@ -4718,7 +5180,7 @@ class HealthcheckTest(unittest.TestCase):
})
)
- serialized_config = yaml.load(serialize_config(config_dict))
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['test']
assert serialized_service['healthcheck'] == {
@@ -4776,6 +5238,8 @@ class GetDefaultConfigFilesTestCase(unittest.TestCase):
files = [
'docker-compose.yml',
'docker-compose.yaml',
+ 'compose.yml',
+ 'compose.yaml',
]
def test_get_config_path_default_file_in_basedir(self):
@@ -4809,30 +5273,16 @@ def get_config_filename_for_files(filenames, subdir=None):
base_dir = tempfile.mkdtemp(dir=project_dir)
else:
base_dir = project_dir
- filename, = config.get_default_config_files(base_dir)
- return os.path.basename(filename)
+ filenames = config.get_default_config_files(base_dir)
+ if not filenames:
+ raise config.ComposeFileNotFound(config.SUPPORTED_FILENAMES)
+ return os.path.basename(filenames[0])
finally:
shutil.rmtree(project_dir)
class SerializeTest(unittest.TestCase):
- def test_denormalize_depends_on_v3(self):
- service_dict = {
- 'image': 'busybox',
- 'command': 'true',
- 'depends_on': {
- 'service2': {'condition': 'service_started'},
- 'service3': {'condition': 'service_started'},
- }
- }
-
- assert denormalize_service_dict(service_dict, V3_0) == {
- 'image': 'busybox',
- 'command': 'true',
- 'depends_on': ['service2', 'service3']
- }
-
- def test_denormalize_depends_on_v2_1(self):
+ def test_denormalize_depends(self):
service_dict = {
'image': 'busybox',
'command': 'true',
@@ -4842,7 +5292,7 @@ class SerializeTest(unittest.TestCase):
}
}
- assert denormalize_service_dict(service_dict, V2_1) == service_dict
+ assert denormalize_service_dict(service_dict, VERSION) == service_dict
def test_serialize_time(self):
data = {
@@ -4876,7 +5326,7 @@ class SerializeTest(unittest.TestCase):
processed_service = config.process_service(config.ServiceConfig(
'.', 'test', 'test', service_dict
))
- denormalized_service = denormalize_service_dict(processed_service, V2_3)
+ denormalized_service = denormalize_service_dict(processed_service, VERSION)
assert denormalized_service['healthcheck']['interval'] == '100s'
assert denormalized_service['healthcheck']['timeout'] == '30s'
assert denormalized_service['healthcheck']['start_period'] == '2090ms'
@@ -4887,7 +5337,7 @@ class SerializeTest(unittest.TestCase):
}
image_digest = 'busybox@sha256:abcde'
- assert denormalize_service_dict(service_dict, V3_0, image_digest) == {
+ assert denormalize_service_dict(service_dict, VERSION, image_digest) == {
'image': 'busybox@sha256:abcde'
}
@@ -4896,7 +5346,7 @@ class SerializeTest(unittest.TestCase):
'image': 'busybox'
}
- assert denormalize_service_dict(service_dict, V3_0) == {
+ assert denormalize_service_dict(service_dict, VERSION) == {
'image': 'busybox'
}
@@ -4925,14 +5375,14 @@ class SerializeTest(unittest.TestCase):
'secrets': secrets_dict
}))
- serialized_config = yaml.load(serialize_config(config_dict))
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['web']
assert secret_sort(serialized_service['secrets']) == secret_sort(service_dict['secrets'])
assert 'secrets' in serialized_config
- assert serialized_config['secrets']['two'] == secrets_dict['two']
+ assert serialized_config['secrets']['two'] == {'external': True, 'name': 'two'}
def test_serialize_ports(self):
- config_dict = config.Config(version=V2_0, services=[
+ config_dict = config.Config(config_version=VERSION, version=VERSION, services=[
{
'ports': [types.ServicePort('80', '8080', None, None, None)],
'image': 'alpine',
@@ -4940,11 +5390,23 @@ class SerializeTest(unittest.TestCase):
}
], volumes={}, networks={}, secrets={}, configs={})
- serialized_config = yaml.load(serialize_config(config_dict))
- assert '8080:80/tcp' in serialized_config['services']['web']['ports']
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
+ assert [{'published': 8080, 'target': 80}] == serialized_config['services']['web']['ports']
+
+ def test_serialize_ports_v1(self):
+ config_dict = config.Config(config_version=V1, version=V1, services=[
+ {
+ 'ports': [types.ServicePort('80', '8080', None, None, None)],
+ 'image': 'alpine',
+ 'name': 'web'
+ }
+ ], volumes={}, networks={}, secrets={}, configs={})
+
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
+ assert ['8080:80/tcp'] == serialized_config['services']['web']['ports']
def test_serialize_ports_with_ext_ip(self):
- config_dict = config.Config(version=V3_5, services=[
+ config_dict = config.Config(config_version=VERSION, version=VERSION, services=[
{
'ports': [types.ServicePort('80', '8080', None, None, '127.0.0.1')],
'image': 'alpine',
@@ -4952,7 +5414,7 @@ class SerializeTest(unittest.TestCase):
}
], volumes={}, networks={}, secrets={}, configs={})
- serialized_config = yaml.load(serialize_config(config_dict))
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
assert '127.0.0.1:8080:80/tcp' in serialized_config['services']['web']['ports']
def test_serialize_configs(self):
@@ -4980,11 +5442,11 @@ class SerializeTest(unittest.TestCase):
'configs': configs_dict
}))
- serialized_config = yaml.load(serialize_config(config_dict))
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['web']
assert secret_sort(serialized_service['configs']) == secret_sort(service_dict['configs'])
assert 'configs' in serialized_config
- assert serialized_config['configs']['two'] == configs_dict['two']
+ assert serialized_config['configs']['two'] == {'external': True, 'name': 'two'}
def test_serialize_bool_string(self):
cfg = {
@@ -5020,12 +5482,38 @@ class SerializeTest(unittest.TestCase):
}
config_dict = config.load(build_config_details(cfg))
- serialized_config = yaml.load(serialize_config(config_dict))
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['web']
assert serialized_service['environment']['CURRENCY'] == '$$'
assert serialized_service['command'] == 'echo $$FOO'
assert serialized_service['entrypoint'][0] == '$$SHELL'
+ def test_serialize_escape_dont_interpolate(self):
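+ # With interpolation off and escape_dollar=False, '$' should pass through unescaped.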
+ cfg = {
+ 'version': '2.2',
+ 'services': {
+ 'web': {
+ 'image': 'busybox',
+ 'command': 'echo $FOO',
+ 'environment': {
+ 'CURRENCY': '$'
+ },
+ 'env_file': ['tests/fixtures/env/three.env'],
+ 'entrypoint': ['$SHELL', '-c'],
+ }
+ }
+ }
+ config_dict = config.load(build_config_details(cfg, working_dir='.'), interpolate=False)
+
+ serialized_config = yaml.safe_load(serialize_config(config_dict, escape_dollar=False))
+ serialized_service = serialized_config['services']['web']
+ assert serialized_service['environment']['CURRENCY'] == '$'
+ # Values coming from env_files are not allowed to have variables
+ assert serialized_service['environment']['FOO'] == 'NO $$ENV VAR'
+ assert serialized_service['environment']['DOO'] == 'NO $${ENV} VAR'
+ assert serialized_service['command'] == 'echo $FOO'
+ assert serialized_service['entrypoint'][0] == '$SHELL'
+
def test_serialize_unicode_values(self):
cfg = {
'version': '2.3',
@@ -5039,6 +5527,22 @@ class SerializeTest(unittest.TestCase):
config_dict = config.load(build_config_details(cfg))
- serialized_config = yaml.load(serialize_config(config_dict))
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['web']
assert serialized_service['command'] == 'echo 十六夜 咲夜'
+
+ def test_serialize_external_false(self):
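+ # An explicit 'external: False' must survive the serialization round trip.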
+ cfg = {
+ 'version': '3.4',
+ 'volumes': {
+ 'test': {
+ 'name': 'test-false',
+ 'external': False
+ }
+ }
+ }
+
+ config_dict = config.load(build_config_details(cfg))
+ serialized_config = yaml.safe_load(serialize_config(config_dict))
+ serialized_volume = serialized_config['volumes']['test']
+ assert serialized_volume['external'] is False
diff --git a/tests/unit/config/environment_test.py b/tests/unit/config/environment_test.py
index 854aee5a..6a80ff12 100644
--- a/tests/unit/config/environment_test.py
+++ b/tests/unit/config/environment_test.py
@@ -1,18 +1,20 @@
-# encoding: utf-8
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
import codecs
+import os
+import shutil
+import tempfile
-import pytest
+from ddt import data
+from ddt import ddt
+from ddt import unpack
from compose.config.environment import env_vars_from_file
from compose.config.environment import Environment
from tests import unittest
+@ddt
class EnvironmentTest(unittest.TestCase):
+ @classmethod
def test_get_simple(self):
env = Environment({
'FOO': 'bar',
@@ -24,12 +26,14 @@ class EnvironmentTest(unittest.TestCase):
assert env.get('BAR') == '1'
assert env.get('BAZ') == ''
+ @classmethod
def test_get_undefined(self):
env = Environment({
'FOO': 'bar'
})
assert env.get('FOOBAR') is None
+ @classmethod
def test_get_boolean(self):
env = Environment({
'FOO': '',
@@ -44,11 +48,18 @@ class EnvironmentTest(unittest.TestCase):
assert env.get_boolean('FOOBAR') is True
assert env.get_boolean('UNDEFINED') is False
- def test_env_vars_from_file_bom(self):
- tmpdir = pytest.ensuretemp('env_file')
- self.addCleanup(tmpdir.remove)
- with codecs.open('{}/bom.env'.format(str(tmpdir)), 'w', encoding='utf-8') as f:
- f.write('\ufeffPARK_BOM=박봄\n')
- assert env_vars_from_file(str(tmpdir.join('bom.env'))) == {
- 'PARK_BOM': '박봄'
- }
+ @data(
+ ('unicode exclude test', '\ufeffPARK_BOM=박봄\n', {'PARK_BOM': '박봄'}),
+ ('export prefixed test', 'export PREFIXED_VARS=yes\n', {"PREFIXED_VARS": "yes"}),
+ ('quoted vars test', "QUOTED_VARS='yes'\n", {"QUOTED_VARS": "yes"}),
+ ('double quoted vars test', 'DOUBLE_QUOTED_VARS="yes"\n', {"DOUBLE_QUOTED_VARS": "yes"}),
+ ('extra spaces test', 'SPACES_VARS = "yes"\n', {"SPACES_VARS": "yes"}),
+ )
+ @unpack
+ def test_env_vars(self, test_name, content, expected):
+ tmpdir = tempfile.mkdtemp('env_file')
+ self.addCleanup(shutil.rmtree, tmpdir)
+ file_abs_path = str(os.path.join(tmpdir, ".env"))
+ with codecs.open(file_abs_path, 'w', encoding='utf-8') as f:
+ f.write(content)
+ assert env_vars_from_file(file_abs_path) == expected, '"{}" Failed'.format(test_name)
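
The new data-driven test exercises BOM stripping, `export ` prefixes, quoting, and stray spaces around `=`. A simplified sketch of that parsing, using a hypothetical parse_env_line helper rather than compose's actual env_vars_from_file:

    def parse_env_line(line):
        # hypothetical, simplified line parser for illustration only
        line = line.lstrip('\ufeff').strip()        # drop UTF-8 BOM and whitespace
        if not line or line.startswith('#'):
            return None
        if line.startswith('export '):              # tolerate shell-style prefix
            line = line[len('export '):]
        key, _, value = line.partition('=')
        key, value = key.strip(), value.strip()
        if len(value) >= 2 and value[0] == value[-1] and value[0] in '\'"':
            value = value[1:-1]                     # unquote 'yes' / "yes"
        return key, value

    assert parse_env_line('\ufeffPARK_BOM=박봄') == ('PARK_BOM', '박봄')
    assert parse_env_line('export PREFIXED_VARS=yes') == ('PREFIXED_VARS', 'yes')
    assert parse_env_line('SPACES_VARS = "yes"') == ('SPACES_VARS', 'yes')
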
diff --git a/tests/unit/config/interpolation_test.py b/tests/unit/config/interpolation_test.py
index 0d0e7d28..1fd50d60 100644
--- a/tests/unit/config/interpolation_test.py
+++ b/tests/unit/config/interpolation_test.py
@@ -1,7 +1,3 @@
-# encoding: utf-8
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import pytest
from compose.config.environment import Environment
@@ -11,9 +7,7 @@ from compose.config.interpolation import Interpolator
from compose.config.interpolation import InvalidInterpolation
from compose.config.interpolation import TemplateWithDefaults
from compose.config.interpolation import UnsetRequiredSubstitution
-from compose.const import COMPOSEFILE_V2_0 as V2_0
-from compose.const import COMPOSEFILE_V2_3 as V2_3
-from compose.const import COMPOSEFILE_V3_4 as V3_4
+from compose.const import COMPOSE_SPEC as VERSION
@pytest.fixture
@@ -66,7 +60,7 @@ def test_interpolate_environment_variables_in_services(mock_env):
}
}
}
- value = interpolate_environment_variables(V2_0, services, 'service', mock_env)
+ value = interpolate_environment_variables(VERSION, services, 'service', mock_env)
assert value == expected
@@ -91,7 +85,7 @@ def test_interpolate_environment_variables_in_volumes(mock_env):
},
'other': {},
}
- value = interpolate_environment_variables(V2_0, volumes, 'volume', mock_env)
+ value = interpolate_environment_variables(VERSION, volumes, 'volume', mock_env)
assert value == expected
@@ -116,7 +110,7 @@ def test_interpolate_environment_variables_in_secrets(mock_env):
},
'other': {},
}
- value = interpolate_environment_variables(V3_4, secrets, 'secret', mock_env)
+ value = interpolate_environment_variables(VERSION, secrets, 'secret', mock_env)
assert value == expected
@@ -187,7 +181,7 @@ def test_interpolate_environment_services_convert_types_v2(mock_env):
}
}
- value = interpolate_environment_variables(V2_3, entry, 'service', mock_env)
+ value = interpolate_environment_variables(VERSION, entry, 'service', mock_env)
assert value == expected
@@ -260,7 +254,7 @@ def test_interpolate_environment_services_convert_types_v3(mock_env):
}
}
- value = interpolate_environment_variables(V3_4, entry, 'service', mock_env)
+ value = interpolate_environment_variables(VERSION, entry, 'service', mock_env)
assert value == expected
@@ -268,21 +262,21 @@ def test_interpolate_environment_services_convert_types_invalid(mock_env):
entry = {'service1': {'privileged': '${POSINT}'}}
with pytest.raises(ConfigurationError) as exc:
- interpolate_environment_variables(V2_3, entry, 'service', mock_env)
+ interpolate_environment_variables(VERSION, entry, 'service', mock_env)
assert 'Error while attempting to convert service.service1.privileged to '\
'appropriate type: "50" is not a valid boolean value' in exc.exconly()
entry = {'service1': {'cpus': '${TRUE}'}}
with pytest.raises(ConfigurationError) as exc:
- interpolate_environment_variables(V2_3, entry, 'service', mock_env)
+ interpolate_environment_variables(VERSION, entry, 'service', mock_env)
assert 'Error while attempting to convert service.service1.cpus to '\
'appropriate type: "True" is not a valid float' in exc.exconly()
entry = {'service1': {'ulimits': {'nproc': '${FLOAT}'}}}
with pytest.raises(ConfigurationError) as exc:
- interpolate_environment_variables(V2_3, entry, 'service', mock_env)
+ interpolate_environment_variables(VERSION, entry, 'service', mock_env)
assert 'Error while attempting to convert service.service1.ulimits.nproc to '\
'appropriate type: "0.145" is not a valid integer' in exc.exconly()
@@ -305,7 +299,7 @@ def test_interpolate_environment_network_convert_types(mock_env):
}
}
- value = interpolate_environment_variables(V3_4, entry, 'network', mock_env)
+ value = interpolate_environment_variables(VERSION, entry, 'network', mock_env)
assert value == expected
@@ -322,13 +316,44 @@ def test_interpolate_environment_external_resource_convert_types(mock_env):
}
}
- value = interpolate_environment_variables(V3_4, entry, 'network', mock_env)
+ value = interpolate_environment_variables(VERSION, entry, 'network', mock_env)
+ assert value == expected
+ value = interpolate_environment_variables(VERSION, entry, 'volume', mock_env)
assert value == expected
- value = interpolate_environment_variables(V3_4, entry, 'volume', mock_env)
+ value = interpolate_environment_variables(VERSION, entry, 'secret', mock_env)
assert value == expected
- value = interpolate_environment_variables(V3_4, entry, 'secret', mock_env)
+ value = interpolate_environment_variables(VERSION, entry, 'config', mock_env)
assert value == expected
- value = interpolate_environment_variables(V3_4, entry, 'config', mock_env)
+
+
+def test_interpolate_service_name_uses_dot(mock_env):
+ entry = {
+ 'service.1': {
+ 'image': 'busybox',
+ 'ulimits': {
+ 'nproc': '${POSINT}',
+ 'nofile': {
+ 'soft': '${POSINT}',
+ 'hard': '${DEFAULT:-40000}'
+ },
+ },
+ }
+ }
+
+ expected = {
+ 'service.1': {
+ 'image': 'busybox',
+ 'ulimits': {
+ 'nproc': 50,
+ 'nofile': {
+ 'soft': 50,
+ 'hard': 40000
+ },
+ },
+ }
+ }
+
+ value = interpolate_environment_variables(VERSION, entry, 'service', mock_env)
assert value == expected
@@ -391,7 +416,7 @@ def test_interpolate_mandatory_no_err_msg(defaults_interpolator):
with pytest.raises(UnsetRequiredSubstitution) as e:
defaults_interpolator("not ok ${BAZ?}")
- assert e.value.err == ''
+ assert e.value.err == 'BAZ'
def test_interpolate_mixed_separators(defaults_interpolator):
@@ -413,7 +438,7 @@ def test_unbraced_separators(defaults_interpolator):
def test_interpolate_unicode_values():
variable_mapping = {
- 'FOO': '十六夜 咲夜'.encode('utf-8'),
+ 'FOO': '十六夜 咲夜'.encode(),
'BAR': '十六夜 咲夜'
}
interpol = Interpolator(TemplateWithDefaults, variable_mapping).interpolate
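
These tests cover the ${VAR}, ${VAR:-default} and ${VAR?err} forms, and the fixed assertion above expects the unset variable's name ('BAZ') to be reported. A self-contained regex sketch of those semantics — not compose's TemplateWithDefaults, which also handles :+, :?, escaping, and more:

    import re

    PATTERN = re.compile(r'\$\{(\w+)(:-|\?)?([^}]*)\}')

    def interpolate(value, env):
        def repl(match):
            name, op, arg = match.groups()
            if op == ':-':                       # default when unset or empty
                return env.get(name) or arg
            if op == '?':                        # mandatory variable
                if name in env:
                    return env[name]
                raise ValueError(arg or name)    # report the name, as the test expects
            return env.get(name, '')
        return PATTERN.sub(repl, value)

    assert interpolate('${DEFAULT:-40000}', {}) == '40000'
    assert interpolate('${POSINT}', {'POSINT': '50'}) == '50'
    # interpolate('not ok ${BAZ?}', {}) raises ValueError('BAZ')
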
diff --git a/tests/unit/config/sort_services_test.py b/tests/unit/config/sort_services_test.py
index c39ac022..508c4bba 100644
--- a/tests/unit/config/sort_services_test.py
+++ b/tests/unit/config/sort_services_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import pytest
from compose.config.errors import DependencyError
@@ -8,7 +5,7 @@ from compose.config.sort_services import sort_service_dicts
from compose.config.types import VolumeFromSpec
-class TestSortService(object):
+class TestSortService:
def test_sort_service_dicts_1(self):
services = [
{
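
sort_service_dicts orders services so that dependencies come first and rejects cycles (compose raises DependencyError). A standalone sketch of that behavior as a depth-first topological sort over a hypothetical depends_on key:

    def sort_service_dicts(services):
        by_name = {s['name']: s for s in services}
        ordered, visiting, done = [], set(), set()

        def visit(name):
            if name in done:
                return
            if name in visiting:                  # cycle detected
                raise ValueError('circular dependency on %s' % name)
            visiting.add(name)
            for dep in by_name[name].get('depends_on', []):
                visit(dep)
            visiting.discard(name)
            done.add(name)
            ordered.append(by_name[name])

        for s in services:
            visit(s['name'])
        return ordered

    out = sort_service_dicts([{'name': 'web', 'depends_on': ['db']}, {'name': 'db'}])
    assert [s['name'] for s in out] == ['db', 'web']
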
diff --git a/tests/unit/config/types_test.py b/tests/unit/config/types_test.py
index e7cc67b0..e5fcde1a 100644
--- a/tests/unit/config/types_test.py
+++ b/tests/unit/config/types_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import pytest
from compose.config.errors import ConfigurationError
@@ -8,8 +5,8 @@ from compose.config.types import parse_extra_hosts
from compose.config.types import ServicePort
from compose.config.types import VolumeFromSpec
from compose.config.types import VolumeSpec
+from compose.const import COMPOSE_SPEC as VERSION
from compose.const import COMPOSEFILE_V1 as V1
-from compose.const import COMPOSEFILE_V2_0 as V2_0
def test_parse_extra_hosts_list():
@@ -42,7 +39,7 @@ def test_parse_extra_hosts_dict():
}
-class TestServicePort(object):
+class TestServicePort:
def test_parse_dict(self):
data = {
'target': 8000,
@@ -132,7 +129,7 @@ class TestServicePort(object):
ServicePort.parse(port_def)
-class TestVolumeSpec(object):
+class TestVolumeSpec:
def test_parse_volume_spec_only_one_path(self):
spec = VolumeSpec.parse('/the/volume')
@@ -219,7 +216,7 @@ class TestVolumeSpec(object):
)
-class TestVolumesFromSpec(object):
+class TestVolumesFromSpec:
services = ['servicea', 'serviceb']
@@ -236,26 +233,26 @@ class TestVolumesFromSpec(object):
VolumeFromSpec.parse('unknown:format:ro', self.services, V1)
def test_parse_v2_from_service(self):
- volume_from = VolumeFromSpec.parse('servicea', self.services, V2_0)
+ volume_from = VolumeFromSpec.parse('servicea', self.services, VERSION)
assert volume_from == VolumeFromSpec('servicea', 'rw', 'service')
def test_parse_v2_from_service_with_mode(self):
- volume_from = VolumeFromSpec.parse('servicea:ro', self.services, V2_0)
+ volume_from = VolumeFromSpec.parse('servicea:ro', self.services, VERSION)
assert volume_from == VolumeFromSpec('servicea', 'ro', 'service')
def test_parse_v2_from_container(self):
- volume_from = VolumeFromSpec.parse('container:foo', self.services, V2_0)
+ volume_from = VolumeFromSpec.parse('container:foo', self.services, VERSION)
assert volume_from == VolumeFromSpec('foo', 'rw', 'container')
def test_parse_v2_from_container_with_mode(self):
- volume_from = VolumeFromSpec.parse('container:foo:ro', self.services, V2_0)
+ volume_from = VolumeFromSpec.parse('container:foo:ro', self.services, VERSION)
assert volume_from == VolumeFromSpec('foo', 'ro', 'container')
def test_parse_v2_invalid_type(self):
with pytest.raises(ConfigurationError) as exc:
- VolumeFromSpec.parse('bogus:foo:ro', self.services, V2_0)
+ VolumeFromSpec.parse('bogus:foo:ro', self.services, VERSION)
assert "Unknown volumes_from type 'bogus'" in exc.exconly()
def test_parse_v2_invalid(self):
with pytest.raises(ConfigurationError):
- VolumeFromSpec.parse('unknown:format:ro', self.services, V2_0)
+ VolumeFromSpec.parse('unknown:format:ro', self.services, VERSION)
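
The volumes_from strings above follow a '[service|container:]name[:mode]' grammar. A sketch of the post-v1 parsing these assertions imply, returning a plain (type, source, mode) tuple instead of compose's VolumeFromSpec:

    def parse_volumes_from(spec):
        parts = spec.split(':')
        if len(parts) == 1:
            return 'service', parts[0], 'rw'            # 'servicea'
        if len(parts) == 2:
            if parts[0] == 'container':
                return 'container', parts[1], 'rw'      # 'container:foo'
            return 'service', parts[0], parts[1]        # 'servicea:ro'
        if len(parts) == 3:
            if parts[0] != 'container':
                raise ValueError("Unknown volumes_from type '%s'" % parts[0])
            return 'container', parts[1], parts[2]      # 'container:foo:ro'
        raise ValueError('Invalid volumes_from: %s' % spec)

    assert parse_volumes_from('servicea:ro') == ('service', 'servicea', 'ro')
    assert parse_volumes_from('container:foo:ro') == ('container', 'foo', 'ro')
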
diff --git a/tests/unit/container_test.py b/tests/unit/container_test.py
index d64263c1..fe1e1078 100644
--- a/tests/unit/container_test.py
+++ b/tests/unit/container_test.py
@@ -1,10 +1,10 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import docker
from .. import mock
from .. import unittest
+from ..helpers import BUSYBOX_IMAGE_WITH_TAG
+from compose.const import LABEL_ONE_OFF
+from compose.const import LABEL_SLUG
from compose.container import Container
from compose.container import get_container_name
@@ -15,7 +15,7 @@ class ContainerTest(unittest.TestCase):
self.container_id = "abcabcabcbabc12345"
self.container_dict = {
"Id": self.container_id,
- "Image": "busybox:latest",
+ "Image": BUSYBOX_IMAGE_WITH_TAG,
"Command": "top",
"Created": 1387384730,
"Status": "Up 8 seconds",
@@ -30,7 +30,7 @@ class ContainerTest(unittest.TestCase):
"Labels": {
"com.docker.compose.project": "composetest",
"com.docker.compose.service": "web",
- "com.docker.compose.container-number": 7,
+ "com.docker.compose.container-number": "7",
},
}
}
@@ -41,7 +41,7 @@ class ContainerTest(unittest.TestCase):
has_been_inspected=True)
assert container.dictionary == {
"Id": self.container_id,
- "Image": "busybox:latest",
+ "Image": BUSYBOX_IMAGE_WITH_TAG,
"Name": "/composetest_db_1",
}
@@ -56,7 +56,7 @@ class ContainerTest(unittest.TestCase):
has_been_inspected=True)
assert container.dictionary == {
"Id": self.container_id,
- "Image": "busybox:latest",
+ "Image": BUSYBOX_IMAGE_WITH_TAG,
"Name": "/composetest_db_1",
}
@@ -95,6 +95,15 @@ class ContainerTest(unittest.TestCase):
container = Container(None, self.container_dict, has_been_inspected=True)
assert container.name_without_project == "custom_name_of_container"
+ def test_name_without_project_one_off(self):
+ self.container_dict['Name'] = "/composetest_web_092cd63296f"
+ self.container_dict['Config']['Labels'][LABEL_SLUG] = (
+ "092cd63296fdc446ad432d3905dd1fcbe12a2ba6b52"
+ )
+ self.container_dict['Config']['Labels'][LABEL_ONE_OFF] = 'True'
+ container = Container(None, self.container_dict, has_been_inspected=True)
+ assert container.name_without_project == 'web_092cd63296fd'
+
def test_inspect_if_not_inspected(self):
mock_client = mock.create_autospec(docker.APIClient)
container = Container(mock_client, dict(Id="the_id"))
@@ -212,34 +221,6 @@ class ContainerTest(unittest.TestCase):
container = Container(None, self.container_dict, has_been_inspected=True)
assert container.short_id == self.container_id[:12]
- def test_has_api_logs(self):
- container_dict = {
- 'HostConfig': {
- 'LogConfig': {
- 'Type': 'json-file'
- }
- }
- }
-
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is True
-
- container_dict['HostConfig']['LogConfig']['Type'] = 'none'
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is False
-
- container_dict['HostConfig']['LogConfig']['Type'] = 'syslog'
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is False
-
- container_dict['HostConfig']['LogConfig']['Type'] = 'journald'
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is True
-
- container_dict['HostConfig']['LogConfig']['Type'] = 'foobar'
- container = Container(None, container_dict, has_been_inspected=True)
- assert container.has_api_logs is False
-
class GetContainerNameTestCase(unittest.TestCase):
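
The new one-off test expects name_without_project to rebuild the name from the service label plus the first 12 characters of the slug label, instead of echoing the (truncated) container name. A sketch of that rule as a hypothetical free function:

    def name_without_project(name, project, service, slug=None, one_off=False):
        name = name.lstrip('/')
        if one_off and slug:
            return '{}_{}'.format(service, slug[:12])   # '<service>_<12-char slug>'
        prefix = '{}_'.format(project)
        return name[len(prefix):] if name.startswith(prefix) else name

    assert name_without_project(
        '/composetest_web_092cd63296f', 'composetest', 'web',
        slug='092cd63296fdc446ad432d3905dd1fcbe12a2ba6b52', one_off=True,
    ) == 'web_092cd63296fd'
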
diff --git a/tests/unit/metrics/__init__.py b/tests/unit/metrics/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/metrics/__init__.py
diff --git a/tests/unit/metrics/metrics_test.py b/tests/unit/metrics/metrics_test.py
new file mode 100644
index 00000000..e9f23720
--- /dev/null
+++ b/tests/unit/metrics/metrics_test.py
@@ -0,0 +1,36 @@
+import unittest
+
+from compose.metrics.client import MetricsCommand
+from compose.metrics.client import Status
+
+
+class MetricsTest(unittest.TestCase):
+ @classmethod
+ def test_metrics(cls):
+ assert MetricsCommand('up', 'moby').to_map() == {
+ 'command': 'compose up',
+ 'context': 'moby',
+ 'status': 'success',
+ 'source': 'docker-compose',
+ }
+
+ assert MetricsCommand('down', 'local').to_map() == {
+ 'command': 'compose down',
+ 'context': 'local',
+ 'status': 'success',
+ 'source': 'docker-compose',
+ }
+
+ assert MetricsCommand('help', 'aci', Status.FAILURE).to_map() == {
+ 'command': 'compose help',
+ 'context': 'aci',
+ 'status': 'failure',
+ 'source': 'docker-compose',
+ }
+
+ assert MetricsCommand('run', 'ecs').to_map() == {
+ 'command': 'compose run',
+ 'context': 'ecs',
+ 'status': 'success',
+ 'source': 'docker-compose',
+ }
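
The contract these assertions pin down can be mirrored with a small dataclass; this is a sketch of the payload shape only, not compose.metrics.client's actual implementation:

    from dataclasses import dataclass
    from enum import Enum

    class Status(Enum):
        SUCCESS = 'success'
        FAILURE = 'failure'

    @dataclass
    class MetricsCommand:
        command: str
        context: str
        status: Status = Status.SUCCESS

        def to_map(self):
            return {
                'command': 'compose ' + self.command,
                'context': self.context,
                'status': self.status.value,
                'source': 'docker-compose',
            }

    assert MetricsCommand('help', 'aci', Status.FAILURE).to_map()['status'] == 'failure'
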
diff --git a/tests/unit/network_test.py b/tests/unit/network_test.py
index b27339af..ab7ad59c 100644
--- a/tests/unit/network_test.py
+++ b/tests/unit/network_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import pytest
from .. import mock
@@ -23,7 +20,10 @@ class NetworkTest(unittest.TestCase):
'aux_addresses': ['11.0.0.1', '24.25.26.27'],
'ip_range': '156.0.0.1-254'
}
- ]
+ ],
+ 'options': {
+ 'iface': 'eth0',
+ }
}
labels = {
'com.project.tests.istest': 'true',
@@ -57,6 +57,9 @@ class NetworkTest(unittest.TestCase):
'Subnet': '172.0.0.1/16',
'Gateway': '172.0.0.1'
}],
+ 'Options': {
+ 'iface': 'eth0',
+ },
},
'Labels': remote_labels
},
@@ -78,6 +81,7 @@ class NetworkTest(unittest.TestCase):
{'Driver': 'overlay', 'Options': remote_options}, net
)
+ @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
def test_check_remote_network_config_driver_mismatch(self):
net = Network(None, 'compose_test', 'net1', 'overlay')
with pytest.raises(NetworkConfigChangedError) as e:
@@ -87,6 +91,7 @@ class NetworkTest(unittest.TestCase):
assert 'driver has changed' in str(e.value)
+ @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
def test_check_remote_network_config_options_mismatch(self):
net = Network(None, 'compose_test', 'net1', 'overlay')
with pytest.raises(NetworkConfigChangedError) as e:
@@ -140,6 +145,7 @@ class NetworkTest(unittest.TestCase):
net
)
+ @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
def test_check_remote_network_labels_mismatch(self):
net = Network(None, 'compose_test', 'net1', 'overlay', labels={
'com.project.touhou.character': 'sakuya.izayoi'
@@ -156,6 +162,11 @@ class NetworkTest(unittest.TestCase):
with mock.patch('compose.network.log') as mock_log:
check_remote_network_config(remote, net)
- mock_log.warn.assert_called_once_with(mock.ANY)
- _, args, kwargs = mock_log.warn.mock_calls[0]
+ mock_log.warning.assert_called_once_with(mock.ANY)
+ _, args, kwargs = mock_log.warning.mock_calls[0]
assert 'label "com.project.touhou.character" has changed' in args[0]
+
+ def test_remote_config_labels_none(self):
+ remote = {'Labels': None}
+ local = Network(None, 'test_project', 'test_network')
+ check_remote_network_config(remote, local)
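
check_remote_network_config compares the local network definition against what the daemon reports: driver and option mismatches are hard errors, label drift only warns, and (per the new test) a remote 'Labels': None must be tolerated. A standalone sketch of those comparisons:

    import logging

    log = logging.getLogger(__name__)

    def check_remote_network_config(remote, driver=None, options=None, labels=None):
        if driver and remote.get('Driver') != driver:
            raise ValueError('driver has changed')
        if options and remote.get('Options') != options:
            raise ValueError('options have changed')
        remote_labels = remote.get('Labels') or {}      # tolerate Labels: None
        for key in set(remote_labels) | set(labels or {}):
            if remote_labels.get(key) != (labels or {}).get(key):
                log.warning('label "%s" has changed' % key)

    check_remote_network_config({'Labels': None})       # must not raise
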
diff --git a/tests/unit/parallel_test.py b/tests/unit/parallel_test.py
index 0735bfcc..91914333 100644
--- a/tests/unit/parallel_test.py
+++ b/tests/unit/parallel_test.py
@@ -1,12 +1,9 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import unittest
from threading import Lock
-import six
from docker.errors import APIError
+from compose.cli.colors import AnsiMode
from compose.parallel import GlobalLimit
from compose.parallel import parallel_execute
from compose.parallel import parallel_execute_iter
@@ -39,7 +36,7 @@ class ParallelTest(unittest.TestCase):
results, errors = parallel_execute(
objects=[1, 2, 3, 4, 5],
func=lambda x: x * 2,
- get_name=six.text_type,
+ get_name=str,
msg="Doubling",
)
@@ -61,7 +58,7 @@ class ParallelTest(unittest.TestCase):
results, errors = parallel_execute(
objects=list(range(tasks)),
func=f,
- get_name=six.text_type,
+ get_name=str,
msg="Testing",
limit=limit,
)
@@ -85,7 +82,7 @@ class ParallelTest(unittest.TestCase):
results, errors = parallel_execute(
objects=list(range(tasks)),
func=f,
- get_name=six.text_type,
+ get_name=str,
msg="Testing",
)
@@ -147,7 +144,7 @@ def test_parallel_execute_alignment(capsys):
results, errors = parallel_execute(
objects=["short", "a very long name"],
func=lambda x: x,
- get_name=six.text_type,
+ get_name=str,
msg="Aligning",
)
@@ -160,11 +157,11 @@ def test_parallel_execute_alignment(capsys):
def test_parallel_execute_ansi(capsys):
ParallelStreamWriter.instance = None
- ParallelStreamWriter.set_noansi(value=False)
+ ParallelStreamWriter.set_default_ansi_mode(AnsiMode.ALWAYS)
results, errors = parallel_execute(
objects=["something", "something more"],
func=lambda x: x,
- get_name=six.text_type,
+ get_name=str,
msg="Control characters",
)
@@ -176,11 +173,11 @@ def test_parallel_execute_ansi(capsys):
def test_parallel_execute_noansi(capsys):
ParallelStreamWriter.instance = None
- ParallelStreamWriter.set_noansi()
+ ParallelStreamWriter.set_default_ansi_mode(AnsiMode.NEVER)
results, errors = parallel_execute(
objects=["something", "something more"],
func=lambda x: x,
- get_name=six.text_type,
+ get_name=str,
msg="Control characters",
)
diff --git a/tests/unit/progress_stream_test.py b/tests/unit/progress_stream_test.py
index f4a0ab06..288c9b6e 100644
--- a/tests/unit/progress_stream_test.py
+++ b/tests/unit/progress_stream_test.py
@@ -1,14 +1,8 @@
-# ~*~ encoding: utf-8 ~*~
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import io
import os
import random
import shutil
import tempfile
-
-from six import StringIO
+from io import StringIO
from compose import progress_stream
from tests import unittest
@@ -21,7 +15,7 @@ class ProgressStreamTestCase(unittest.TestCase):
b'31019763, "start": 1413653874, "total": 62763875}, '
b'"progress": "..."}',
]
- events = progress_stream.stream_output(output, StringIO())
+ events = list(progress_stream.stream_output(output, StringIO()))
assert len(events) == 1
def test_stream_output_div_zero(self):
@@ -30,7 +24,7 @@ class ProgressStreamTestCase(unittest.TestCase):
b'0, "start": 1413653874, "total": 0}, '
b'"progress": "..."}',
]
- events = progress_stream.stream_output(output, StringIO())
+ events = list(progress_stream.stream_output(output, StringIO()))
assert len(events) == 1
def test_stream_output_null_total(self):
@@ -39,7 +33,7 @@ class ProgressStreamTestCase(unittest.TestCase):
b'0, "start": 1413653874, "total": null}, '
b'"progress": "..."}',
]
- events = progress_stream.stream_output(output, StringIO())
+ events = list(progress_stream.stream_output(output, StringIO()))
assert len(events) == 1
def test_stream_output_progress_event_tty(self):
@@ -52,7 +46,7 @@ class ProgressStreamTestCase(unittest.TestCase):
return True
output = TTYStringIO()
- events = progress_stream.stream_output(events, output)
+ events = list(progress_stream.stream_output(events, output))
assert len(output.getvalue()) > 0
def test_stream_output_progress_event_no_tty(self):
@@ -61,7 +55,7 @@ class ProgressStreamTestCase(unittest.TestCase):
]
output = StringIO()
- events = progress_stream.stream_output(events, output)
+ events = list(progress_stream.stream_output(events, output))
assert len(output.getvalue()) == 0
def test_stream_output_no_progress_event_no_tty(self):
@@ -70,7 +64,7 @@ class ProgressStreamTestCase(unittest.TestCase):
]
output = StringIO()
- events = progress_stream.stream_output(events, output)
+ events = list(progress_stream.stream_output(events, output))
assert len(output.getvalue()) > 0
def test_mismatched_encoding_stream_write(self):
@@ -79,7 +73,7 @@ class ProgressStreamTestCase(unittest.TestCase):
def mktempfile(encoding):
fname = os.path.join(tmpdir, hex(random.getrandbits(128))[2:-1])
- return io.open(fname, mode='w+', encoding=encoding)
+ return open(fname, mode='w+', encoding=encoding)
text = '就吃饭'
with mktempfile(encoding='utf-8') as tf:
@@ -97,22 +91,24 @@ class ProgressStreamTestCase(unittest.TestCase):
tf.seek(0)
assert tf.read() == '???'
+ def test_get_digest_from_push(self):
+ digest = "sha256:abcd"
+ events = [
+ {"status": "..."},
+ {"status": "..."},
+ {"progressDetail": {}, "aux": {"Digest": digest}},
+ ]
+ assert progress_stream.get_digest_from_push(events) == digest
-def test_get_digest_from_push():
- digest = "sha256:abcd"
- events = [
- {"status": "..."},
- {"status": "..."},
- {"progressDetail": {}, "aux": {"Digest": digest}},
- ]
- assert progress_stream.get_digest_from_push(events) == digest
-
-
-def test_get_digest_from_pull():
- digest = "sha256:abcd"
- events = [
- {"status": "..."},
- {"status": "..."},
- {"status": "Digest: %s" % digest},
- ]
- assert progress_stream.get_digest_from_pull(events) == digest
+ def test_get_digest_from_pull(self):
+ events = list()
+ assert progress_stream.get_digest_from_pull(events) is None
+
+ digest = "sha256:abcd"
+ events = [
+ {"status": "..."},
+ {"status": "..."},
+ {"status": "Digest: %s" % digest},
+ {"status": "..."},
+ ]
+ assert progress_stream.get_digest_from_pull(events) == digest
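
The list(...) wrappers added above exist because stream_output became a generator: its body, including writes to the output stream, only runs as events are consumed. A minimal sketch of that behavior, assuming one JSON document per chunk:

    import json
    from io import StringIO

    def stream_output(output, stream):
        for chunk in output:                  # generator body runs lazily
            event = json.loads(chunk)
            stream.write(event.get('status', ''))
            yield event

    out = StringIO()
    events = stream_output([b'{"status": "pulling"}'], out)
    assert out.getvalue() == ''               # nothing consumed, nothing written
    assert len(list(events)) == 1
    assert out.getvalue() == 'pulling'        # side effects happen on iteration
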
diff --git a/tests/unit/project_test.py b/tests/unit/project_test.py
index 83a01475..a3ffdb67 100644
--- a/tests/unit/project_test.py
+++ b/tests/unit/project_test.py
@@ -1,8 +1,6 @@
-# encoding: utf-8
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import datetime
+import os
+import tempfile
import docker
import pytest
@@ -10,14 +8,17 @@ from docker.errors import NotFound
from .. import mock
from .. import unittest
+from ..helpers import BUSYBOX_IMAGE_WITH_TAG
+from compose.config import ConfigurationError
from compose.config.config import Config
from compose.config.types import VolumeFromSpec
+from compose.const import COMPOSE_SPEC as VERSION
from compose.const import COMPOSEFILE_V1 as V1
-from compose.const import COMPOSEFILE_V2_0 as V2_0
-from compose.const import COMPOSEFILE_V2_4 as V2_4
+from compose.const import DEFAULT_TIMEOUT
from compose.const import LABEL_SERVICE
from compose.container import Container
from compose.errors import OperationFailedError
+from compose.project import get_secrets
from compose.project import NoSuchService
from compose.project import Project
from compose.project import ProjectError
@@ -25,22 +26,35 @@ from compose.service import ImageType
from compose.service import Service
+def build_config(**kwargs):
+ return Config(
+ config_version=kwargs.get('config_version', VERSION),
+ version=kwargs.get('version', VERSION),
+ services=kwargs.get('services'),
+ volumes=kwargs.get('volumes'),
+ networks=kwargs.get('networks'),
+ secrets=kwargs.get('secrets'),
+ configs=kwargs.get('configs'),
+ )
+
+
class ProjectTest(unittest.TestCase):
def setUp(self):
self.mock_client = mock.create_autospec(docker.APIClient)
self.mock_client._general_configs = {}
+ self.mock_client.api_version = docker.constants.DEFAULT_DOCKER_API_VERSION
def test_from_config_v1(self):
- config = Config(
+ config = build_config(
version=V1,
services=[
{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
},
{
'name': 'db',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
},
],
networks=None,
@@ -55,22 +69,22 @@ class ProjectTest(unittest.TestCase):
)
assert len(project.services) == 2
assert project.get_service('web').name == 'web'
- assert project.get_service('web').options['image'] == 'busybox:latest'
+ assert project.get_service('web').options['image'] == BUSYBOX_IMAGE_WITH_TAG
assert project.get_service('db').name == 'db'
- assert project.get_service('db').options['image'] == 'busybox:latest'
+ assert project.get_service('db').options['image'] == BUSYBOX_IMAGE_WITH_TAG
assert not project.networks.use_networking
+ @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
def test_from_config_v2(self):
- config = Config(
- version=V2_0,
+ config = build_config(
services=[
{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
},
{
'name': 'db',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
},
],
networks=None,
@@ -87,7 +101,7 @@ class ProjectTest(unittest.TestCase):
project='composetest',
name='web',
client=None,
- image="busybox:latest",
+ image=BUSYBOX_IMAGE_WITH_TAG,
)
project = Project('test', [web], None)
assert project.get_service('web') == web
@@ -168,11 +182,10 @@ class ProjectTest(unittest.TestCase):
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[{
'name': 'test',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes_from': [VolumeFromSpec('aaa', 'rw', 'container')]
}],
networks=None,
@@ -190,22 +203,21 @@ class ProjectTest(unittest.TestCase):
"Name": container_name,
"Names": [container_name],
"Id": container_name,
- "Image": 'busybox:latest'
+ "Image": BUSYBOX_IMAGE_WITH_TAG
}
]
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[
{
'name': 'vol',
- 'image': 'busybox:latest'
+ 'image': BUSYBOX_IMAGE_WITH_TAG
},
{
'name': 'test',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes_from': [VolumeFromSpec('vol', 'rw', 'service')]
}
],
@@ -217,22 +229,22 @@ class ProjectTest(unittest.TestCase):
)
assert project.get_service('test')._get_volumes_from() == [container_name + ":rw"]
+ @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
def test_use_volumes_from_service_container(self):
container_ids = ['aabbccddee', '12345']
project = Project.from_config(
name='test',
client=None,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[
{
'name': 'vol',
- 'image': 'busybox:latest'
+ 'image': BUSYBOX_IMAGE_WITH_TAG
},
{
'name': 'test',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes_from': [VolumeFromSpec('vol', 'rw', 'service')]
}
],
@@ -251,9 +263,10 @@ class ProjectTest(unittest.TestCase):
[container_ids[0] + ':rw']
)
- def test_events(self):
+ def test_events_legacy(self):
services = [Service(name='web'), Service(name='db')]
project = Project('test', services, self.mock_client)
+ self.mock_client.api_version = '1.21'
self.mock_client.events.return_value = iter([
{
'status': 'create',
@@ -359,16 +372,185 @@ class ProjectTest(unittest.TestCase):
},
]
+ def test_events(self):
+ services = [Service(name='web'), Service(name='db')]
+ project = Project('test', services, self.mock_client)
+ self.mock_client.api_version = '1.35'
+ self.mock_client.events.return_value = iter([
+ {
+ 'status': 'create',
+ 'from': 'example/image',
+ 'Type': 'container',
+ 'Actor': {
+ 'ID': 'abcde',
+ 'Attributes': {
+ 'com.docker.compose.project': 'test',
+ 'com.docker.compose.service': 'web',
+ 'image': 'example/image',
+ 'name': 'test_web_1',
+ }
+ },
+ 'id': 'abcde',
+ 'time': 1420092061,
+ 'timeNano': 14200920610000002000,
+ },
+ {
+ 'status': 'attach',
+ 'from': 'example/image',
+ 'Type': 'container',
+ 'Actor': {
+ 'ID': 'abcde',
+ 'Attributes': {
+ 'com.docker.compose.project': 'test',
+ 'com.docker.compose.service': 'web',
+ 'image': 'example/image',
+ 'name': 'test_web_1',
+ }
+ },
+ 'id': 'abcde',
+ 'time': 1420092061,
+ 'timeNano': 14200920610000003000,
+ },
+ {
+ 'status': 'create',
+ 'from': 'example/other',
+ 'Type': 'container',
+ 'Actor': {
+ 'ID': 'bdbdbd',
+ 'Attributes': {
+ 'image': 'example/other',
+ 'name': 'shrewd_einstein',
+ }
+ },
+ 'id': 'bdbdbd',
+ 'time': 1420092061,
+ 'timeNano': 14200920610000005000,
+ },
+ {
+ 'status': 'create',
+ 'from': 'example/db',
+ 'Type': 'container',
+ 'Actor': {
+ 'ID': 'ababa',
+ 'Attributes': {
+ 'com.docker.compose.project': 'test',
+ 'com.docker.compose.service': 'db',
+ 'image': 'example/db',
+ 'name': 'test_db_1',
+ }
+ },
+ 'id': 'ababa',
+ 'time': 1420092061,
+ 'timeNano': 14200920610000004000,
+ },
+ {
+ 'status': 'destroy',
+ 'from': 'example/db',
+ 'Type': 'container',
+ 'Actor': {
+ 'ID': 'eeeee',
+ 'Attributes': {
+ 'com.docker.compose.project': 'test',
+ 'com.docker.compose.service': 'db',
+ 'image': 'example/db',
+ 'name': 'test_db_1',
+ }
+ },
+ 'id': 'eeeee',
+ 'time': 1420092061,
+ 'timeNano': 14200920610000004000,
+ },
+ ])
+
+ def dt_with_microseconds(dt, us):
+ return datetime.datetime.fromtimestamp(dt).replace(microsecond=us)
+
+ def get_container(cid):
+ if cid == 'eeeee':
+ raise NotFound(None, None, "oops")
+ if cid == 'abcde':
+ name = 'web'
+ labels = {LABEL_SERVICE: name}
+ elif cid == 'ababa':
+ name = 'db'
+ labels = {LABEL_SERVICE: name}
+ else:
+ labels = {}
+ name = ''
+ return {
+ 'Id': cid,
+ 'Config': {'Labels': labels},
+ 'Name': '/project_%s_1' % name,
+ }
+
+ self.mock_client.inspect_container.side_effect = get_container
+
+ events = project.events()
+
+ events_list = list(events)
+ # Assert the return value is a generator
+ assert not list(events)
+ assert events_list == [
+ {
+ 'type': 'container',
+ 'service': 'web',
+ 'action': 'create',
+ 'id': 'abcde',
+ 'attributes': {
+ 'name': 'test_web_1',
+ 'image': 'example/image',
+ },
+ 'time': dt_with_microseconds(1420092061, 2),
+ 'container': Container(None, get_container('abcde')),
+ },
+ {
+ 'type': 'container',
+ 'service': 'web',
+ 'action': 'attach',
+ 'id': 'abcde',
+ 'attributes': {
+ 'name': 'test_web_1',
+ 'image': 'example/image',
+ },
+ 'time': dt_with_microseconds(1420092061, 3),
+ 'container': Container(None, get_container('abcde')),
+ },
+ {
+ 'type': 'container',
+ 'service': 'db',
+ 'action': 'create',
+ 'id': 'ababa',
+ 'attributes': {
+ 'name': 'test_db_1',
+ 'image': 'example/db',
+ },
+ 'time': dt_with_microseconds(1420092061, 4),
+ 'container': Container(None, get_container('ababa')),
+ },
+ {
+ 'type': 'container',
+ 'service': 'db',
+ 'action': 'destroy',
+ 'id': 'eeeee',
+ 'attributes': {
+ 'name': 'test_db_1',
+ 'image': 'example/db',
+ },
+ 'time': dt_with_microseconds(1420092061, 4),
+ 'container': None,
+ },
+ ]
+
def test_net_unset(self):
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
+ config_data=build_config(
version=V1,
services=[
{
'name': 'test',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
}
],
networks=None,
@@ -388,12 +570,11 @@ class ProjectTest(unittest.TestCase):
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[
{
'name': 'test',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'network_mode': 'container:aaa'
},
],
@@ -413,22 +594,21 @@ class ProjectTest(unittest.TestCase):
"Name": container_name,
"Names": [container_name],
"Id": container_name,
- "Image": 'busybox:latest'
+ "Image": BUSYBOX_IMAGE_WITH_TAG
}
]
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[
{
'name': 'aaa',
- 'image': 'busybox:latest'
+ 'image': BUSYBOX_IMAGE_WITH_TAG
},
{
'name': 'test',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'network_mode': 'service:aaa'
},
],
@@ -446,12 +626,11 @@ class ProjectTest(unittest.TestCase):
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[
{
'name': 'foo',
- 'image': 'busybox:latest'
+ 'image': BUSYBOX_IMAGE_WITH_TAG
},
],
networks=None,
@@ -467,12 +646,11 @@ class ProjectTest(unittest.TestCase):
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[
{
'name': 'foo',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
'networks': {'custom': None}
},
],
@@ -487,9 +665,9 @@ class ProjectTest(unittest.TestCase):
def test_container_without_name(self):
self.mock_client.containers.return_value = [
- {'Image': 'busybox:latest', 'Id': '1', 'Name': '1'},
- {'Image': 'busybox:latest', 'Id': '2', 'Name': None},
- {'Image': 'busybox:latest', 'Id': '3'},
+ {'Image': BUSYBOX_IMAGE_WITH_TAG, 'Id': '1', 'Name': '1'},
+ {'Image': BUSYBOX_IMAGE_WITH_TAG, 'Id': '2', 'Name': None},
+ {'Image': BUSYBOX_IMAGE_WITH_TAG, 'Id': '3'},
]
self.mock_client.inspect_container.return_value = {
'Id': '1',
@@ -502,11 +680,10 @@ class ProjectTest(unittest.TestCase):
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
}],
networks=None,
volumes=None,
@@ -520,11 +697,10 @@ class ProjectTest(unittest.TestCase):
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
}],
networks={'default': {}},
volumes={'data': {}},
@@ -536,7 +712,7 @@ class ProjectTest(unittest.TestCase):
self.mock_client.remove_volume.side_effect = NotFound(None, None, 'oops')
project.down(ImageType.all, True)
- self.mock_client.remove_image.assert_called_once_with("busybox:latest")
+ self.mock_client.remove_image.assert_called_once_with(BUSYBOX_IMAGE_WITH_TAG)
def test_no_warning_on_stop(self):
self.mock_client.info.return_value = {'Swarm': {'LocalNodeState': 'active'}}
@@ -563,45 +739,71 @@ class ProjectTest(unittest.TestCase):
assert fake_log.warn.call_count == 0
def test_no_such_service_unicode(self):
- assert NoSuchService('十六夜 咲夜'.encode('utf-8')).msg == 'No such service: 十六夜 咲夜'
+ assert NoSuchService('十六夜 咲夜'.encode()).msg == 'No such service: 十六夜 咲夜'
assert NoSuchService('十六夜 咲夜').msg == 'No such service: 十六夜 咲夜'
def test_project_platform_value(self):
service_config = {
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
}
- config_data = Config(
- version=V2_4, services=[service_config], networks={}, volumes={}, secrets=None, configs=None
+ config_data = build_config(
+ services=[service_config], networks={}, volumes={}, secrets=None, configs=None
)
project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
- assert project.get_service('web').options.get('platform') is None
+ assert project.get_service('web').platform is None
project = Project.from_config(
name='test', client=self.mock_client, config_data=config_data, default_platform='windows'
)
- assert project.get_service('web').options.get('platform') == 'windows'
+ assert project.get_service('web').platform == 'windows'
service_config['platform'] = 'linux/s390x'
project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
- assert project.get_service('web').options.get('platform') == 'linux/s390x'
+ assert project.get_service('web').platform == 'linux/s390x'
project = Project.from_config(
name='test', client=self.mock_client, config_data=config_data, default_platform='windows'
)
- assert project.get_service('web').options.get('platform') == 'linux/s390x'
+ assert project.get_service('web').platform == 'linux/s390x'
+
+ def test_build_container_operation_with_timeout_func_does_not_mutate_options_with_timeout(self):
+ config_data = build_config(
+ services=[
+ {'name': 'web', 'image': BUSYBOX_IMAGE_WITH_TAG},
+ {'name': 'db', 'image': BUSYBOX_IMAGE_WITH_TAG, 'stop_grace_period': '1s'},
+ ],
+ networks={}, volumes={}, secrets=None, configs=None,
+ )
+
+ project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
+
+ stop_op = project.build_container_operation_with_timeout_func('stop', options={})
+
+ web_container = mock.create_autospec(Container, service='web')
+ db_container = mock.create_autospec(Container, service='db')
+
+        # `stop_grace_period` is not set for the 'web' service,
+        # so it is stopped with the default timeout.
+ stop_op(web_container)
+ web_container.stop.assert_called_once_with(timeout=DEFAULT_TIMEOUT)
+
+        # `stop_grace_period` is set for the 'db' service,
+        # so it is stopped with the specified timeout, and
+        # the value is not overridden by the previous function call.
+ stop_op(db_container)
+ db_container.stop.assert_called_once_with(timeout=1)
@mock.patch('compose.parallel.ParallelStreamWriter._write_noansi')
def test_error_parallel_pull(self, mock_write):
project = Project.from_config(
name='test',
client=self.mock_client,
- config_data=Config(
- version=V2_0,
+ config_data=build_config(
services=[{
'name': 'web',
- 'image': 'busybox:latest',
+ 'image': BUSYBOX_IMAGE_WITH_TAG,
}],
networks=None,
volumes=None,
@@ -617,3 +819,104 @@ class ProjectTest(unittest.TestCase):
self.mock_client.pull.side_effect = OperationFailedError(b'pull error')
with pytest.raises(ProjectError):
project.pull(parallel_pull=True)
+
+ def test_avoid_multiple_push(self):
+ service_config_latest = {'image': 'busybox:latest', 'build': '.'}
+ service_config_default = {'image': 'busybox', 'build': '.'}
+ service_config_sha = {
+ 'image': 'busybox@sha256:38a203e1986cf79639cfb9b2e1d6e773de84002feea2d4eb006b52004ee8502d',
+ 'build': '.'
+ }
+ svc1 = Service('busy1', **service_config_latest)
+ svc1_1 = Service('busy11', **service_config_latest)
+ svc2 = Service('busy2', **service_config_default)
+ svc2_1 = Service('busy21', **service_config_default)
+ svc3 = Service('busy3', **service_config_sha)
+ svc3_1 = Service('busy31', **service_config_sha)
+ project = Project(
+ 'composetest', [svc1, svc1_1, svc2, svc2_1, svc3, svc3_1], self.mock_client
+ )
+ with mock.patch('compose.service.Service.push') as fake_push:
+ project.push()
+ assert fake_push.call_count == 2
+
+ def test_get_secrets_no_secret_def(self):
+ service = 'foo'
+ secret_source = 'bar'
+
+ secret_defs = mock.Mock()
+ secret_defs.get.return_value = None
+ secret = mock.Mock(source=secret_source)
+
+ with self.assertRaises(ConfigurationError):
+ get_secrets(service, [secret], secret_defs)
+
+ def test_get_secrets_external_warning(self):
+ service = 'foo'
+ secret_source = 'bar'
+
+ secret_def = mock.Mock()
+ secret_def.get.return_value = True
+
+ secret_defs = mock.Mock()
+ secret_defs.get.side_effect = secret_def
+ secret = mock.Mock(source=secret_source)
+
+ with mock.patch('compose.project.log') as mock_log:
+ get_secrets(service, [secret], secret_defs)
+
+ mock_log.warning.assert_called_with("Service \"{service}\" uses secret \"{secret}\" "
+ "which is external. External secrets are not available"
+ " to containers created by docker-compose."
+ .format(service=service, secret=secret_source))
+
+ def test_get_secrets_uid_gid_mode_warning(self):
+ service = 'foo'
+ secret_source = 'bar'
+
+ fd, filename_path = tempfile.mkstemp()
+ os.close(fd)
+ self.addCleanup(os.remove, filename_path)
+
+ def mock_get(key):
+ return {'external': False, 'file': filename_path}[key]
+
+ secret_def = mock.MagicMock()
+ secret_def.get = mock.MagicMock(side_effect=mock_get)
+
+ secret_defs = mock.Mock()
+ secret_defs.get.return_value = secret_def
+
+ secret = mock.Mock(uid=True, gid=True, mode=True, source=secret_source)
+
+ with mock.patch('compose.project.log') as mock_log:
+ get_secrets(service, [secret], secret_defs)
+
+ mock_log.warning.assert_called_with("Service \"{service}\" uses secret \"{secret}\" with uid, "
+ "gid, or mode. These fields are not supported by this "
+ "implementation of the Compose file"
+ .format(service=service, secret=secret_source))
+
+ def test_get_secrets_secret_file_warning(self):
+ service = 'foo'
+ secret_source = 'bar'
+ not_a_path = 'NOT_A_PATH'
+
+ def mock_get(key):
+ return {'external': False, 'file': not_a_path}[key]
+
+ secret_def = mock.MagicMock()
+ secret_def.get = mock.MagicMock(side_effect=mock_get)
+
+ secret_defs = mock.Mock()
+ secret_defs.get.return_value = secret_def
+
+ secret = mock.Mock(uid=False, gid=False, mode=False, source=secret_source)
+
+ with mock.patch('compose.project.log') as mock_log:
+ get_secrets(service, [secret], secret_defs)
+
+ mock_log.warning.assert_called_with("Service \"{service}\" uses an undefined secret file "
+ "\"{secret_file}\", the following file should be created "
+ "\"{secret_file}\""
+ .format(service=service, secret_file=not_a_path))
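
The mutation test above pins down a subtle bug class: a per-service timeout written into a shared options dict would leak into every later stop call. A sketch of the copy-per-container fix, with hypothetical names:

    DEFAULT_TIMEOUT = 10

    def build_stop_op(options, grace_periods):
        def stop(container_name):
            local = dict(options)             # copy; never mutate the shared dict
            local.setdefault('timeout',
                             grace_periods.get(container_name, DEFAULT_TIMEOUT))
            return local['timeout']
        return stop

    stop = build_stop_op({}, {'db': 1})
    assert stop('web') == DEFAULT_TIMEOUT     # default applied...
    assert stop('db') == 1                    # ...and not leaked between calls
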
diff --git a/tests/unit/service_test.py b/tests/unit/service_test.py
index 4ccc4865..8a51c978 100644
--- a/tests/unit/service_test.py
+++ b/tests/unit/service_test.py
@@ -1,15 +1,14 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import docker
import pytest
from docker.constants import DEFAULT_DOCKER_API_VERSION
from docker.errors import APIError
+from docker.errors import ImageNotFound
from docker.errors import NotFound
from .. import mock
from .. import unittest
from compose.config.errors import DependencyError
+from compose.config.types import MountSpec
from compose.config.types import ServicePort
from compose.config.types import ServiceSecret
from compose.config.types import VolumeFromSpec
@@ -20,6 +19,7 @@ from compose.const import LABEL_ONE_OFF
from compose.const import LABEL_PROJECT
from compose.const import LABEL_SERVICE
from compose.const import SECRETS_PATH
+from compose.const import WINDOWS_LONGPATH_PREFIX
from compose.container import Container
from compose.errors import OperationFailedError
from compose.parallel import ParallelStreamWriter
@@ -37,6 +37,7 @@ from compose.service import NeedsBuildError
from compose.service import NetworkMode
from compose.service import NoSuchImageError
from compose.service import parse_repository_tag
+from compose.service import rewrite_build_path
from compose.service import Service
from compose.service import ServiceNetworkMode
from compose.service import warn_on_masked_volume
@@ -62,9 +63,9 @@ class ServiceTest(unittest.TestCase):
assert [c.id for c in service.containers()] == list(range(3))
expected_labels = [
- '{0}=myproject'.format(LABEL_PROJECT),
- '{0}=db'.format(LABEL_SERVICE),
- '{0}=False'.format(LABEL_ONE_OFF),
+ '{}=myproject'.format(LABEL_PROJECT),
+ '{}=db'.format(LABEL_SERVICE),
+ '{}=False'.format(LABEL_ONE_OFF),
]
self.mock_client.containers.assert_called_once_with(
@@ -316,19 +317,20 @@ class ServiceTest(unittest.TestCase):
self.mock_client.inspect_image.return_value = {'Id': 'abcd'}
prev_container = mock.Mock(
id='ababab',
- image_config={'ContainerConfig': {}})
+ image_config={'ContainerConfig': {}}
+ )
+ prev_container.full_slug = 'abcdefff1234'
prev_container.get.return_value = None
opts = service._get_container_create_options(
- {},
- 1,
- previous_container=prev_container)
+ {}, 1, previous_container=prev_container
+ )
assert service.options['labels'] == labels
assert service.options['environment'] == environment
assert opts['labels'][LABEL_CONFIG_HASH] == \
- '2524a06fcb3d781aa2c981fc40bcfa08013bb318e4273bfa388df22023e6f2aa'
+ '6da0f3ec0d5adf901de304bdc7e0ee44ec5dd7adb08aebc20fe0dd791d4ee5a8'
assert opts['environment'] == ['also=real']
def test_get_container_create_options_sets_affinity_with_binds(self):
@@ -354,11 +356,13 @@ class ServiceTest(unittest.TestCase):
}.get(key, None)
prev_container.get.side_effect = container_get
+ prev_container.full_slug = 'abcdefff1234'
opts = service._get_container_create_options(
{},
1,
- previous_container=prev_container)
+ previous_container=prev_container
+ )
assert opts['environment'] == ['affinity:container==ababab']
@@ -369,6 +373,7 @@ class ServiceTest(unittest.TestCase):
id='ababab',
image_config={'ContainerConfig': {}})
prev_container.get.return_value = None
+ prev_container.full_slug = 'abcdefff1234'
opts = service._get_container_create_options(
{},
@@ -385,7 +390,7 @@ class ServiceTest(unittest.TestCase):
@mock.patch('compose.service.Container', autospec=True)
def test_get_container(self, mock_container_class):
- container_dict = dict(Name='default_foo_2')
+ container_dict = dict(Name='default_foo_2_bdfa3ed91e2c')
self.mock_client.containers.return_value = [container_dict]
service = Service('foo', image='foo', client=self.mock_client)
@@ -445,9 +450,24 @@ class ServiceTest(unittest.TestCase):
with pytest.raises(OperationFailedError):
service.pull()
+ def test_pull_image_with_default_platform(self):
+ self.mock_client.api_version = '1.35'
+
+ service = Service(
+ 'foo', client=self.mock_client, image='someimage:sometag',
+ default_platform='linux'
+ )
+ assert service.platform == 'linux'
+ service.pull()
+
+ assert self.mock_client.pull.call_count == 1
+ call_args = self.mock_client.pull.call_args
+ assert call_args[1]['platform'] == 'linux'
+
@mock.patch('compose.service.Container', autospec=True)
def test_recreate_container(self, _):
mock_container = mock.create_autospec(Container)
+ mock_container.full_slug = 'abcdefff1234'
service = Service('foo', client=self.mock_client, image='someimage')
service.image = lambda: {'Id': 'abc123'}
new_container = service.recreate_container(mock_container)
@@ -461,6 +481,7 @@ class ServiceTest(unittest.TestCase):
@mock.patch('compose.service.Container', autospec=True)
def test_recreate_container_with_timeout(self, _):
mock_container = mock.create_autospec(Container)
+ mock_container.full_slug = 'abcdefff1234'
self.mock_client.inspect_image.return_value = {'Id': 'abc123'}
service = Service('foo', client=self.mock_client, image='someimage')
service.recreate_container(mock_container, timeout=1)
@@ -492,12 +513,42 @@ class ServiceTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
service.create_container()
- assert mock_log.warn.called
- _, args, _ = mock_log.warn.mock_calls[0]
+ assert mock_log.warning.called
+ _, args, _ = mock_log.warning.mock_calls[0]
assert 'was built because it did not already exist' in args[0]
assert self.mock_client.build.call_count == 1
- self.mock_client.build.call_args[1]['tag'] == 'default_foo'
+ assert self.mock_client.build.call_args[1]['tag'] == 'default_foo'
+
+ def test_create_container_binary_string_error(self):
+ service = Service('foo', client=self.mock_client, build={'context': '.'})
+ service.image = lambda: {'Id': 'abc123'}
+
+ self.mock_client.create_container.side_effect = APIError(None,
+ None,
+ b"Test binary string explanation")
+ with pytest.raises(OperationFailedError) as ex:
+ service.create_container()
+
+ assert ex.value.msg == "Cannot create container for service foo: Test binary string explanation"
+
+ def test_start_binary_string_error(self):
+ service = Service('foo', client=self.mock_client)
+ container = Container(self.mock_client, {'Id': 'abc123'})
+
+ self.mock_client.start.side_effect = APIError(None,
+ None,
+ b"Test binary string explanation with "
+ b"driver failed programming external "
+ b"connectivity")
+ with mock.patch('compose.service.log', autospec=True) as mock_log:
+ with pytest.raises(OperationFailedError) as ex:
+ service.start_container(container)
+
+ assert ex.value.msg == "Cannot start service foo: " \
+ "Test binary string explanation " \
+ "with driver failed programming external connectivity"
+ mock_log.warn.assert_called_once_with("Host is already in use by another container")
def test_ensure_image_exists_no_build(self):
service = Service('foo', client=self.mock_client, build={'context': '.'})
@@ -522,7 +573,7 @@ class ServiceTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
service.ensure_image_exists(do_build=BuildAction.force)
- assert not mock_log.warn.called
+ assert not mock_log.warning.called
assert self.mock_client.build.call_count == 1
self.mock_client.build.call_args[1]['tag'] == 'default_foo'
@@ -537,7 +588,7 @@ class ServiceTest(unittest.TestCase):
assert self.mock_client.build.call_count == 1
assert not self.mock_client.build.call_args[1]['pull']
- def test_build_does_with_platform(self):
+ def test_build_with_platform(self):
self.mock_client.api_version = '1.35'
self.mock_client.build.return_value = [
b'{"stream": "Successfully built 12345"}',
@@ -550,6 +601,47 @@ class ServiceTest(unittest.TestCase):
call_args = self.mock_client.build.call_args
assert call_args[1]['platform'] == 'linux'
+ def test_build_with_default_platform(self):
+ self.mock_client.api_version = '1.35'
+ self.mock_client.build.return_value = [
+ b'{"stream": "Successfully built 12345"}',
+ ]
+
+ service = Service(
+ 'foo', client=self.mock_client, build={'context': '.'},
+ default_platform='linux'
+ )
+ assert service.platform == 'linux'
+ service.build()
+
+ assert self.mock_client.build.call_count == 1
+ call_args = self.mock_client.build.call_args
+ assert call_args[1]['platform'] == 'linux'
+
+ def test_service_platform_precedence(self):
+ self.mock_client.api_version = '1.35'
+
+ service = Service(
+ 'foo', client=self.mock_client, platform='linux/arm',
+ default_platform='osx'
+ )
+ assert service.platform == 'linux/arm'
+
+ def test_service_ignore_default_platform_with_unsupported_api(self):
+ self.mock_client.api_version = '1.32'
+ self.mock_client.build.return_value = [
+ b'{"stream": "Successfully built 12345"}',
+ ]
+
+ service = Service(
+ 'foo', client=self.mock_client, default_platform='windows', build={'context': '.'}
+ )
+ assert service.platform is None
+ service.build()
+ assert self.mock_client.build.call_count == 1
+ call_args = self.mock_client.build.call_args
+ assert call_args[1]['platform'] is None
+
def test_build_with_override_build_args(self):
self.mock_client.build.return_value = [
b'{"stream": "Successfully built 12345"}',
@@ -608,9 +700,11 @@ class ServiceTest(unittest.TestCase):
config_dict = service.config_dict()
expected = {
'image_id': 'abcd',
+ 'ipc_mode': None,
'options': {'image': 'example.com/foo'},
'links': [('one', 'one')],
'net': 'other',
+ 'secrets': [],
'networks': {'default': None},
'volumes_from': [('two', 'rw')],
}
@@ -630,9 +724,11 @@ class ServiceTest(unittest.TestCase):
config_dict = service.config_dict()
expected = {
'image_id': 'abcd',
+ 'ipc_mode': None,
'options': {'image': 'example.com/foo'},
'links': [],
'networks': {},
+ 'secrets': [],
'net': 'aaabbb',
'volumes_from': [],
}
@@ -645,17 +741,19 @@ class ServiceTest(unittest.TestCase):
image='example.com/foo',
client=self.mock_client,
network_mode=NetworkMode('bridge'),
- networks={'bridge': {}},
+ networks={'bridge': {}, 'net2': {}},
links=[(Service('one', client=self.mock_client), 'one')],
- volumes_from=[VolumeFromSpec(Service('two', client=self.mock_client), 'rw', 'service')]
+ volumes_from=[VolumeFromSpec(Service('two', client=self.mock_client), 'rw', 'service')],
+ volumes=[VolumeSpec('/ext', '/int', 'ro')],
+ build={'context': 'some/random/path'},
)
config_hash = service.config_hash
for api_version in set(API_VERSIONS.values()):
self.mock_client.api_version = api_version
- assert service._get_container_create_options({}, 1)['labels'][LABEL_CONFIG_HASH] == (
- config_hash
- )
+ assert service._get_container_create_options(
+ {}, 1
+ )['labels'][LABEL_CONFIG_HASH] == config_hash
def test_remove_image_none(self):
web = Service('web', image='example', client=self.mock_client)
@@ -689,6 +787,13 @@ class ServiceTest(unittest.TestCase):
mock_log.error.assert_called_once_with(
"Failed to remove image for service %s: %s", web.name, error)
+ def test_remove_non_existing_image(self):
+ self.mock_client.remove_image.side_effect = ImageNotFound('image not found')
+ web = Service('web', image='example', client=self.mock_client)
+ with mock.patch('compose.service.log', autospec=True) as mock_log:
+ assert not web.remove_image(ImageType.all)
+ mock_log.warning.assert_called_once_with("Image %s not found.", web.image_name)
+
def test_specifies_host_port_with_no_ports(self):
service = Service(
'foo',
@@ -752,7 +857,7 @@ class ServiceTest(unittest.TestCase):
assert service.specifies_host_port()
def test_image_name_from_config(self):
- image_name = 'example/web:latest'
+ image_name = 'example/web:mytag'
service = Service('foo', image=image_name)
assert service.image_name == image_name
@@ -771,13 +876,13 @@ class ServiceTest(unittest.TestCase):
ports=["8080:80"])
service.scale(0)
- assert not mock_log.warn.called
+ assert not mock_log.warning.called
service.scale(1)
- assert not mock_log.warn.called
+ assert not mock_log.warning.called
service.scale(2)
- mock_log.warn.assert_called_once_with(
+ mock_log.warning.assert_called_once_with(
'The "{}" service specifies a port on the host. If multiple containers '
'for this service are created on a single host, the port will clash.'.format(name))
@@ -955,6 +1060,41 @@ class ServiceTest(unittest.TestCase):
assert service.create_container().id == 'new_cont_id'
+ def test_build_volume_options_duplicate_binds(self):
+ self.mock_client.api_version = '1.29' # Trigger 3.2 format workaround
+ service = Service('foo', client=self.mock_client)
+ ctnr_opts, override_opts = service._build_container_volume_options(
+ previous_container=None,
+ container_options={
+ 'volumes': [
+ MountSpec.parse({'source': 'vol', 'target': '/data', 'type': 'volume'}),
+ VolumeSpec.parse('vol:/data:rw'),
+ ],
+ 'environment': {},
+ },
+ override_options={},
+ )
+ assert 'binds' in override_opts
+ assert len(override_opts['binds']) == 1
+ assert override_opts['binds'][0] == 'vol:/data:rw'
+
+ def test_volumes_order_is_preserved(self):
+ service = Service('foo', client=self.mock_client)
+ volumes = [
+ VolumeSpec.parse(cfg) for cfg in [
+ '/v{0}:/v{0}:rw'.format(i) for i in range(6)
+ ]
+ ]
+ ctnr_opts, override_opts = service._build_container_volume_options(
+ previous_container=None,
+ container_options={
+ 'volumes': volumes,
+ 'environment': {},
+ },
+ override_options={},
+ )
+ assert override_opts['binds'] == [vol.repr() for vol in volumes]
+
class TestServiceNetwork(unittest.TestCase):
def setUp(self):
@@ -1223,10 +1363,8 @@ class ServiceVolumesTest(unittest.TestCase):
number=1,
)
- assert set(self.mock_client.create_host_config.call_args[1]['binds']) == set([
- '/host/path:/data1:rw',
- '/host/path:/data2:rw',
- ])
+ assert set(self.mock_client.create_host_config.call_args[1]['binds']) == {'/host/path:/data1:rw',
+ '/host/path:/data2:rw'}
def test_get_container_create_options_with_different_host_path_in_container_json(self):
service = Service(
@@ -1280,7 +1418,7 @@ class ServiceVolumesTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
warn_on_masked_volume(volumes_option, container_volumes, service)
- assert not mock_log.warn.called
+ assert not mock_log.warning.called
def test_warn_on_masked_volume_when_masked(self):
volumes_option = [VolumeSpec('/home/user', '/path', 'rw')]
@@ -1293,7 +1431,7 @@ class ServiceVolumesTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
warn_on_masked_volume(volumes_option, container_volumes, service)
- mock_log.warn.assert_called_once_with(mock.ANY)
+ mock_log.warning.assert_called_once_with(mock.ANY)
def test_warn_on_masked_no_warning_with_same_path(self):
volumes_option = [VolumeSpec('/home/user', '/path', 'rw')]
@@ -1303,7 +1441,7 @@ class ServiceVolumesTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
warn_on_masked_volume(volumes_option, container_volumes, service)
- assert not mock_log.warn.called
+ assert not mock_log.warning.called
def test_warn_on_masked_no_warning_with_container_only_option(self):
volumes_option = [VolumeSpec(None, '/path', 'rw')]
@@ -1315,7 +1453,7 @@ class ServiceVolumesTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
warn_on_masked_volume(volumes_option, container_volumes, service)
- assert not mock_log.warn.called
+ assert not mock_log.warning.called
def test_create_with_special_volume_mode(self):
self.mock_client.inspect_image.return_value = {'Id': 'imageid'}
@@ -1387,3 +1525,28 @@ class ServiceSecretTest(unittest.TestCase):
assert volumes[0].source == secret1['file']
assert volumes[0].target == '{}/{}'.format(SECRETS_PATH, secret1['secret'].source)
+
+
+class RewriteBuildPathTest(unittest.TestCase):
+ @mock.patch('compose.service.IS_WINDOWS_PLATFORM', True)
+ def test_rewrite_url_no_prefix(self):
+ urls = [
+ 'http://test.com',
+ 'https://test.com',
+ 'git://test.com',
+ 'github.com/test/test',
+ 'git@test.com',
+ ]
+ for u in urls:
+ assert rewrite_build_path(u) == u
+
+ @mock.patch('compose.service.IS_WINDOWS_PLATFORM', True)
+ def test_rewrite_windows_path(self):
+ assert rewrite_build_path('C:\\context') == WINDOWS_LONGPATH_PREFIX + 'C:\\context'
+ assert rewrite_build_path(
+ rewrite_build_path('C:\\context')
+ ) == rewrite_build_path('C:\\context')
+
+ @mock.patch('compose.service.IS_WINDOWS_PLATFORM', False)
+ def test_rewrite_unix_path(self):
+ assert rewrite_build_path('/context') == '/context'
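The three tests above encode rewrite_build_path's contract: on Windows, local build contexts get the long-path prefix; remote contexts (URLs, github.com/ shorthand, git@ SSH) pass through; the rewrite is idempotent; and on non-Windows platforms it is a no-op. A hedged sketch of that contract — not compose's actual implementation, and the module-level flag stands in for the per-test mock.patch:

WINDOWS_LONGPATH_PREFIX = '\\\\?\\'   # the literal \\?\ prefix
IS_WINDOWS_PLATFORM = True            # patched per-test in the real suite

def rewrite_build_path(path):
    if not IS_WINDOWS_PLATFORM:
        return path
    if '://' in path or path.startswith(('github.com/', 'git@')):
        return path                   # remote contexts are left alone
    if path.startswith(WINDOWS_LONGPATH_PREFIX):
        return path                   # already rewritten: idempotent
    return WINDOWS_LONGPATH_PREFIX + path

assert rewrite_build_path('C:\\context') == '\\\\?\\C:\\context'
assert rewrite_build_path(rewrite_build_path('C:\\context')) == '\\\\?\\C:\\context'
assert rewrite_build_path('https://test.com') == 'https://test.com'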
diff --git a/tests/unit/split_buffer_test.py b/tests/unit/split_buffer_test.py
index dedd4ee3..d6b5b884 100644
--- a/tests/unit/split_buffer_test.py
+++ b/tests/unit/split_buffer_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from .. import unittest
from compose.utils import split_buffer
@@ -39,7 +36,7 @@ class SplitBufferTest(unittest.TestCase):
self.assert_produces(reader, ['abc\n', 'd'])
def test_preserves_unicode_sequences_within_lines(self):
- string = u"a\u2022c\n"
+ string = "a\u2022c\n"
def reader():
yield string.encode('utf-8')
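For context on why this test encodes the string before yielding it: split_buffer reassembles complete lines from arbitrarily chunked byte streams, so a multi-byte UTF-8 sequence must survive a chunk boundary. A usage sketch (assumes a compose checkout on the import path; the expected output is inferred from the tests above):

from compose.utils import split_buffer

def chunked():
    yield 'a\u2022'.encode('utf-8')  # multi-byte bullet, split from its newline
    yield b'c\nrest'

print(list(split_buffer(chunked())))  # expected: ['a\u2022c\n', 'rest']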
diff --git a/tests/unit/timeparse_test.py b/tests/unit/timeparse_test.py
index 9915932c..e56595f1 100644
--- a/tests/unit/timeparse_test.py
+++ b/tests/unit/timeparse_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from compose import timeparse
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 84becb97..3052e4d8 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -1,11 +1,7 @@
-# encoding: utf-8
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from compose import utils
-class TestJsonSplitter(object):
+class TestJsonSplitter:
def test_json_splitter_no_object(self):
data = '{"foo": "bar'
@@ -20,7 +16,7 @@ class TestJsonSplitter(object):
assert utils.json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')
-class TestStreamAsText(object):
+class TestStreamAsText:
def test_stream_with_non_utf_unicode_character(self):
stream = [b'\xed\xf3\xf3']
@@ -28,12 +24,12 @@ class TestStreamAsText(object):
assert output == '���'
def test_stream_with_utf_character(self):
- stream = ['ěĝ'.encode('utf-8')]
+ stream = ['ěĝ'.encode()]
output, = utils.stream_as_text(stream)
assert output == 'ěĝ'
-class TestJsonStream(object):
+class TestJsonStream:
def test_with_falsy_entries(self):
stream = [
@@ -62,9 +58,17 @@ class TestJsonStream(object):
]
-class TestParseBytes(object):
+class TestParseBytes:
def test_parse_bytes(self):
assert utils.parse_bytes('123kb') == 123 * 1024
assert utils.parse_bytes(123) == 123
assert utils.parse_bytes('foobar') is None
assert utils.parse_bytes('123') == 123
+
+
+class TestMoreItertools:
+ def test_unique_everseen(self):
+ unique = utils.unique_everseen
+ assert list(unique([2, 1, 2, 1])) == [2, 1]
+ assert list(unique([2, 1, 2, 1], hash)) == [2, 1]
+ assert list(unique([2, 1, 2, 1], lambda x: 'key_%s' % x)) == [2, 1]
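The new TestMoreItertools class exercises an order-preserving deduplication helper that, judging by the tests, takes an optional key function as its second positional argument. The classic itertools-docs recipe matches that behavior; compose's actual helper may differ in detail:

def unique_everseen(iterable, key=None):
    # Yield elements in order, skipping any whose (keyed) value was
    # already seen.
    seen = set()
    for element in iterable:
        k = element if key is None else key(element)
        if k not in seen:
            seen.add(k)
            yield element

assert list(unique_everseen([2, 1, 2, 1])) == [2, 1]
assert list(unique_everseen([2, 1, 2, 1], hash)) == [2, 1]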
diff --git a/tests/unit/volume_test.py b/tests/unit/volume_test.py
index 457d8558..0dfbfcd4 100644
--- a/tests/unit/volume_test.py
+++ b/tests/unit/volume_test.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import docker
import pytest
@@ -13,7 +10,7 @@ def mock_client():
return mock.create_autospec(docker.APIClient)
-class TestVolume(object):
+class TestVolume:
def test_remove_local_volume(self, mock_client):
vol = volume.Volume(mock_client, 'foo', 'project')
diff --git a/tox.ini b/tox.ini
index 33347df2..12530d19 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27,py36,pre-commit
+envlist = py37,py39,pre-commit
[testenv]
usedevelop=True
@@ -15,6 +15,7 @@ passenv =
setenv =
HOME=/tmp
deps =
+ -rrequirements-indirect.txt
-rrequirements.txt
-rrequirements-dev.txt
commands =
@@ -32,7 +33,7 @@ deps =
pre-commit
commands =
pre-commit install
- pre-commit run --all-files
+ pre-commit run --all-files --show-diff-on-failure
# Coverage configuration
[run]
@@ -49,7 +50,7 @@ directory = coverage-html
[flake8]
max-line-length = 105
# Set this high for now
-max-complexity = 11
+max-complexity = 12
exclude = compose/packages
[pytest]