summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.circleci/config.yml2
-rw-r--r--.fossa.yml14
-rw-r--r--CHANGELOG.md68
-rw-r--r--Dockerfile87
-rw-r--r--Dockerfile.armhf39
-rw-r--r--Dockerfile.run19
-rw-r--r--Jenkinsfile55
-rw-r--r--MAINTAINERS23
-rw-r--r--appveyor.yml6
-rw-r--r--compose/__init__.py2
-rw-r--r--compose/bundle.py49
-rw-r--r--compose/cli/command.py36
-rw-r--r--compose/cli/docker_client.py2
-rw-r--r--compose/cli/main.py48
-rw-r--r--compose/config/config.py63
-rw-r--r--compose/config/environment.py12
-rw-r--r--compose/config/serialize.py20
-rw-r--r--compose/network.py6
-rw-r--r--compose/project.py42
-rw-r--r--compose/service.py55
-rw-r--r--compose/volume.py4
-rw-r--r--contrib/completion/bash/docker-compose5
-rwxr-xr-xcontrib/completion/zsh/_docker-compose1
-rwxr-xr-xcontrib/migration/migrate-compose-file-v1-to-v2.py6
-rwxr-xr-xdocker-compose-entrypoint.sh20
-rw-r--r--docs/README.md6
-rwxr-xr-xpyinstaller/ldd13
-rw-r--r--requirements-build.txt2
-rw-r--r--requirements-dev.txt1
-rw-r--r--requirements.txt8
-rwxr-xr-xscript/build/image11
-rwxr-xr-xscript/build/linux18
-rwxr-xr-xscript/build/linux-entrypoint42
-rwxr-xr-xscript/build/osx5
-rwxr-xr-xscript/build/test-image15
-rw-r--r--script/build/windows.ps16
-rwxr-xr-xscript/build/write-git-sha2
-rw-r--r--script/release/README.md2
-rwxr-xr-xscript/release/release.py8
-rw-r--r--script/release/release/const.py1
-rw-r--r--script/release/release/images.py127
-rw-r--r--script/release/release/repository.py3
-rwxr-xr-xscript/run/run.sh4
-rwxr-xr-xscript/setup/osx24
-rwxr-xr-xscript/test/all5
-rwxr-xr-xscript/test/ci3
-rwxr-xr-xscript/test/default7
-rw-r--r--setup.py4
-rw-r--r--tests/acceptance/cli_test.py93
-rw-r--r--tests/fixtures/default-env-file/.env24
-rw-r--r--tests/fixtures/environment-exec/docker-compose.yml2
-rw-r--r--tests/fixtures/links-composefile/docker-compose.yml6
-rw-r--r--tests/fixtures/logs-composefile/docker-compose.yml4
-rw-r--r--tests/fixtures/logs-restart-composefile/docker-compose.yml2
-rw-r--r--tests/fixtures/scale/docker-compose.yml8
-rw-r--r--tests/fixtures/simple-composefile/docker-compose.yml2
-rw-r--r--tests/fixtures/simple-composefile/pull-with-build.yml11
-rw-r--r--tests/fixtures/simple-dockerfile/Dockerfile2
-rw-r--r--tests/fixtures/v2-simple/docker-compose.yml4
-rw-r--r--tests/fixtures/v2-simple/one-container.yml5
-rw-r--r--tests/integration/environment_test.py52
-rw-r--r--tests/integration/project_test.py55
-rw-r--r--tests/integration/service_test.py6
-rw-r--r--tests/integration/state_test.py139
-rw-r--r--tests/unit/bundle_test.py19
-rw-r--r--tests/unit/cli/docker_client_test.py2
-rw-r--r--tests/unit/cli/main_test.py2
-rw-r--r--tests/unit/config/config_test.py72
-rw-r--r--tests/unit/network_test.py4
-rw-r--r--tests/unit/project_test.py30
-rw-r--r--tests/unit/service_test.py24
-rw-r--r--tox.ini2
72 files changed, 400 insertions, 1151 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 906b1c0d..08f8c42c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -13,7 +13,7 @@ jobs:
command: sudo pip install --upgrade tox==2.1.1 virtualenv==16.2.0
- run:
name: unit tests
- command: tox -e py27,py37 -- tests/unit
+ command: tox -e py27,py36,py37 -- tests/unit
build-osx-binary:
macos:
diff --git a/.fossa.yml b/.fossa.yml
new file mode 100644
index 00000000..b50761ef
--- /dev/null
+++ b/.fossa.yml
@@ -0,0 +1,14 @@
+# Generated by FOSSA CLI (https://github.com/fossas/fossa-cli)
+# Visit https://fossa.io to learn more
+
+version: 2
+cli:
+ server: https://app.fossa.io
+ fetcher: custom
+ project: git@github.com:docker/compose
+analyze:
+ modules:
+ - name: .
+ type: pip
+ target: .
+ path: .
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0a512c35..8f777c6c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,76 +1,14 @@
Change log
==========
-1.25.0 (2019-05-22)
+1.24.1 (2019-06-24)
-------------------
-### Features
-
-- Add tag `docker-compose:latest`
-
-- Add `docker-compose:<version>-alpine` image/tag
-
-- Add `docker-compose:<version>-debian` image/tag
-
-- Bumped `docker-py` 4.0.1
-
-- Supports `requests` up to 2.22.0 version
-
-- Drops empty tag on `build:cache_from`
-
-- `Dockerfile` now generates `libmusl` binaries for alpine
-
-- Only pull images that can't be built
-
-- Attribute `scale` can now accept `0` as a value
-
-- Added `--quiet` build flag
-
-- Added `--no-interpolate` to `docker-compose config`
-
-- Bump OpenSSL for macOS build (`1.1.0j` to `1.1.1a`)
-
-- Added `--no-rm` to `build` command
-
-- Added support for `credential_spec`
-
-- Resolve digests without pulling image
-
-- Upgrade `pyyaml` to `4.2b1`
-
-- Lowered severity to `warning` if `down` tries to remove nonexisting image
-
-- Use improved API fields for project events when possible
-
-- Update `setup.py` for modern `pypi/setuptools` and remove `pandoc` dependencies
-
-- Removed `Dockerfile.armhf` which is no longer needed
-
### Bugfixes
-- Fixed `--remove-orphans` when used with `up --no-start`
-
-- Fixed `docker-compose ps --all`
-
-- Fixed `depends_on` dependency recreation behavior
-
-- Fixed bash completion for `build --memory`
-
-- Fixed misleading warning concerning env vars when performing an `exec` command
-
-- Fixed failure check in parallel_execute_watch
-
-- Fixed race condition after pulling image
-
-- Fixed error on duplicate mount points.
-
-- Fixed merge on networks section
-
-- Always connect Compose container to `stdin`
-
-- Fixed the presentation of failed services on 'docker-compose start' when containers are not available
+- Fixed acceptance tests
-1.24.0 (2019-03-28)
+1.24.0 (2019-03-22)
-------------------
### Features
diff --git a/Dockerfile b/Dockerfile
index 1a3c501a..c5e7c815 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,71 +1,36 @@
-ARG DOCKER_VERSION=18.09.5
-ARG PYTHON_VERSION=3.7.3
-ARG BUILD_ALPINE_VERSION=3.9
-ARG BUILD_DEBIAN_VERSION=slim-stretch
-ARG RUNTIME_ALPINE_VERSION=3.9.3
-ARG RUNTIME_DEBIAN_VERSION=stretch-20190326-slim
+FROM docker:18.06.1 as docker
+FROM python:3.6
-ARG BUILD_PLATFORM=alpine
+RUN set -ex; \
+ apt-get update -qq; \
+ apt-get install -y \
+ locales \
+ python-dev \
+ git
-FROM docker:${DOCKER_VERSION} AS docker-cli
+COPY --from=docker /usr/local/bin/docker /usr/local/bin/docker
-FROM python:${PYTHON_VERSION}-alpine${BUILD_ALPINE_VERSION} AS build-alpine
-RUN apk add --no-cache \
- bash \
- build-base \
- ca-certificates \
- curl \
- gcc \
- git \
- libc-dev \
- libffi-dev \
- libgcc \
- make \
- musl-dev \
- openssl \
- openssl-dev \
- python2 \
- python2-dev \
- zlib-dev
-ENV BUILD_BOOTLOADER=1
+# Python3 requires a valid locale
+RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen
+ENV LANG en_US.UTF-8
-FROM python:${PYTHON_VERSION}-${BUILD_DEBIAN_VERSION} AS build-debian
-RUN apt-get update && apt-get install -y \
- curl \
- gcc \
- git \
- libc-dev \
- libgcc-6-dev \
- make \
- openssl \
- python2.7-dev
-
-FROM build-${BUILD_PLATFORM} AS build
-COPY docker-compose-entrypoint.sh /usr/local/bin/
-ENTRYPOINT ["sh", "/usr/local/bin/docker-compose-entrypoint.sh"]
-COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
+RUN useradd -d /home/user -m -s /bin/bash user
WORKDIR /code/
+
# FIXME(chris-crone): virtualenv 16.3.0 breaks build, force 16.2.0 until fixed
RUN pip install virtualenv==16.2.0
-RUN pip install tox==2.9.1
+RUN pip install tox==2.1.1
-COPY requirements.txt .
-COPY requirements-dev.txt .
-COPY .pre-commit-config.yaml .
-COPY tox.ini .
-COPY setup.py .
-COPY README.md .
-COPY compose compose/
+ADD requirements.txt /code/
+ADD requirements-dev.txt /code/
+ADD .pre-commit-config.yaml /code/
+ADD setup.py /code/
+ADD tox.ini /code/
+ADD compose /code/compose/
+ADD README.md /code/
RUN tox --notest
-COPY . .
-ARG GIT_COMMIT=unknown
-ENV DOCKER_COMPOSE_GITSHA=$GIT_COMMIT
-RUN script/build/linux-entrypoint
-FROM alpine:${RUNTIME_ALPINE_VERSION} AS runtime-alpine
-FROM debian:${RUNTIME_DEBIAN_VERSION} AS runtime-debian
-FROM runtime-${BUILD_PLATFORM} AS runtime
-COPY docker-compose-entrypoint.sh /usr/local/bin/
-ENTRYPOINT ["sh", "/usr/local/bin/docker-compose-entrypoint.sh"]
-COPY --from=docker-cli /usr/local/bin/docker /usr/local/bin/docker
-COPY --from=build /usr/local/bin/docker-compose /usr/local/bin/docker-compose
+ADD . /code/
+RUN chown -R user /code/
+
+ENTRYPOINT ["/code/.tox/py36/bin/docker-compose"]
diff --git a/Dockerfile.armhf b/Dockerfile.armhf
new file mode 100644
index 00000000..ee2ce894
--- /dev/null
+++ b/Dockerfile.armhf
@@ -0,0 +1,39 @@
+FROM python:3.6
+
+RUN set -ex; \
+ apt-get update -qq; \
+ apt-get install -y \
+ locales \
+ curl \
+ python-dev \
+ git
+
+RUN curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/armhf/docker-17.12.0-ce.tgz" && \
+ SHA256=f8de6378dad825b9fd5c3c2f949e791d22f918623c27a72c84fd6975a0e5d0a2; \
+ echo "${SHA256} dockerbins.tgz" | sha256sum -c - && \
+ tar xvf dockerbins.tgz docker/docker --strip-components 1 && \
+ mv docker /usr/local/bin/docker && \
+ chmod +x /usr/local/bin/docker && \
+ rm dockerbins.tgz
+
+# Python3 requires a valid locale
+RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen
+ENV LANG en_US.UTF-8
+
+RUN useradd -d /home/user -m -s /bin/bash user
+WORKDIR /code/
+
+RUN pip install tox==2.1.1
+
+ADD requirements.txt /code/
+ADD requirements-dev.txt /code/
+ADD .pre-commit-config.yaml /code/
+ADD setup.py /code/
+ADD tox.ini /code/
+ADD compose /code/compose/
+RUN tox --notest
+
+ADD . /code/
+RUN chown -R user /code/
+
+ENTRYPOINT ["/code/.tox/py36/bin/docker-compose"]
diff --git a/Dockerfile.run b/Dockerfile.run
new file mode 100644
index 00000000..ccc86ea9
--- /dev/null
+++ b/Dockerfile.run
@@ -0,0 +1,19 @@
+FROM docker:18.06.1 as docker
+FROM alpine:3.8
+
+ENV GLIBC 2.28-r0
+
+RUN apk update && apk add --no-cache openssl ca-certificates curl libgcc && \
+ curl -fsSL -o /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub && \
+ curl -fsSL -o glibc-$GLIBC.apk https://github.com/sgerrand/alpine-pkg-glibc/releases/download/$GLIBC/glibc-$GLIBC.apk && \
+ apk add --no-cache glibc-$GLIBC.apk && \
+ ln -s /lib/libz.so.1 /usr/glibc-compat/lib/ && \
+ ln -s /lib/libc.musl-x86_64.so.1 /usr/glibc-compat/lib && \
+ ln -s /usr/lib/libgcc_s.so.1 /usr/glibc-compat/lib && \
+ rm /etc/apk/keys/sgerrand.rsa.pub glibc-$GLIBC.apk && \
+ apk del curl
+
+COPY --from=docker /usr/local/bin/docker /usr/local/bin/docker
+COPY dist/docker-compose-Linux-x86_64 /usr/local/bin/docker-compose
+
+ENTRYPOINT ["docker-compose"]
diff --git a/Jenkinsfile b/Jenkinsfile
index 4de276ad..04f5cfbd 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,38 +1,29 @@
#!groovy
-def buildImage = { String baseImage ->
- def image
+def image
+
+def buildImage = { ->
wrappedNode(label: "ubuntu && !zfs", cleanWorkspace: true) {
- stage("build image for \"${baseImage}\"") {
+ stage("build image") {
checkout(scm)
- def imageName = "dockerbuildbot/compose:${baseImage}-${gitCommit()}"
+ def imageName = "dockerbuildbot/compose:${gitCommit()}"
image = docker.image(imageName)
try {
image.pull()
} catch (Exception exc) {
- sh """GIT_COMMIT=\$(script/build/write-git-sha) && \\
- docker build -t ${imageName} \\
- --target build \\
- --build-arg BUILD_PLATFORM="${baseImage}" \\
- --build-arg GIT_COMMIT="${GIT_COMMIT}" \\
- .\\
- """
- sh "docker push ${imageName}"
- echo "${imageName}"
- return imageName
+ image = docker.build(imageName, ".")
+ image.push()
}
}
}
- echo "image.id: ${image.id}"
- return image.id
}
-def get_versions = { String imageId, int number ->
+def get_versions = { int number ->
def docker_versions
wrappedNode(label: "ubuntu && !zfs") {
def result = sh(script: """docker run --rm \\
--entrypoint=/code/.tox/py27/bin/python \\
- ${imageId} \\
+ ${image.id} \\
/code/script/test/versions.py -n ${number} docker/docker-ce recent
""", returnStdout: true
)
@@ -44,11 +35,9 @@ def get_versions = { String imageId, int number ->
def runTests = { Map settings ->
def dockerVersions = settings.get("dockerVersions", null)
def pythonVersions = settings.get("pythonVersions", null)
- def baseImage = settings.get("baseImage", null)
- def imageName = settings.get("image", null)
if (!pythonVersions) {
- throw new Exception("Need Python versions to test. e.g.: `runTests(pythonVersions: 'py27,py37')`")
+ throw new Exception("Need Python versions to test. e.g.: `runTests(pythonVersions: 'py27,py36')`")
}
if (!dockerVersions) {
throw new Exception("Need Docker versions to test. e.g.: `runTests(dockerVersions: 'all')`")
@@ -56,7 +45,7 @@ def runTests = { Map settings ->
{ ->
wrappedNode(label: "ubuntu && !zfs", cleanWorkspace: true) {
- stage("test python=${pythonVersions} / docker=${dockerVersions} / baseImage=${baseImage}") {
+ stage("test python=${pythonVersions} / docker=${dockerVersions}") {
checkout(scm)
def storageDriver = sh(script: 'docker info | awk -F \': \' \'$1 == "Storage Driver" { print $2; exit }\'', returnStdout: true).trim()
echo "Using local system's storage driver: ${storageDriver}"
@@ -66,13 +55,13 @@ def runTests = { Map settings ->
--privileged \\
--volume="\$(pwd)/.git:/code/.git" \\
--volume="/var/run/docker.sock:/var/run/docker.sock" \\
- -e "TAG=${imageName}" \\
+ -e "TAG=${image.id}" \\
-e "STORAGE_DRIVER=${storageDriver}" \\
-e "DOCKER_VERSIONS=${dockerVersions}" \\
-e "BUILD_NUMBER=\$BUILD_TAG" \\
-e "PY_TEST_VERSIONS=${pythonVersions}" \\
--entrypoint="script/test/ci" \\
- ${imageName} \\
+ ${image.id} \\
--verbose
"""
}
@@ -80,16 +69,16 @@ def runTests = { Map settings ->
}
}
+buildImage()
+
def testMatrix = [failFast: true]
-def baseImages = ['alpine', 'debian']
-def pythonVersions = ['py27', 'py37']
-baseImages.each { baseImage ->
- def imageName = buildImage(baseImage)
- get_versions(imageName, 2).each { dockerVersion ->
- pythonVersions.each { pyVersion ->
- testMatrix["${baseImage}_${dockerVersion}_${pyVersion}"] = runTests([baseImage: baseImage, image: imageName, dockerVersions: dockerVersion, pythonVersions: pyVersion])
- }
- }
+def docker_versions = get_versions(2)
+
+for (int i = 0; i < docker_versions.length; i++) {
+ def dockerVersion = docker_versions[i]
+ testMatrix["${dockerVersion}_py27"] = runTests([dockerVersions: dockerVersion, pythonVersions: "py27"])
+ testMatrix["${dockerVersion}_py36"] = runTests([dockerVersions: dockerVersion, pythonVersions: "py36"])
+ testMatrix["${dockerVersion}_py37"] = runTests([dockerVersions: dockerVersion, pythonVersions: "py37"])
}
parallel(testMatrix)
diff --git a/MAINTAINERS b/MAINTAINERS
index 5d4bd6a6..7aedd46e 100644
--- a/MAINTAINERS
+++ b/MAINTAINERS
@@ -11,8 +11,9 @@
[Org]
[Org."Core maintainers"]
people = [
- "rumpl",
- "ulyssessouza",
+ "mefyl",
+ "mnottale",
+ "shin-",
]
[Org.Alumni]
people = [
@@ -33,10 +34,6 @@
# including muti-file support, variable interpolation, secrets
# emulation and many more
"dnephin",
-
- "shin-",
- "mefyl",
- "mnottale",
]
[people]
@@ -77,17 +74,7 @@
Email = "mazz@houseofmnowster.com"
GitHub = "mnowster"
- [people.rumpl]
- Name = "Djordje Lukic"
- Email = "djordje.lukic@docker.com"
- GitHub = "rumpl"
-
- [people.shin-]
+ [People.shin-]
Name = "Joffrey F"
- Email = "f.joffrey@gmail.com"
+ Email = "joffrey@docker.com"
GitHub = "shin-"
-
- [people.ulyssessouza]
- Name = "Ulysses Domiciano Souza"
- Email = "ulysses.souza@docker.com"
- GitHub = "ulyssessouza"
diff --git a/appveyor.yml b/appveyor.yml
index 04a40e9c..da80d01d 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -2,15 +2,15 @@
version: '{branch}-{build}'
install:
- - "SET PATH=C:\\Python37-x64;C:\\Python37-x64\\Scripts;%PATH%"
+ - "SET PATH=C:\\Python36-x64;C:\\Python36-x64\\Scripts;%PATH%"
- "python --version"
- - "pip install tox==2.9.1 virtualenv==16.2.0"
+ - "pip install tox==2.9.1 virtualenv==15.1.0"
# Build the binary after tests
build: false
test_script:
- - "tox -e py27,py37 -- tests/unit"
+ - "tox -e py27,py36,py37 -- tests/unit"
- ps: ".\\script\\build\\windows.ps1"
artifacts:
diff --git a/compose/__init__.py b/compose/__init__.py
index 55060583..6a40e150 100644
--- a/compose/__init__.py
+++ b/compose/__init__.py
@@ -1,4 +1,4 @@
from __future__ import absolute_import
from __future__ import unicode_literals
-__version__ = '1.25.0-rc1'
+__version__ = '1.24.1'
diff --git a/compose/bundle.py b/compose/bundle.py
index 77cb37aa..937a3708 100644
--- a/compose/bundle.py
+++ b/compose/bundle.py
@@ -95,10 +95,19 @@ def get_image_digest(service, allow_push=False):
if separator == '@':
return service.options['image']
- digest = get_digest(service)
+ try:
+ image = service.image()
+ except NoSuchImageError:
+ action = 'build' if 'build' in service.options else 'pull'
+ raise UserError(
+ "Image not found for service '{service}'. "
+ "You might need to run `docker-compose {action} {service}`."
+ .format(service=service.name, action=action))
- if digest:
- return digest
+ if image['RepoDigests']:
+ # TODO: pick a digest based on the image tag if there are multiple
+ # digests
+ return image['RepoDigests'][0]
if 'build' not in service.options:
raise NeedsPull(service.image_name, service.name)
@@ -109,32 +118,6 @@ def get_image_digest(service, allow_push=False):
return push_image(service)
-def get_digest(service):
- digest = None
- try:
- image = service.image()
- # TODO: pick a digest based on the image tag if there are multiple
- # digests
- if image['RepoDigests']:
- digest = image['RepoDigests'][0]
- except NoSuchImageError:
- try:
- # Fetch the image digest from the registry
- distribution = service.get_image_registry_data()
-
- if distribution['Descriptor']['digest']:
- digest = '{image_name}@{digest}'.format(
- image_name=service.image_name,
- digest=distribution['Descriptor']['digest']
- )
- except NoSuchImageError:
- raise UserError(
- "Digest not found for service '{service}'. "
- "Repository does not exist or may require 'docker login'"
- .format(service=service.name))
- return digest
-
-
def push_image(service):
try:
digest = service.push()
@@ -164,10 +147,10 @@ def push_image(service):
def to_bundle(config, image_digests):
if config.networks:
- log.warning("Unsupported top level key 'networks' - ignoring")
+ log.warn("Unsupported top level key 'networks' - ignoring")
if config.volumes:
- log.warning("Unsupported top level key 'volumes' - ignoring")
+ log.warn("Unsupported top level key 'volumes' - ignoring")
config = denormalize_config(config)
@@ -192,7 +175,7 @@ def convert_service_to_bundle(name, service_dict, image_digest):
continue
if key not in SUPPORTED_KEYS:
- log.warning("Unsupported key '{}' in services.{} - ignoring".format(key, name))
+ log.warn("Unsupported key '{}' in services.{} - ignoring".format(key, name))
continue
if key == 'environment':
@@ -239,7 +222,7 @@ def make_service_networks(name, service_dict):
for network_name, network_def in get_network_defs_for_service(service_dict).items():
for key in network_def.keys():
- log.warning(
+ log.warn(
"Unsupported key '{}' in services.{}.networks.{} - ignoring"
.format(key, name, network_name))
diff --git a/compose/cli/command.py b/compose/cli/command.py
index 21ab9a39..339a65c5 100644
--- a/compose/cli/command.py
+++ b/compose/cli/command.py
@@ -21,27 +21,10 @@ from .utils import get_version_info
log = logging.getLogger(__name__)
-SILENT_COMMANDS = set((
- 'events',
- 'exec',
- 'kill',
- 'logs',
- 'pause',
- 'ps',
- 'restart',
- 'rm',
- 'start',
- 'stop',
- 'top',
- 'unpause',
-))
-
-
-def project_from_options(project_dir, options, additional_options={}):
+
+def project_from_options(project_dir, options):
override_dir = options.get('--project-directory')
- environment_file = options.get('--env-file')
- environment = Environment.from_env_file(override_dir or project_dir, environment_file)
- environment.silent = options.get('COMMAND', None) in SILENT_COMMANDS
+ environment = Environment.from_env_file(override_dir or project_dir)
set_parallel_limit(environment)
host = options.get('--host')
@@ -57,7 +40,6 @@ def project_from_options(project_dir, options, additional_options={}):
environment=environment,
override_dir=override_dir,
compatibility=options.get('--compatibility'),
- interpolate=(not additional_options.get('--no-interpolate'))
)
@@ -77,17 +59,15 @@ def set_parallel_limit(environment):
parallel.GlobalLimit.set_global_limit(parallel_limit)
-def get_config_from_options(base_dir, options, additional_options={}):
+def get_config_from_options(base_dir, options):
override_dir = options.get('--project-directory')
- environment_file = options.get('--env-file')
- environment = Environment.from_env_file(override_dir or base_dir, environment_file)
+ environment = Environment.from_env_file(override_dir or base_dir)
config_path = get_config_path_from_options(
base_dir, options, environment
)
return config.load(
config.find(base_dir, config_path, environment, override_dir),
- options.get('--compatibility'),
- not additional_options.get('--no-interpolate')
+ options.get('--compatibility')
)
@@ -125,14 +105,14 @@ def get_client(environment, verbose=False, version=None, tls_config=None, host=N
def get_project(project_dir, config_path=None, project_name=None, verbose=False,
host=None, tls_config=None, environment=None, override_dir=None,
- compatibility=False, interpolate=True):
+ compatibility=False):
if not environment:
environment = Environment.from_env_file(project_dir)
config_details = config.find(project_dir, config_path, environment, override_dir)
project_name = get_project_name(
config_details.working_dir, project_name, environment
)
- config_data = config.load(config_details, compatibility, interpolate)
+ config_data = config.load(config_details, compatibility)
api_version = environment.get(
'COMPOSE_API_VERSION',
diff --git a/compose/cli/docker_client.py b/compose/cli/docker_client.py
index a57a69b5..a01704fd 100644
--- a/compose/cli/docker_client.py
+++ b/compose/cli/docker_client.py
@@ -31,7 +31,7 @@ def get_tls_version(environment):
tls_attr_name = "PROTOCOL_{}".format(compose_tls_version)
if not hasattr(ssl, tls_attr_name):
- log.warning(
+ log.warn(
'The "{}" protocol is unavailable. You may need to update your '
'version of Python or OpenSSL. Falling back to TLSv1 (default).'
.format(compose_tls_version)
diff --git a/compose/cli/main.py b/compose/cli/main.py
index e2c04bd2..78960179 100644
--- a/compose/cli/main.py
+++ b/compose/cli/main.py
@@ -208,7 +208,6 @@ class TopLevelCommand(object):
(default: the path of the Compose file)
--compatibility If set, Compose will attempt to convert keys
in v3 files to their non-Swarm equivalent
- --env-file PATH Specify an alternate environment file
Commands:
build Build or rebuild services
@@ -261,12 +260,10 @@ class TopLevelCommand(object):
--compress Compress the build context using gzip.
--force-rm Always remove intermediate containers.
--no-cache Do not use cache when building the image.
- --no-rm Do not remove intermediate containers after a successful build.
--pull Always attempt to pull a newer version of the image.
-m, --memory MEM Sets memory limit for the build container.
--build-arg key=val Set build-time variables for services.
--parallel Build images in parallel.
- -q, --quiet Don't print anything to STDOUT
"""
service_names = options['SERVICE']
build_args = options.get('--build-arg', None)
@@ -276,8 +273,7 @@ class TopLevelCommand(object):
'--build-arg is only supported when services are specified for API version < 1.25.'
' Please use a Compose file version > 2.2 or specify which services to build.'
)
- environment_file = options.get('--env-file')
- environment = Environment.from_env_file(self.project_dir, environment_file)
+ environment = Environment.from_env_file(self.project_dir)
build_args = resolve_build_args(build_args, environment)
self.project.build(
@@ -286,11 +282,9 @@ class TopLevelCommand(object):
pull=bool(options.get('--pull', False)),
force_rm=bool(options.get('--force-rm', False)),
memory=options.get('--memory'),
- rm=not bool(options.get('--no-rm', False)),
build_args=build_args,
gzip=options.get('--compress', False),
parallel_build=options.get('--parallel', False),
- silent=options.get('--quiet', False)
)
def bundle(self, options):
@@ -333,7 +327,6 @@ class TopLevelCommand(object):
Options:
--resolve-image-digests Pin image tags to digests.
- --no-interpolate Don't interpolate environment variables
-q, --quiet Only validate the configuration, don't print
anything.
--services Print the service names, one per line.
@@ -343,12 +336,11 @@ class TopLevelCommand(object):
or use the wildcard symbol to display all services
"""
- additional_options = {'--no-interpolate': options.get('--no-interpolate')}
- compose_config = get_config_from_options('.', self.toplevel_options, additional_options)
+ compose_config = get_config_from_options('.', self.toplevel_options)
image_digests = None
if options['--resolve-image-digests']:
- self.project = project_from_options('.', self.toplevel_options, additional_options)
+ self.project = project_from_options('.', self.toplevel_options)
with errors.handle_connection_errors(self.project.client):
image_digests = image_digests_for_project(self.project)
@@ -365,14 +357,14 @@ class TopLevelCommand(object):
if options['--hash'] is not None:
h = options['--hash']
- self.project = project_from_options('.', self.toplevel_options, additional_options)
+ self.project = project_from_options('.', self.toplevel_options)
services = [svc for svc in options['--hash'].split(',')] if h != '*' else None
with errors.handle_connection_errors(self.project.client):
for service in self.project.get_services(services):
print('{} {}'.format(service.name, service.config_hash))
return
- print(serialize_config(compose_config, image_digests, not options['--no-interpolate']))
+ print(serialize_config(compose_config, image_digests))
def create(self, options):
"""
@@ -391,7 +383,7 @@ class TopLevelCommand(object):
"""
service_names = options['SERVICE']
- log.warning(
+ log.warn(
'The create command is deprecated. '
'Use the up command with the --no-start flag instead.'
)
@@ -429,10 +421,8 @@ class TopLevelCommand(object):
Compose file
-t, --timeout TIMEOUT Specify a shutdown timeout in seconds.
(default: 10)
- --env-file PATH Specify an alternate environment file
"""
- environment_file = options.get('--env-file')
- environment = Environment.from_env_file(self.project_dir, environment_file)
+ environment = Environment.from_env_file(self.project_dir)
ignore_orphans = environment.get_boolean('COMPOSE_IGNORE_ORPHANS')
if ignore_orphans and options['--remove-orphans']:
@@ -489,10 +479,8 @@ class TopLevelCommand(object):
-e, --env KEY=VAL Set environment variables (can be used multiple times,
not supported in API < 1.25)
-w, --workdir DIR Path to workdir directory for this command.
- --env-file PATH Specify an alternate environment file
"""
- environment_file = options.get('--env-file')
- environment = Environment.from_env_file(self.project_dir, environment_file)
+ environment = Environment.from_env_file(self.project_dir)
use_cli = not environment.get_boolean('COMPOSE_INTERACTIVE_NO_CLI')
index = int(options.get('--index'))
service = self.project.get_service(options['SERVICE'])
@@ -721,8 +709,7 @@ class TopLevelCommand(object):
if options['--all']:
containers = sorted(self.project.containers(service_names=options['SERVICE'],
- one_off=OneOffFilter.include, stopped=True),
- key=attrgetter('name'))
+ one_off=OneOffFilter.include, stopped=True))
else:
containers = sorted(
self.project.containers(service_names=options['SERVICE'], stopped=True) +
@@ -766,7 +753,7 @@ class TopLevelCommand(object):
--include-deps Also pull services declared as dependencies
"""
if options.get('--parallel'):
- log.warning('--parallel option is deprecated and will be removed in future versions.')
+ log.warn('--parallel option is deprecated and will be removed in future versions.')
self.project.pull(
service_names=options['SERVICE'],
ignore_pull_failures=options.get('--ignore-pull-failures'),
@@ -807,7 +794,7 @@ class TopLevelCommand(object):
-a, --all Deprecated - no effect.
"""
if options.get('--all'):
- log.warning(
+ log.warn(
'--all flag is obsolete. This is now the default behavior '
'of `docker-compose rm`'
)
@@ -917,7 +904,7 @@ class TopLevelCommand(object):
'Use the up command with the --scale flag instead.'
)
else:
- log.warning(
+ log.warn(
'The scale command is deprecated. '
'Use the up command with the --scale flag instead.'
)
@@ -1049,7 +1036,6 @@ class TopLevelCommand(object):
container. Implies --abort-on-container-exit.
--scale SERVICE=NUM Scale SERVICE to NUM instances. Overrides the
`scale` setting in the Compose file if present.
- --env-file PATH Specify an alternate environment file
"""
start_deps = not options['--no-deps']
always_recreate_deps = options['--always-recreate-deps']
@@ -1064,8 +1050,7 @@ class TopLevelCommand(object):
if detached and (cascade_stop or exit_value_from):
raise UserError("--abort-on-container-exit and -d cannot be combined.")
- environment_file = options.get('--env-file')
- environment = Environment.from_env_file(self.project_dir, environment_file)
+ environment = Environment.from_env_file(self.project_dir)
ignore_orphans = environment.get_boolean('COMPOSE_IGNORE_ORPHANS')
if ignore_orphans and remove_orphans:
@@ -1251,7 +1236,7 @@ def exitval_from_opts(options, project):
exit_value_from = options.get('--exit-code-from')
if exit_value_from:
if not options.get('--abort-on-container-exit'):
- log.warning('using --exit-code-from implies --abort-on-container-exit')
+ log.warn('using --exit-code-from implies --abort-on-container-exit')
options['--abort-on-container-exit'] = True
if exit_value_from not in [s.name for s in project.get_services()]:
log.error('No service named "%s" was found in your compose file.',
@@ -1358,8 +1343,7 @@ def run_one_off_container(container_options, project, service, options, toplevel
if options['--rm']:
project.client.remove_container(container.id, force=True, v=True)
- environment_file = options.get('--env-file')
- environment = Environment.from_env_file(project_dir, environment_file)
+ environment = Environment.from_env_file(project_dir)
use_cli = not environment.get_boolean('COMPOSE_INTERACTIVE_NO_CLI')
signals.set_signal_handler_to_shutdown()
@@ -1581,7 +1565,7 @@ def warn_for_swarm_mode(client):
# UCP does multi-node scheduling with traditional Compose files.
return
- log.warning(
+ log.warn(
"The Docker Engine you're using is running in swarm mode.\n\n"
"Compose does not use swarm mode to deploy services to multiple nodes in a swarm. "
"All containers will be scheduled on the current node.\n\n"
diff --git a/compose/config/config.py b/compose/config/config.py
index 5202d002..f3142d80 100644
--- a/compose/config/config.py
+++ b/compose/config/config.py
@@ -198,9 +198,9 @@ class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
version = self.config['version']
if isinstance(version, dict):
- log.warning('Unexpected type for "version" key in "{}". Assuming '
- '"version" is the name of a service, and defaulting to '
- 'Compose file version 1.'.format(self.filename))
+ log.warn('Unexpected type for "version" key in "{}". Assuming '
+ '"version" is the name of a service, and defaulting to '
+ 'Compose file version 1.'.format(self.filename))
return V1
if not isinstance(version, six.string_types):
@@ -318,8 +318,8 @@ def get_default_config_files(base_dir):
winner = candidates[0]
if len(candidates) > 1:
- log.warning("Found multiple config files with supported names: %s", ", ".join(candidates))
- log.warning("Using %s\n", winner)
+ log.warn("Found multiple config files with supported names: %s", ", ".join(candidates))
+ log.warn("Using %s\n", winner)
return [os.path.join(path, winner)] + get_default_override_file(path)
@@ -362,7 +362,7 @@ def check_swarm_only_config(service_dicts, compatibility=False):
def check_swarm_only_key(service_dicts, key):
services = [s for s in service_dicts if s.get(key)]
if services:
- log.warning(
+ log.warn(
warning_template.format(
services=", ".join(sorted(s['name'] for s in services)),
key=key
@@ -373,7 +373,7 @@ def check_swarm_only_config(service_dicts, compatibility=False):
check_swarm_only_key(service_dicts, 'configs')
-def load(config_details, compatibility=False, interpolate=True):
+def load(config_details, compatibility=False):
"""Load the configuration from a working directory and a list of
configuration files. Files are loaded in order, and merged on top
of each other to create the final configuration.
@@ -383,7 +383,7 @@ def load(config_details, compatibility=False, interpolate=True):
validate_config_version(config_details.config_files)
processed_files = [
- process_config_file(config_file, config_details.environment, interpolate=interpolate)
+ process_config_file(config_file, config_details.environment)
for config_file in config_details.config_files
]
config_details = config_details._replace(config_files=processed_files)
@@ -505,6 +505,7 @@ def load_services(config_details, config_file, compatibility=False):
def interpolate_config_section(config_file, config, section, environment):
+ validate_config_section(config_file.filename, config, section)
return interpolate_environment_variables(
config_file.version,
config,
@@ -513,60 +514,38 @@ def interpolate_config_section(config_file, config, section, environment):
)
-def process_config_section(config_file, config, section, environment, interpolate):
- validate_config_section(config_file.filename, config, section)
- if interpolate:
- return interpolate_environment_variables(
- config_file.version,
- config,
- section,
- environment
- )
- else:
- return config
-
-
-def process_config_file(config_file, environment, service_name=None, interpolate=True):
- services = process_config_section(
+def process_config_file(config_file, environment, service_name=None):
+ services = interpolate_config_section(
config_file,
config_file.get_service_dicts(),
'service',
- environment,
- interpolate,
- )
+ environment)
if config_file.version > V1:
processed_config = dict(config_file.config)
processed_config['services'] = services
- processed_config['volumes'] = process_config_section(
+ processed_config['volumes'] = interpolate_config_section(
config_file,
config_file.get_volumes(),
'volume',
- environment,
- interpolate,
- )
- processed_config['networks'] = process_config_section(
+ environment)
+ processed_config['networks'] = interpolate_config_section(
config_file,
config_file.get_networks(),
'network',
- environment,
- interpolate,
- )
+ environment)
if config_file.version >= const.COMPOSEFILE_V3_1:
- processed_config['secrets'] = process_config_section(
+ processed_config['secrets'] = interpolate_config_section(
config_file,
config_file.get_secrets(),
'secret',
- environment,
- interpolate,
- )
+ environment)
if config_file.version >= const.COMPOSEFILE_V3_3:
- processed_config['configs'] = process_config_section(
+ processed_config['configs'] = interpolate_config_section(
config_file,
config_file.get_configs(),
'config',
- environment,
- interpolate,
+ environment
)
else:
processed_config = services
@@ -921,7 +900,7 @@ def finalize_service(service_config, service_names, version, environment, compat
service_dict
)
if ignored_keys:
- log.warning(
+ log.warn(
'The following deploy sub-keys are not supported in compatibility mode and have'
' been ignored: {}'.format(', '.join(ignored_keys))
)
diff --git a/compose/config/environment.py b/compose/config/environment.py
index e72c8823..bd52758f 100644
--- a/compose/config/environment.py
+++ b/compose/config/environment.py
@@ -56,18 +56,14 @@ class Environment(dict):
def __init__(self, *args, **kwargs):
super(Environment, self).__init__(*args, **kwargs)
self.missing_keys = []
- self.silent = False
@classmethod
- def from_env_file(cls, base_dir, env_file=None):
+ def from_env_file(cls, base_dir):
def _initialize():
result = cls()
if base_dir is None:
return result
- if env_file:
- env_file_path = os.path.join(base_dir, env_file)
- else:
- env_file_path = os.path.join(base_dir, '.env')
+ env_file_path = os.path.join(base_dir, '.env')
try:
return cls(env_vars_from_file(env_file_path))
except EnvFileNotFound:
@@ -99,8 +95,8 @@ class Environment(dict):
return super(Environment, self).__getitem__(key.upper())
except KeyError:
pass
- if not self.silent and key not in self.missing_keys:
- log.warning(
+ if key not in self.missing_keys:
+ log.warn(
"The {} variable is not set. Defaulting to a blank string."
.format(key)
)
diff --git a/compose/config/serialize.py b/compose/config/serialize.py
index 5776ce95..8cb8a280 100644
--- a/compose/config/serialize.py
+++ b/compose/config/serialize.py
@@ -24,12 +24,14 @@ def serialize_dict_type(dumper, data):
def serialize_string(dumper, data):
- """ Ensure boolean-like strings are quoted in the output """
+ """ Ensure boolean-like strings are quoted in the output and escape $ characters """
representer = dumper.represent_str if six.PY3 else dumper.represent_unicode
if isinstance(data, six.binary_type):
data = data.decode('utf-8')
+ data = data.replace('$', '$$')
+
if data.lower() in ('y', 'n', 'yes', 'no', 'on', 'off', 'true', 'false'):
# Empirically only y/n appears to be an issue, but this might change
# depending on which PyYaml version is being used. Err on safe side.
@@ -37,12 +39,6 @@ def serialize_string(dumper, data):
return representer(data)
-def serialize_string_escape_dollar(dumper, data):
- """ Ensure boolean-like strings are quoted in the output and escape $ characters """
- data = data.replace('$', '$$')
- return serialize_string(dumper, data)
-
-
yaml.SafeDumper.add_representer(types.MountSpec, serialize_dict_type)
yaml.SafeDumper.add_representer(types.VolumeFromSpec, serialize_config_type)
yaml.SafeDumper.add_representer(types.VolumeSpec, serialize_config_type)
@@ -50,6 +46,8 @@ yaml.SafeDumper.add_representer(types.SecurityOpt, serialize_config_type)
yaml.SafeDumper.add_representer(types.ServiceSecret, serialize_dict_type)
yaml.SafeDumper.add_representer(types.ServiceConfig, serialize_dict_type)
yaml.SafeDumper.add_representer(types.ServicePort, serialize_dict_type)
+yaml.SafeDumper.add_representer(str, serialize_string)
+yaml.SafeDumper.add_representer(six.text_type, serialize_string)
def denormalize_config(config, image_digests=None):
@@ -95,13 +93,7 @@ def v3_introduced_name_key(key):
return V3_5
-def serialize_config(config, image_digests=None, escape_dollar=True):
- if escape_dollar:
- yaml.SafeDumper.add_representer(str, serialize_string_escape_dollar)
- yaml.SafeDumper.add_representer(six.text_type, serialize_string_escape_dollar)
- else:
- yaml.SafeDumper.add_representer(str, serialize_string)
- yaml.SafeDumper.add_representer(six.text_type, serialize_string)
+def serialize_config(config, image_digests=None):
return yaml.safe_dump(
denormalize_config(config, image_digests),
default_flow_style=False,
diff --git a/compose/network.py b/compose/network.py
index e0d711ff..2491a598 100644
--- a/compose/network.py
+++ b/compose/network.py
@@ -231,7 +231,7 @@ def check_remote_network_config(remote, local):
if k.startswith('com.docker.'): # We are only interested in user-specified labels
continue
if remote_labels.get(k) != local_labels.get(k):
- log.warning(
+ log.warn(
'Network {}: label "{}" has changed. It may need to be'
' recreated.'.format(local.true_name, k)
)
@@ -276,7 +276,7 @@ class ProjectNetworks(object):
}
unused = set(networks) - set(service_networks) - {'default'}
if unused:
- log.warning(
+ log.warn(
"Some networks were defined but are not used by any service: "
"{}".format(", ".join(unused)))
return cls(service_networks, use_networking)
@@ -288,7 +288,7 @@ class ProjectNetworks(object):
try:
network.remove()
except NotFound:
- log.warning("Network %s not found.", network.true_name)
+ log.warn("Network %s not found.", network.true_name)
def initialize(self):
if not self.use_networking:
diff --git a/compose/project.py b/compose/project.py
index a608ffd7..a7f2aa05 100644
--- a/compose/project.py
+++ b/compose/project.py
@@ -355,17 +355,18 @@ class Project(object):
return containers
def build(self, service_names=None, no_cache=False, pull=False, force_rm=False, memory=None,
- build_args=None, gzip=False, parallel_build=False, rm=True, silent=False):
+ build_args=None, gzip=False, parallel_build=False):
services = []
for service in self.get_services(service_names):
if service.can_be_built():
services.append(service)
- elif not silent:
+ else:
log.info('%s uses an image, skipping' % service.name)
def build_service(service):
- service.build(no_cache, pull, force_rm, memory, build_args, gzip, rm, silent)
+ service.build(no_cache, pull, force_rm, memory, build_args, gzip)
+
if parallel_build:
_, errors = parallel.parallel_execute(
services,
@@ -586,10 +587,8 @@ class Project(object):
", ".join(updated_dependencies))
containers_stopped = any(
service.containers(stopped=True, filters={'status': ['created', 'exited']}))
- service_has_links = any(service.get_link_names())
- container_has_links = any(c.get('HostConfig.Links') for c in service.containers())
- should_recreate_for_links = service_has_links ^ container_has_links
- if always_recreate_deps or containers_stopped or should_recreate_for_links:
+ has_links = any(c.get('HostConfig.Links') for c in service.containers())
+ if always_recreate_deps or containers_stopped or not has_links:
plan = service.convergence_plan(ConvergenceStrategy.always)
else:
plan = service.convergence_plan(strategy)
@@ -603,9 +602,6 @@ class Project(object):
def pull(self, service_names=None, ignore_pull_failures=False, parallel_pull=False, silent=False,
include_deps=False):
services = self.get_services(service_names, include_deps)
- images_to_build = {service.image_name for service in services if service.can_be_built()}
- services_to_pull = [service for service in services if service.image_name not in images_to_build]
-
msg = not silent and 'Pulling' or None
if parallel_pull:
@@ -631,7 +627,7 @@ class Project(object):
)
_, errors = parallel.parallel_execute(
- services_to_pull,
+ services,
pull_service,
operator.attrgetter('name'),
msg,
@@ -644,7 +640,7 @@ class Project(object):
raise ProjectError(combined_errors)
else:
- for service in services_to_pull:
+ for service in services:
service.pull(ignore_pull_failures, silent=silent)
def push(self, service_names=None, ignore_push_failures=False):
@@ -690,7 +686,7 @@ class Project(object):
def find_orphan_containers(self, remove_orphans):
def _find():
- containers = set(self._labeled_containers() + self._labeled_containers(stopped=True))
+ containers = self._labeled_containers()
for ctnr in containers:
service_name = ctnr.labels.get(LABEL_SERVICE)
if service_name not in self.service_names:
@@ -701,10 +697,7 @@ class Project(object):
if remove_orphans:
for ctnr in orphans:
log.info('Removing orphan container "{0}"'.format(ctnr.name))
- try:
- ctnr.kill()
- except APIError:
- pass
+ ctnr.kill()
ctnr.remove(force=True)
else:
log.warning(
@@ -732,11 +725,10 @@ class Project(object):
def build_container_operation_with_timeout_func(self, operation, options):
def container_operation_with_timeout(container):
- _options = options.copy()
- if _options.get('timeout') is None:
+ if options.get('timeout') is None:
service = self.get_service(container.service)
- _options['timeout'] = service.stop_timeout(None)
- return getattr(container, operation)(**_options)
+ options['timeout'] = service.stop_timeout(None)
+ return getattr(container, operation)(**options)
return container_operation_with_timeout
@@ -779,13 +771,13 @@ def get_secrets(service, service_secrets, secret_defs):
.format(service=service, secret=secret.source))
if secret_def.get('external'):
- log.warning("Service \"{service}\" uses secret \"{secret}\" which is external. "
- "External secrets are not available to containers created by "
- "docker-compose.".format(service=service, secret=secret.source))
+ log.warn("Service \"{service}\" uses secret \"{secret}\" which is external. "
+ "External secrets are not available to containers created by "
+ "docker-compose.".format(service=service, secret=secret.source))
continue
if secret.uid or secret.gid or secret.mode:
- log.warning(
+ log.warn(
"Service \"{service}\" uses secret \"{secret}\" with uid, "
"gid, or mode. These fields are not supported by this "
"implementation of the Compose file".format(
diff --git a/compose/service.py b/compose/service.py
index 0db35438..8c6702f1 100644
--- a/compose/service.py
+++ b/compose/service.py
@@ -59,6 +59,7 @@ from .utils import parse_seconds_float
from .utils import truncate_id
from .utils import unique_everseen
+
log = logging.getLogger(__name__)
@@ -176,7 +177,7 @@ class Service(object):
network_mode=None,
networks=None,
secrets=None,
- scale=1,
+ scale=None,
pid_mode=None,
default_platform=None,
**options
@@ -191,7 +192,7 @@ class Service(object):
self.pid_mode = pid_mode or PidMode(None)
self.networks = networks or {}
self.secrets = secrets or []
- self.scale_num = scale
+ self.scale_num = scale or 1
self.default_platform = default_platform
self.options = options
@@ -240,15 +241,15 @@ class Service(object):
def show_scale_warnings(self, desired_num):
if self.custom_container_name and desired_num > 1:
- log.warning('The "%s" service is using the custom container name "%s". '
- 'Docker requires each container to have a unique name. '
- 'Remove the custom name to scale the service.'
- % (self.name, self.custom_container_name))
+ log.warn('The "%s" service is using the custom container name "%s". '
+ 'Docker requires each container to have a unique name. '
+ 'Remove the custom name to scale the service.'
+ % (self.name, self.custom_container_name))
if self.specifies_host_port() and desired_num > 1:
- log.warning('The "%s" service specifies a port on the host. If multiple containers '
- 'for this service are created on a single host, the port will clash.'
- % self.name)
+ log.warn('The "%s" service specifies a port on the host. If multiple containers '
+ 'for this service are created on a single host, the port will clash.'
+ % self.name)
def scale(self, desired_num, timeout=None):
"""
@@ -357,17 +358,11 @@ class Service(object):
raise NeedsBuildError(self)
self.build()
- log.warning(
+ log.warn(
"Image for service {} was built because it did not already exist. To "
"rebuild this image you must use `docker-compose build` or "
"`docker-compose up --build`.".format(self.name))
- def get_image_registry_data(self):
- try:
- return self.client.inspect_distribution(self.image_name)
- except APIError:
- raise NoSuchImageError("Image '{}' not found".format(self.image_name))
-
def image(self):
try:
return self.client.inspect_image(self.image_name)
@@ -685,7 +680,6 @@ class Service(object):
'links': self.get_link_names(),
'net': self.network_mode.id,
'networks': self.networks,
- 'secrets': self.secrets,
'volumes_from': [
(v.source.name, v.mode)
for v in self.volumes_from if isinstance(v.source, Service)
@@ -1049,11 +1043,8 @@ class Service(object):
return [build_spec(secret) for secret in self.secrets]
def build(self, no_cache=False, pull=False, force_rm=False, memory=None, build_args_override=None,
- gzip=False, rm=True, silent=False):
- output_stream = open(os.devnull, 'w')
- if not silent:
- output_stream = sys.stdout
- log.info('Building %s' % self.name)
+ gzip=False):
+ log.info('Building %s' % self.name)
build_opts = self.options.get('build', {})
@@ -1073,12 +1064,12 @@ class Service(object):
build_output = self.client.build(
path=path,
tag=self.image_name,
- rm=rm,
+ rm=True,
forcerm=force_rm,
pull=pull,
nocache=no_cache,
dockerfile=build_opts.get('dockerfile', None),
- cache_from=self.get_cache_from(build_opts),
+ cache_from=build_opts.get('cache_from', None),
labels=build_opts.get('labels', None),
buildargs=build_args,
network_mode=build_opts.get('network', None),
@@ -1094,7 +1085,7 @@ class Service(object):
)
try:
- all_events = list(stream_output(build_output, output_stream))
+ all_events = list(stream_output(build_output, sys.stdout))
except StreamOutputError as e:
raise BuildError(self, six.text_type(e))
@@ -1116,12 +1107,6 @@ class Service(object):
return image_id
- def get_cache_from(self, build_opts):
- cache_from = build_opts.get('cache_from', None)
- if cache_from is not None:
- cache_from = [tag for tag in cache_from if tag]
- return cache_from
-
def can_be_built(self):
return 'build' in self.options
@@ -1331,7 +1316,7 @@ class ServicePidMode(PidMode):
if containers:
return 'container:' + containers[0].id
- log.warning(
+ log.warn(
"Service %s is trying to use reuse the PID namespace "
"of another service that is not running." % (self.service_name)
)
@@ -1394,8 +1379,8 @@ class ServiceNetworkMode(object):
if containers:
return 'container:' + containers[0].id
- log.warning("Service %s is trying to use reuse the network stack "
- "of another service that is not running." % (self.id))
+ log.warn("Service %s is trying to use reuse the network stack "
+ "of another service that is not running." % (self.id))
return None
@@ -1546,7 +1531,7 @@ def warn_on_masked_volume(volumes_option, container_volumes, service):
volume.internal in container_volumes and
container_volumes.get(volume.internal) != volume.external
):
- log.warning((
+ log.warn((
"Service \"{service}\" is using volume \"{volume}\" from the "
"previous container. Host mapping \"{host_path}\" has no effect. "
"Remove the existing containers (with `docker-compose rm {service}`) "
diff --git a/compose/volume.py b/compose/volume.py
index b02fc5d8..60c1e0fe 100644
--- a/compose/volume.py
+++ b/compose/volume.py
@@ -127,7 +127,7 @@ class ProjectVolumes(object):
try:
volume.remove()
except NotFound:
- log.warning("Volume %s not found.", volume.true_name)
+ log.warn("Volume %s not found.", volume.true_name)
def initialize(self):
try:
@@ -209,7 +209,7 @@ def check_remote_volume_config(remote, local):
if k.startswith('com.docker.'): # We are only interested in user-specified labels
continue
if remote_labels.get(k) != local_labels.get(k):
- log.warning(
+ log.warn(
'Volume {}: label "{}" has changed. It may need to be'
' recreated.'.format(local.name, k)
)
diff --git a/contrib/completion/bash/docker-compose b/contrib/completion/bash/docker-compose
index 941f25a3..2add0c9c 100644
--- a/contrib/completion/bash/docker-compose
+++ b/contrib/completion/bash/docker-compose
@@ -110,14 +110,11 @@ _docker_compose_build() {
__docker_compose_nospace
return
;;
- --memory|-m)
- return
- ;;
esac
case "$cur" in
-*)
- COMPREPLY=( $( compgen -W "--build-arg --compress --force-rm --help --memory -m --no-cache --no-rm --pull --parallel -q --quiet" -- "$cur" ) )
+ COMPREPLY=( $( compgen -W "--build-arg --compress --force-rm --help --memory --no-cache --pull --parallel" -- "$cur" ) )
;;
*)
__docker_compose_complete_services --filter source=build
diff --git a/contrib/completion/zsh/_docker-compose b/contrib/completion/zsh/_docker-compose
index 808b068a..d25256c1 100755
--- a/contrib/completion/zsh/_docker-compose
+++ b/contrib/completion/zsh/_docker-compose
@@ -113,7 +113,6 @@ __docker-compose_subcommand() {
$opts_help \
"*--build-arg=[Set build-time variables for one service.]:<varname>=<value>: " \
'--force-rm[Always remove intermediate containers.]' \
- '(--quiet -q)'{--quiet,-q}'[Curb build output]' \
'(--memory -m)'{--memory,-m}'[Memory limit for the build container.]' \
'--no-cache[Do not use cache when building the image.]' \
'--pull[Always attempt to pull a newer version of the image.]' \
diff --git a/contrib/migration/migrate-compose-file-v1-to-v2.py b/contrib/migration/migrate-compose-file-v1-to-v2.py
index 274b499b..c1785b0d 100755
--- a/contrib/migration/migrate-compose-file-v1-to-v2.py
+++ b/contrib/migration/migrate-compose-file-v1-to-v2.py
@@ -44,7 +44,7 @@ def warn_for_links(name, service):
links = service.get('links')
if links:
example_service = links[0].partition(':')[0]
- log.warning(
+ log.warn(
"Service {name} has links, which no longer create environment "
"variables such as {example_service_upper}_PORT. "
"If you are using those in your application code, you should "
@@ -57,7 +57,7 @@ def warn_for_links(name, service):
def warn_for_external_links(name, service):
external_links = service.get('external_links')
if external_links:
- log.warning(
+ log.warn(
"Service {name} has external_links: {ext}, which now work "
"slightly differently. In particular, two containers must be "
"connected to at least one network in common in order to "
@@ -107,7 +107,7 @@ def rewrite_volumes_from(service, service_names):
def create_volumes_section(data):
named_volumes = get_named_volumes(data['services'])
if named_volumes:
- log.warning(
+ log.warn(
"Named volumes ({names}) must be explicitly declared. Creating a "
"'volumes' section with declarations.\n\n"
"For backwards-compatibility, they've been declared as external. "
diff --git a/docker-compose-entrypoint.sh b/docker-compose-entrypoint.sh
deleted file mode 100755
index 84436fa0..00000000
--- a/docker-compose-entrypoint.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-set -e
-
-# first arg is `-f` or `--some-option`
-if [ "${1#-}" != "$1" ]; then
- set -- docker-compose "$@"
-fi
-
-# if our command is a valid Docker subcommand, let's invoke it through Docker instead
-# (this allows for "docker run docker ps", etc)
-if docker-compose help "$1" > /dev/null 2>&1; then
- set -- docker-compose "$@"
-fi
-
-# if we have "--link some-docker:docker" and not DOCKER_HOST, let's set DOCKER_HOST automatically
-if [ -z "$DOCKER_HOST" -a "$DOCKER_PORT_2375_TCP" ]; then
- export DOCKER_HOST='tcp://docker:2375'
-fi
-
-exec "$@"
diff --git a/docs/README.md b/docs/README.md
index accc7c23..50c91d20 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -6,9 +6,11 @@ The documentation for Compose has been merged into
The docs for Compose are now here:
https://github.com/docker/docker.github.io/tree/master/compose
-Please submit pull requests for unreleased features/changes on the `master` branch (https://github.com/docker/docker.github.io/tree/master), please prefix the PR title with `[WIP]` to indicate that it relates to an unreleased change.
+Please submit pull requests for unpublished features on the `vnext-compose` branch (https://github.com/docker/docker.github.io/tree/vnext-compose).
-If you submit a PR to this codebase that has a docs impact, create a second docs PR on `docker.github.io`. Use the docs PR template provided.
+If you submit a PR to this codebase that has a docs impact, create a second docs PR on `docker.github.io`. Use the docs PR template provided (coming soon - watch this space).
+
+PRs for typos, additional information, etc. for already-published features should be labeled as `okay-to-publish` (we are still settling on a naming convention, will provide a label soon). You can submit these PRs either to `vnext-compose` or directly to `master` on `docker.github.io`
As always, the docs remain open-source and we appreciate your feedback and
pull requests!
diff --git a/pyinstaller/ldd b/pyinstaller/ldd
deleted file mode 100755
index 3f10ad27..00000000
--- a/pyinstaller/ldd
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-
-# From http://wiki.musl-libc.org/wiki/FAQ#Q:_where_is_ldd_.3F
-#
-# Musl's dynlinker comes with ldd functionality built in. just create a
-# symlink from ld-musl-$ARCH.so to /bin/ldd. If the dynlinker was started
-# as "ldd", it will detect that and print the appropriate DSO information.
-#
-# Instead, this string replaced "ldd" with the package so that pyinstaller
-# can find the actual lib.
-exec /usr/bin/ldd "$@" | \
- sed -r 's/([^[:space:]]+) => ldd/\1 => \/lib\/\1/g' | \
- sed -r 's/ldd \(.*\)//g'
diff --git a/requirements-build.txt b/requirements-build.txt
index 9161fadf..e5a77e79 100644
--- a/requirements-build.txt
+++ b/requirements-build.txt
@@ -1 +1 @@
-pyinstaller==3.4
+pyinstaller==3.3.1
diff --git a/requirements-dev.txt b/requirements-dev.txt
index b19fc2e0..bfb94115 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,4 @@
coverage==4.4.2
-ddt==1.2.0
flake8==3.5.0
mock==2.0.0
pytest==3.6.3
diff --git a/requirements.txt b/requirements.txt
index ff23516e..6007ee3f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@ cached-property==1.3.0
certifi==2017.4.17
chardet==3.0.4
colorama==0.4.0; sys_platform == 'win32'
-docker==4.0.1
+docker==3.7.3
docker-pycreds==0.4.0
dockerpty==0.4.1
docopt==0.6.2
@@ -17,8 +17,8 @@ pypiwin32==219; sys_platform == 'win32' and python_version < '3.6'
pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6'
PySocks==1.6.7
PyYAML==4.2b1
-requests==2.22.0
+requests==2.20.0
six==1.10.0
texttable==0.9.1
-urllib3==1.24.2; python_version == '3.3'
-websocket-client==0.32.0
+urllib3==1.21.1; python_version == '3.3'
+websocket-client==0.56.0
diff --git a/script/build/image b/script/build/image
index fb3f856e..a3198c99 100755
--- a/script/build/image
+++ b/script/build/image
@@ -7,14 +7,11 @@ if [ -z "$1" ]; then
exit 1
fi
-TAG="$1"
+TAG=$1
VERSION="$(python setup.py --version)"
-DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
-echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
+./script/build/write-git-sha
python setup.py sdist bdist_wheel
-
-docker build \
- --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}" \
- -t "${TAG}" .
+./script/build/linux
+docker build -t docker/compose:$TAG -f Dockerfile.run .
diff --git a/script/build/linux b/script/build/linux
index 28065da0..056940ad 100755
--- a/script/build/linux
+++ b/script/build/linux
@@ -4,14 +4,10 @@ set -ex
./script/clean
-DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
-TAG="docker/compose:tmp-glibc-linux-binary-${DOCKER_COMPOSE_GITSHA}"
-
-docker build -t "${TAG}" . \
- --build-arg BUILD_PLATFORM=debian \
- --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}"
-TMP_CONTAINER=$(docker create "${TAG}")
-mkdir -p dist
-docker cp "${TMP_CONTAINER}":/usr/local/bin/docker-compose dist/docker-compose-Linux-x86_64
-docker container rm -f "${TMP_CONTAINER}"
-docker image rm -f "${TAG}"
+TAG="docker-compose"
+docker build -t "$TAG" .
+docker run \
+ --rm --entrypoint="script/build/linux-entrypoint" \
+ -v $(pwd)/dist:/code/dist \
+ -v $(pwd)/.git:/code/.git \
+ "$TAG"
diff --git a/script/build/linux-entrypoint b/script/build/linux-entrypoint
index 1c5438d8..0e3c7ec1 100755
--- a/script/build/linux-entrypoint
+++ b/script/build/linux-entrypoint
@@ -2,38 +2,14 @@
set -ex
-CODE_PATH=/code
-VENV="${CODE_PATH}"/.tox/py37
+TARGET=dist/docker-compose-$(uname -s)-$(uname -m)
+VENV=/code/.tox/py36
-cd "${CODE_PATH}"
-mkdir -p dist
-chmod 777 dist
+mkdir -p `pwd`/dist
+chmod 777 `pwd`/dist
-"${VENV}"/bin/pip3 install -q -r requirements-build.txt
-
-# TODO(ulyssessouza) To check if really needed
-if [ -z "${DOCKER_COMPOSE_GITSHA}" ]; then
- DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
-fi
-echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
-
-export PATH="${CODE_PATH}/pyinstaller:${PATH}"
-
-if [ ! -z "${BUILD_BOOTLOADER}" ]; then
- # Build bootloader for alpine
- git clone --single-branch --branch master https://github.com/pyinstaller/pyinstaller.git /tmp/pyinstaller
- cd /tmp/pyinstaller/bootloader
- git checkout v3.4
- "${VENV}"/bin/python3 ./waf configure --no-lsb all
- "${VENV}"/bin/pip3 install ..
- cd "${CODE_PATH}"
- rm -Rf /tmp/pyinstaller
-else
- echo "NOT compiling bootloader!!!"
-fi
-
-"${VENV}"/bin/pyinstaller --exclude-module pycrypto --exclude-module PyInstaller docker-compose.spec
-ls -la dist/
-ldd dist/docker-compose
-mv dist/docker-compose /usr/local/bin
-docker-compose version
+$VENV/bin/pip install -q -r requirements-build.txt
+./script/build/write-git-sha
+su -c "$VENV/bin/pyinstaller docker-compose.spec" user
+mv dist/docker-compose $TARGET
+$TARGET version
diff --git a/script/build/osx b/script/build/osx
index 52991458..c62b2702 100755
--- a/script/build/osx
+++ b/script/build/osx
@@ -5,12 +5,11 @@ TOOLCHAIN_PATH="$(realpath $(dirname $0)/../../build/toolchain)"
rm -rf venv
-virtualenv -p "${TOOLCHAIN_PATH}"/bin/python3 venv
+virtualenv -p ${TOOLCHAIN_PATH}/bin/python3 venv
venv/bin/pip install -r requirements.txt
venv/bin/pip install -r requirements-build.txt
venv/bin/pip install --no-deps .
-DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
-echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
+./script/build/write-git-sha
venv/bin/pyinstaller docker-compose.spec
mv dist/docker-compose dist/docker-compose-Darwin-x86_64
dist/docker-compose-Darwin-x86_64 version
diff --git a/script/build/test-image b/script/build/test-image
index 4964a5f9..a2eb62cd 100755
--- a/script/build/test-image
+++ b/script/build/test-image
@@ -7,12 +7,11 @@ if [ -z "$1" ]; then
exit 1
fi
-TAG="$1"
-IMAGE="docker/compose-tests"
+TAG=$1
-DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
-docker build -t "${IMAGE}:${TAG}" . \
- --target build \
- --build-arg BUILD_PLATFORM="debian" \
- --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}"
-docker tag "${IMAGE}":"${TAG}" "${IMAGE}":latest
+docker build -t docker-compose-tests:tmp .
+ctnr_id=$(docker create --entrypoint=tox docker-compose-tests:tmp)
+docker commit $ctnr_id docker/compose-tests:latest
+docker tag docker/compose-tests:latest docker/compose-tests:$TAG
+docker rm -f $ctnr_id
+docker rmi -f docker-compose-tests:tmp
diff --git a/script/build/windows.ps1 b/script/build/windows.ps1
index 4c7a8bed..41dc51e3 100644
--- a/script/build/windows.ps1
+++ b/script/build/windows.ps1
@@ -6,17 +6,17 @@
#
# http://git-scm.com/download/win
#
-# 2. Install Python 3.7.2:
+# 2. Install Python 3.6.4:
#
# https://www.python.org/downloads/
#
-# 3. Append ";C:\Python37;C:\Python37\Scripts" to the "Path" environment variable:
+# 3. Append ";C:\Python36;C:\Python36\Scripts" to the "Path" environment variable:
#
# https://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/sysdm_advancd_environmnt_addchange_variable.mspx?mfr=true
#
# 4. In Powershell, run the following commands:
#
-# $ pip install 'virtualenv==16.2.0'
+# $ pip install 'virtualenv>=15.1.0'
# $ Set-ExecutionPolicy -Scope CurrentUser RemoteSigned
#
# 5. Clone the repository:
diff --git a/script/build/write-git-sha b/script/build/write-git-sha
index cac4b6fd..be87f505 100755
--- a/script/build/write-git-sha
+++ b/script/build/write-git-sha
@@ -9,4 +9,4 @@ if [[ "${?}" != "0" ]]; then
echo "Couldn't get revision of the git repository. Setting to 'unknown' instead"
DOCKER_COMPOSE_GITSHA="unknown"
fi
-echo "${DOCKER_COMPOSE_GITSHA}"
+echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
diff --git a/script/release/README.md b/script/release/README.md
index 97168d37..0c6f12cb 100644
--- a/script/release/README.md
+++ b/script/release/README.md
@@ -192,8 +192,6 @@ be handled manually by the operator:
- Bump the version in `compose/__init__.py` to the *next* minor version
number with `dev` appended. For example, if you just released `1.4.0`,
update it to `1.5.0dev`
- - Update compose_version in [github.com/docker/docker.github.io/blob/master/_config.yml](https://github.com/docker/docker.github.io/blob/master/_config.yml) and [github.com/docker/docker.github.io/blob/master/_config_authoring.yml](https://github.com/docker/docker.github.io/blob/master/_config_authoring.yml)
- - Update the release note in [github.com/docker/docker.github.io](https://github.com/docker/docker.github.io/blob/master/release-notes/docker-compose.md)
## Advanced options
diff --git a/script/release/release.py b/script/release/release.py
index a9c05eb7..9db1a49d 100755
--- a/script/release/release.py
+++ b/script/release/release.py
@@ -15,7 +15,6 @@ from release.const import NAME
from release.const import REPO_ROOT
from release.downloader import BinaryDownloader
from release.images import ImageManager
-from release.images import is_tag_latest
from release.pypi import check_pypirc
from release.pypi import pypi_upload
from release.repository import delete_assets
@@ -205,7 +204,7 @@ def resume(args):
delete_assets(gh_release)
upload_assets(gh_release, files)
img_manager = ImageManager(args.release)
- img_manager.build_images(repository)
+ img_manager.build_images(repository, files)
except ScriptError as e:
print(e)
return 1
@@ -245,7 +244,7 @@ def start(args):
gh_release = create_release_draft(repository, args.release, pr_data, files)
upload_assets(gh_release, files)
img_manager = ImageManager(args.release)
- img_manager.build_images(repository)
+ img_manager.build_images(repository, files)
except ScriptError as e:
print(e)
return 1
@@ -259,8 +258,7 @@ def finalize(args):
try:
check_pypirc()
repository = Repository(REPO_ROOT, args.repo)
- tag_as_latest = is_tag_latest(args.release)
- img_manager = ImageManager(args.release, tag_as_latest)
+ img_manager = ImageManager(args.release)
pr_data = repository.find_release_pr(args.release)
if not pr_data:
raise ScriptError('No PR found for {}'.format(args.release))
diff --git a/script/release/release/const.py b/script/release/release/const.py
index 52458ea1..5a72bde4 100644
--- a/script/release/release/const.py
+++ b/script/release/release/const.py
@@ -6,5 +6,4 @@ import os
REPO_ROOT = os.path.join(os.path.dirname(__file__), '..', '..', '..')
NAME = 'docker/compose'
-COMPOSE_TESTS_IMAGE_BASE_NAME = NAME + '-tests'
BINTRAY_ORG = 'docker-compose'
diff --git a/script/release/release/images.py b/script/release/release/images.py
index 17d572df..df6eeda4 100644
--- a/script/release/release/images.py
+++ b/script/release/release/images.py
@@ -5,36 +5,18 @@ from __future__ import unicode_literals
import base64
import json
import os
+import shutil
import docker
-from enum import Enum
-from .const import NAME
from .const import REPO_ROOT
from .utils import ScriptError
-from .utils import yesno
-from script.release.release.const import COMPOSE_TESTS_IMAGE_BASE_NAME
-
-
-class Platform(Enum):
- ALPINE = 'alpine'
- DEBIAN = 'debian'
-
- def __str__(self):
- return self.value
-
-
-# Checks if this version respects the GA version format ('x.y.z') and not an RC
-def is_tag_latest(version):
- ga_version = all(n.isdigit() for n in version.split('.')) and version.count('.') == 2
- return ga_version and yesno('Should this release be tagged as \"latest\"? [Y/n]: ', default=True)
class ImageManager(object):
- def __init__(self, version, latest=False):
+ def __init__(self, version):
self.docker_client = docker.APIClient(**docker.utils.kwargs_from_env())
self.version = version
- self.latest = latest
if 'HUB_CREDENTIALS' in os.environ:
print('HUB_CREDENTIALS found in environment, issuing login')
credentials = json.loads(base64.urlsafe_b64decode(os.environ['HUB_CREDENTIALS']))
@@ -42,36 +24,16 @@ class ImageManager(object):
username=credentials['Username'], password=credentials['Password']
)
- def _tag(self, image, existing_tag, new_tag):
- existing_repo_tag = '{image}:{tag}'.format(image=image, tag=existing_tag)
- new_repo_tag = '{image}:{tag}'.format(image=image, tag=new_tag)
- self.docker_client.tag(existing_repo_tag, new_repo_tag)
-
- def get_full_version(self, platform=None):
- return self.version + '-' + platform.__str__() if platform else self.version
-
- def get_runtime_image_tag(self, tag):
- return '{image_base_image}:{tag}'.format(
- image_base_image=NAME,
- tag=self.get_full_version(tag)
- )
-
- def build_runtime_image(self, repository, platform):
- git_sha = repository.write_git_sha()
- compose_image_base_name = NAME
- print('Building {image} image ({platform} based)'.format(
- image=compose_image_base_name,
- platform=platform
- ))
- full_version = self.get_full_version(platform)
- build_tag = self.get_runtime_image_tag(platform)
+ def build_images(self, repository, files):
+ print("Building release images...")
+ repository.write_git_sha()
+ distdir = os.path.join(REPO_ROOT, 'dist')
+ os.makedirs(distdir, exist_ok=True)
+ shutil.copy(files['docker-compose-Linux-x86_64'][0], distdir)
+ os.chmod(os.path.join(distdir, 'docker-compose-Linux-x86_64'), 0o755)
+ print('Building docker/compose image')
logstream = self.docker_client.build(
- REPO_ROOT,
- tag=build_tag,
- buildargs={
- 'BUILD_PLATFORM': platform.value,
- 'GIT_COMMIT': git_sha,
- },
+ REPO_ROOT, tag='docker/compose:{}'.format(self.version), dockerfile='Dockerfile.run',
decode=True
)
for chunk in logstream:
@@ -80,33 +42,9 @@ class ImageManager(object):
if 'stream' in chunk:
print(chunk['stream'], end='')
- if platform == Platform.ALPINE:
- self._tag(compose_image_base_name, full_version, self.version)
- if self.latest:
- self._tag(compose_image_base_name, full_version, platform)
- if platform == Platform.ALPINE:
- self._tag(compose_image_base_name, full_version, 'latest')
-
- def get_ucp_test_image_tag(self, tag=None):
- return '{image}:{tag}'.format(
- image=COMPOSE_TESTS_IMAGE_BASE_NAME,
- tag=tag or self.version
- )
-
- # Used for producing a test image for UCP
- def build_ucp_test_image(self, repository):
- print('Building test image (debian based for UCP e2e)')
- git_sha = repository.write_git_sha()
- ucp_test_image_tag = self.get_ucp_test_image_tag()
+ print('Building test image (for UCP e2e)')
logstream = self.docker_client.build(
- REPO_ROOT,
- tag=ucp_test_image_tag,
- target='build',
- buildargs={
- 'BUILD_PLATFORM': Platform.DEBIAN.value,
- 'GIT_COMMIT': git_sha,
- },
- decode=True
+ REPO_ROOT, tag='docker-compose-tests:tmp', decode=True
)
for chunk in logstream:
if 'error' in chunk:
@@ -114,15 +52,26 @@ class ImageManager(object):
if 'stream' in chunk:
print(chunk['stream'], end='')
- self._tag(COMPOSE_TESTS_IMAGE_BASE_NAME, self.version, 'latest')
+ container = self.docker_client.create_container(
+ 'docker-compose-tests:tmp', entrypoint='tox'
+ )
+ self.docker_client.commit(container, 'docker/compose-tests', 'latest')
+ self.docker_client.tag(
+ 'docker/compose-tests:latest', 'docker/compose-tests:{}'.format(self.version)
+ )
+ self.docker_client.remove_container(container, force=True)
+ self.docker_client.remove_image('docker-compose-tests:tmp', force=True)
- def build_images(self, repository):
- self.build_runtime_image(repository, Platform.ALPINE)
- self.build_runtime_image(repository, Platform.DEBIAN)
- self.build_ucp_test_image(repository)
+ @property
+ def image_names(self):
+ return [
+ 'docker/compose-tests:latest',
+ 'docker/compose-tests:{}'.format(self.version),
+ 'docker/compose:{}'.format(self.version)
+ ]
def check_images(self):
- for name in self.get_images_to_push():
+ for name in self.image_names:
try:
self.docker_client.inspect_image(name)
except docker.errors.ImageNotFound:
@@ -130,22 +79,8 @@ class ImageManager(object):
return False
return True
- def get_images_to_push(self):
- tags_to_push = {
- "{}:{}".format(NAME, self.version),
- self.get_runtime_image_tag(Platform.ALPINE),
- self.get_runtime_image_tag(Platform.DEBIAN),
- self.get_ucp_test_image_tag(),
- self.get_ucp_test_image_tag('latest'),
- }
- if is_tag_latest(self.version):
- tags_to_push.add("{}:latest".format(NAME))
- return tags_to_push
-
def push_images(self):
- tags_to_push = self.get_images_to_push()
- print('Build tags to push {}'.format(tags_to_push))
- for name in tags_to_push:
+ for name in self.image_names:
print('Pushing {} to Docker Hub'.format(name))
logstream = self.docker_client.push(name, stream=True, decode=True)
for chunk in logstream:
diff --git a/script/release/release/repository.py b/script/release/release/repository.py
index a0281eaa..bb8f4fbe 100644
--- a/script/release/release/repository.py
+++ b/script/release/release/repository.py
@@ -175,7 +175,6 @@ class Repository(object):
def write_git_sha(self):
with open(os.path.join(REPO_ROOT, 'compose', 'GITSHA'), 'w') as f:
f.write(self.git_repo.head.commit.hexsha[:7])
- return self.git_repo.head.commit.hexsha[:7]
def cherry_pick_prs(self, release_branch, ids):
if not ids:
@@ -220,7 +219,7 @@ def get_contributors(pr_data):
commits = pr_data.get_commits()
authors = {}
for commit in commits:
- if not commit or not commit.author or not commit.author.login:
+ if not commit.author:
continue
author = commit.author.login
authors[author] = authors.get(author, 0) + 1
diff --git a/script/run/run.sh b/script/run/run.sh
index 58caf361..4881adc3 100755
--- a/script/run/run.sh
+++ b/script/run/run.sh
@@ -15,7 +15,7 @@
set -e
-VERSION="1.25.0-rc1"
+VERSION="1.24.1"
IMAGE="docker/compose:$VERSION"
@@ -48,7 +48,7 @@ fi
# Only allocate tty if we detect one
if [ -t 0 -a -t 1 ]; then
- DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS -t"
+ DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS -t"
fi
# Always set -i to support piped and terminal input in run/exec
diff --git a/script/setup/osx b/script/setup/osx
index 1fb91edc..1b546816 100755
--- a/script/setup/osx
+++ b/script/setup/osx
@@ -13,13 +13,13 @@ if ! [ ${DEPLOYMENT_TARGET} == "$(macos_version)" ]; then
SDK_SHA1=dd228a335194e3392f1904ce49aff1b1da26ca62
fi
-OPENSSL_VERSION=1.1.1a
+OPENSSL_VERSION=1.1.0j
OPENSSL_URL=https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz
-OPENSSL_SHA1=8fae27b4f34445a5500c9dc50ae66b4d6472ce29
+OPENSSL_SHA1=dcad1efbacd9a4ed67d4514470af12bbe2a1d60a
-PYTHON_VERSION=3.7.2
+PYTHON_VERSION=3.6.8
PYTHON_URL=https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz
-PYTHON_SHA1=0cd8e52d8ed1d0be12ac8e87a623a15df3a3b418
+PYTHON_SHA1=09fcc4edaef0915b4dedbfb462f1cd15f82d3a6f
#
# Install prerequisites.
@@ -36,7 +36,7 @@ if ! [ -x "$(command -v python3)" ]; then
brew install python3
fi
if ! [ -x "$(command -v virtualenv)" ]; then
- pip install virtualenv==16.2.0
+ pip install virtualenv
fi
#
@@ -50,7 +50,7 @@ mkdir -p ${TOOLCHAIN_PATH}
#
# Set macOS SDK.
#
-if [[ ${SDK_FETCH} && ! -f ${TOOLCHAIN_PATH}/MacOSX${DEPLOYMENT_TARGET}.sdk/SDKSettings.plist ]]; then
+if [ ${SDK_FETCH} ]; then
SDK_PATH=${TOOLCHAIN_PATH}/MacOSX${DEPLOYMENT_TARGET}.sdk
fetch_tarball ${SDK_URL} ${SDK_PATH} ${SDK_SHA1}
else
@@ -61,7 +61,7 @@ fi
# Build OpenSSL.
#
OPENSSL_SRC_PATH=${TOOLCHAIN_PATH}/openssl-${OPENSSL_VERSION}
-if ! [[ $(${TOOLCHAIN_PATH}/bin/openssl version) == *"${OPENSSL_VERSION}"* ]]; then
+if ! [ -f ${TOOLCHAIN_PATH}/bin/openssl ]; then
rm -rf ${OPENSSL_SRC_PATH}
fetch_tarball ${OPENSSL_URL} ${OPENSSL_SRC_PATH} ${OPENSSL_SHA1}
(
@@ -77,7 +77,7 @@ fi
# Build Python.
#
PYTHON_SRC_PATH=${TOOLCHAIN_PATH}/Python-${PYTHON_VERSION}
-if ! [[ $(${TOOLCHAIN_PATH}/bin/python3 --version) == *"${PYTHON_VERSION}"* ]]; then
+if ! [ -f ${TOOLCHAIN_PATH}/bin/python3 ]; then
rm -rf ${PYTHON_SRC_PATH}
fetch_tarball ${PYTHON_URL} ${PYTHON_SRC_PATH} ${PYTHON_SHA1}
(
@@ -87,10 +87,9 @@ if ! [[ $(${TOOLCHAIN_PATH}/bin/python3 --version) == *"${PYTHON_VERSION}"* ]];
--datarootdir=${TOOLCHAIN_PATH}/share \
--datadir=${TOOLCHAIN_PATH}/share \
--enable-framework=${TOOLCHAIN_PATH}/Frameworks \
- --with-openssl=${TOOLCHAIN_PATH} \
MACOSX_DEPLOYMENT_TARGET=${DEPLOYMENT_TARGET} \
CFLAGS="-isysroot ${SDK_PATH} -I${TOOLCHAIN_PATH}/include" \
- CPPFLAGS="-I${SDK_PATH}/usr/include -I${TOOLCHAIN_PATH}/include" \
+ CPPFLAGS="-I${SDK_PATH}/usr/include -I${TOOLCHAIN_PATH}/include" \
LDFLAGS="-isysroot ${SDK_PATH} -L ${TOOLCHAIN_PATH}/lib"
make -j 4
make install PYTHONAPPSDIR=${TOOLCHAIN_PATH}
@@ -98,11 +97,6 @@ if ! [[ $(${TOOLCHAIN_PATH}/bin/python3 --version) == *"${PYTHON_VERSION}"* ]];
)
fi
-#
-# Smoke test built Python.
-#
-openssl_version ${TOOLCHAIN_PATH}
-
echo ""
echo "*** Targeting macOS: ${DEPLOYMENT_TARGET}"
echo "*** Using SDK ${SDK_PATH}"
diff --git a/script/test/all b/script/test/all
index f929a57e..e48f73bb 100755
--- a/script/test/all
+++ b/script/test/all
@@ -8,7 +8,8 @@ set -e
docker run --rm \
--tty \
${GIT_VOLUME} \
- "$TAG" tox -e pre-commit
+ --entrypoint="tox" \
+ "$TAG" -e pre-commit
get_versions="docker run --rm
--entrypoint=/code/.tox/py27/bin/python
@@ -23,7 +24,7 @@ fi
BUILD_NUMBER=${BUILD_NUMBER-$USER}
-PY_TEST_VERSIONS=${PY_TEST_VERSIONS:-py27,py37}
+PY_TEST_VERSIONS=${PY_TEST_VERSIONS:-py27,py36}
for version in $DOCKER_VERSIONS; do
>&2 echo "Running tests against Docker $version"
diff --git a/script/test/ci b/script/test/ci
index bbcedac4..8d3aa56c 100755
--- a/script/test/ci
+++ b/script/test/ci
@@ -20,3 +20,6 @@ export DOCKER_DAEMON_ARGS="--storage-driver=$STORAGE_DRIVER"
GIT_VOLUME="--volumes-from=$(hostname)"
. script/test/all
+
+>&2 echo "Building Linux binary"
+. script/build/linux-entrypoint
diff --git a/script/test/default b/script/test/default
index 4f307f2e..cbb6a67c 100755
--- a/script/test/default
+++ b/script/test/default
@@ -3,18 +3,17 @@
set -ex
-TAG="docker-compose:alpine-$(git rev-parse --short HEAD)"
+TAG="docker-compose:$(git rev-parse --short HEAD)"
# By default use the Dockerfile, but can be overridden to use an alternative file
-# e.g DOCKERFILE=Dockerfile.s390x script/test/default
+# e.g DOCKERFILE=Dockerfile.armhf script/test/default
DOCKERFILE="${DOCKERFILE:-Dockerfile}"
-DOCKER_BUILD_TARGET="${DOCKER_BUILD_TARGET:-build}"
rm -rf coverage-html
# Create the host directory so it's owned by $USER
mkdir -p coverage-html
-docker build -f "${DOCKERFILE}" -t "${TAG}" --target "${DOCKER_BUILD_TARGET}" .
+docker build -f ${DOCKERFILE} -t "$TAG" .
GIT_VOLUME="--volume=$(pwd)/.git:/code/.git"
. script/test/all
diff --git a/setup.py b/setup.py
index c6d07a86..8371cc75 100644
--- a/setup.py
+++ b/setup.py
@@ -33,10 +33,10 @@ install_requires = [
'cached-property >= 1.2.0, < 2',
'docopt >= 0.6.1, < 0.7',
'PyYAML >= 3.10, < 4.3',
- 'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.23',
+ 'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.21',
'texttable >= 0.9.0, < 0.10',
'websocket-client >= 0.32.0, < 1.0',
- 'docker[ssh] >= 3.7.0, < 4.0.2',
+ 'docker[ssh] >= 3.7.0, < 4.0',
'dockerpty >= 0.4.1, < 0.5',
'six >= 1.3.0, < 2',
'jsonschema >= 2.5.1, < 3',
diff --git a/tests/acceptance/cli_test.py b/tests/acceptance/cli_test.py
index 8e66c48e..9ed25736 100644
--- a/tests/acceptance/cli_test.py
+++ b/tests/acceptance/cli_test.py
@@ -11,7 +11,6 @@ import subprocess
import time
from collections import Counter
from collections import namedtuple
-from functools import reduce
from operator import attrgetter
import pytest
@@ -41,7 +40,7 @@ ProcessResult = namedtuple('ProcessResult', 'stdout stderr')
BUILD_CACHE_TEXT = 'Using cache'
-BUILD_PULL_TEXT = 'Status: Image is up to date for busybox:latest'
+BUILD_PULL_TEXT = 'Status: Image is up to date for busybox:1.27.2'
def start_process(base_dir, options):
@@ -171,13 +170,6 @@ class CLITestCase(DockerClientTestCase):
# Prevent tearDown from trying to create a project
self.base_dir = None
- def test_quiet_build(self):
- self.base_dir = 'tests/fixtures/build-args'
- result = self.dispatch(['build'], None)
- quietResult = self.dispatch(['build', '-q'], None)
- assert result.stdout != ""
- assert quietResult.stdout == ""
-
def test_help_nonexistent(self):
self.base_dir = 'tests/fixtures/no-composefile'
result = self.dispatch(['help', 'foobar'], returncode=1)
@@ -332,21 +324,6 @@ class CLITestCase(DockerClientTestCase):
'version': '2.4'
}
- def test_config_with_env_file(self):
- self.base_dir = 'tests/fixtures/default-env-file'
- result = self.dispatch(['--env-file', '.env2', 'config'])
- json_result = yaml.load(result.stdout)
- assert json_result == {
- 'services': {
- 'web': {
- 'command': 'false',
- 'image': 'alpine:latest',
- 'ports': ['5644/tcp', '9998/tcp']
- }
- },
- 'version': '2.4'
- }
-
def test_config_with_dot_env_and_override_dir(self):
self.base_dir = 'tests/fixtures/default-env-file'
result = self.dispatch(['--project-directory', 'alt/', 'config'])
@@ -656,13 +633,6 @@ class CLITestCase(DockerClientTestCase):
'image library/nonexisting-image:latest not found' in result.stderr or
'pull access denied for nonexisting-image' in result.stderr)
- def test_pull_with_build(self):
- result = self.dispatch(['-f', 'pull-with-build.yml', 'pull'])
-
- assert 'Pulling simple' not in result.stderr
- assert 'Pulling from_simple' not in result.stderr
- assert 'Pulling another ...' in result.stderr
-
def test_pull_with_quiet(self):
assert self.dispatch(['pull', '--quiet']).stderr == ''
assert self.dispatch(['pull', '--quiet']).stdout == ''
@@ -688,15 +658,15 @@ class CLITestCase(DockerClientTestCase):
self.base_dir = 'tests/fixtures/links-composefile'
result = self.dispatch(['pull', '--no-parallel', 'web'])
assert sorted(result.stderr.split('\n'))[1:] == [
- 'Pulling web (busybox:latest)...',
+ 'Pulling web (busybox:1.27.2)...',
]
def test_pull_with_include_deps(self):
self.base_dir = 'tests/fixtures/links-composefile'
result = self.dispatch(['pull', '--no-parallel', '--include-deps', 'web'])
assert sorted(result.stderr.split('\n'))[1:] == [
- 'Pulling db (busybox:latest)...',
- 'Pulling web (busybox:latest)...',
+ 'Pulling db (busybox:1.27.2)...',
+ 'Pulling web (busybox:1.27.2)...',
]
def test_build_plain(self):
@@ -777,26 +747,6 @@ class CLITestCase(DockerClientTestCase):
]
assert not containers
- def test_build_rm(self):
- containers = [
- Container.from_ps(self.project.client, c)
- for c in self.project.client.containers(all=True)
- ]
-
- assert not containers
-
- self.base_dir = 'tests/fixtures/simple-dockerfile'
- self.dispatch(['build', '--no-rm', 'simple'], returncode=0)
-
- containers = [
- Container.from_ps(self.project.client, c)
- for c in self.project.client.containers(all=True)
- ]
- assert containers
-
- for c in self.project.client.containers(all=True):
- self.addCleanup(self.project.client.remove_container, c, force=True)
-
def test_build_shm_size_build_option(self):
pull_busybox(self.client)
self.base_dir = 'tests/fixtures/build-shm-size'
@@ -1159,22 +1109,6 @@ class CLITestCase(DockerClientTestCase):
assert len(remote_volumes) > 0
@v2_only()
- def test_up_no_start_remove_orphans(self):
- self.base_dir = 'tests/fixtures/v2-simple'
- self.dispatch(['up', '--no-start'], None)
-
- services = self.project.get_services()
-
- stopped = reduce((lambda prev, next: prev.containers(
- stopped=True) + next.containers(stopped=True)), services)
- assert len(stopped) == 2
-
- self.dispatch(['-f', 'one-container.yml', 'up', '--no-start', '--remove-orphans'], None)
- stopped2 = reduce((lambda prev, next: prev.containers(
- stopped=True) + next.containers(stopped=True)), services)
- assert len(stopped2) == 1
-
- @v2_only()
def test_up_no_ansi(self):
self.base_dir = 'tests/fixtures/v2-simple'
result = self.dispatch(['--no-ansi', 'up', '-d'], None)
@@ -2367,7 +2301,6 @@ class CLITestCase(DockerClientTestCase):
assert 'another' in result.stdout
assert 'exited with code 0' in result.stdout
- @pytest.mark.skip(reason="race condition between up and logs")
def test_logs_follow_logs_from_new_containers(self):
self.base_dir = 'tests/fixtures/logs-composefile'
self.dispatch(['up', '-d', 'simple'])
@@ -2394,7 +2327,6 @@ class CLITestCase(DockerClientTestCase):
assert '{} exited with code 0'.format(another_name) in result.stdout
assert '{} exited with code 137'.format(simple_name) in result.stdout
- @pytest.mark.skip(reason="race condition between up and logs")
def test_logs_follow_logs_from_restarted_containers(self):
self.base_dir = 'tests/fixtures/logs-restart-composefile'
proc = start_process(self.base_dir, ['up'])
@@ -2415,7 +2347,6 @@ class CLITestCase(DockerClientTestCase):
) == 3
assert result.stdout.count('world') == 3
- @pytest.mark.skip(reason="race condition between up and logs")
def test_logs_default(self):
self.base_dir = 'tests/fixtures/logs-composefile'
self.dispatch(['up', '-d'])
@@ -2542,12 +2473,10 @@ class CLITestCase(DockerClientTestCase):
self.dispatch(['up', '-d'])
assert len(project.get_service('web').containers()) == 2
assert len(project.get_service('db').containers()) == 1
- assert len(project.get_service('worker').containers()) == 0
- self.dispatch(['up', '-d', '--scale', 'web=3', '--scale', 'worker=1'])
+ self.dispatch(['up', '-d', '--scale', 'web=3'])
assert len(project.get_service('web').containers()) == 3
assert len(project.get_service('db').containers()) == 1
- assert len(project.get_service('worker').containers()) == 1
def test_up_scale_scale_down(self):
self.base_dir = 'tests/fixtures/scale'
@@ -2556,26 +2485,22 @@ class CLITestCase(DockerClientTestCase):
self.dispatch(['up', '-d'])
assert len(project.get_service('web').containers()) == 2
assert len(project.get_service('db').containers()) == 1
- assert len(project.get_service('worker').containers()) == 0
self.dispatch(['up', '-d', '--scale', 'web=1'])
assert len(project.get_service('web').containers()) == 1
assert len(project.get_service('db').containers()) == 1
- assert len(project.get_service('worker').containers()) == 0
def test_up_scale_reset(self):
self.base_dir = 'tests/fixtures/scale'
project = self.project
- self.dispatch(['up', '-d', '--scale', 'web=3', '--scale', 'db=3', '--scale', 'worker=3'])
+ self.dispatch(['up', '-d', '--scale', 'web=3', '--scale', 'db=3'])
assert len(project.get_service('web').containers()) == 3
assert len(project.get_service('db').containers()) == 3
- assert len(project.get_service('worker').containers()) == 3
self.dispatch(['up', '-d'])
assert len(project.get_service('web').containers()) == 2
assert len(project.get_service('db').containers()) == 1
- assert len(project.get_service('worker').containers()) == 0
def test_up_scale_to_zero(self):
self.base_dir = 'tests/fixtures/scale'
@@ -2584,12 +2509,10 @@ class CLITestCase(DockerClientTestCase):
self.dispatch(['up', '-d'])
assert len(project.get_service('web').containers()) == 2
assert len(project.get_service('db').containers()) == 1
- assert len(project.get_service('worker').containers()) == 0
- self.dispatch(['up', '-d', '--scale', 'web=0', '--scale', 'db=0', '--scale', 'worker=0'])
+ self.dispatch(['up', '-d', '--scale', 'web=0', '--scale', 'db=0'])
assert len(project.get_service('web').containers()) == 0
assert len(project.get_service('db').containers()) == 0
- assert len(project.get_service('worker').containers()) == 0
def test_port(self):
self.base_dir = 'tests/fixtures/ports-composefile'
@@ -2669,7 +2592,7 @@ class CLITestCase(DockerClientTestCase):
container, = self.project.containers()
expected_template = ' container {} {}'
- expected_meta_info = ['image=busybox:latest', 'name=simple-composefile_simple_']
+ expected_meta_info = ['image=busybox:1.27.2', 'name=simple-composefile_simple_']
assert expected_template.format('create', container.id) in lines[0]
assert expected_template.format('start', container.id) in lines[1]
diff --git a/tests/fixtures/default-env-file/.env2 b/tests/fixtures/default-env-file/.env2
deleted file mode 100644
index d754523f..00000000
--- a/tests/fixtures/default-env-file/.env2
+++ /dev/null
@@ -1,4 +0,0 @@
-IMAGE=alpine:latest
-COMMAND=false
-PORT1=5644
-PORT2=9998
diff --git a/tests/fixtures/environment-exec/docker-compose.yml b/tests/fixtures/environment-exec/docker-compose.yml
index 813606eb..e284ba8c 100644
--- a/tests/fixtures/environment-exec/docker-compose.yml
+++ b/tests/fixtures/environment-exec/docker-compose.yml
@@ -2,7 +2,7 @@ version: "2.2"
services:
service:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
environment:
diff --git a/tests/fixtures/links-composefile/docker-compose.yml b/tests/fixtures/links-composefile/docker-compose.yml
index 930fd4c7..0a2f3d9e 100644
--- a/tests/fixtures/links-composefile/docker-compose.yml
+++ b/tests/fixtures/links-composefile/docker-compose.yml
@@ -1,11 +1,11 @@
db:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
web:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
links:
- db:db
console:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
diff --git a/tests/fixtures/logs-composefile/docker-compose.yml b/tests/fixtures/logs-composefile/docker-compose.yml
index ea18f162..b719c91e 100644
--- a/tests/fixtures/logs-composefile/docker-compose.yml
+++ b/tests/fixtures/logs-composefile/docker-compose.yml
@@ -1,6 +1,6 @@
simple:
image: busybox:latest
- command: sh -c "sleep 1 && echo hello && tail -f /dev/null"
+ command: sh -c "echo hello && tail -f /dev/null"
another:
image: busybox:latest
- command: sh -c "sleep 1 && echo test"
+ command: sh -c "echo test"
diff --git a/tests/fixtures/logs-restart-composefile/docker-compose.yml b/tests/fixtures/logs-restart-composefile/docker-compose.yml
index 6be8b907..c662a1e7 100644
--- a/tests/fixtures/logs-restart-composefile/docker-compose.yml
+++ b/tests/fixtures/logs-restart-composefile/docker-compose.yml
@@ -3,5 +3,5 @@ simple:
command: sh -c "echo hello && tail -f /dev/null"
another:
image: busybox:latest
- command: sh -c "sleep 2 && echo world && /bin/false"
+ command: sh -c "sleep 0.5 && echo world && /bin/false"
restart: "on-failure:2"
diff --git a/tests/fixtures/scale/docker-compose.yml b/tests/fixtures/scale/docker-compose.yml
index 53ae1342..a0d3b771 100644
--- a/tests/fixtures/scale/docker-compose.yml
+++ b/tests/fixtures/scale/docker-compose.yml
@@ -5,9 +5,5 @@ services:
command: top
scale: 2
db:
- image: busybox
- command: top
- worker:
- image: busybox
- command: top
- scale: 0
+ image: busybox
+ command: top
diff --git a/tests/fixtures/simple-composefile/docker-compose.yml b/tests/fixtures/simple-composefile/docker-compose.yml
index b25beaf4..e86d3fc8 100644
--- a/tests/fixtures/simple-composefile/docker-compose.yml
+++ b/tests/fixtures/simple-composefile/docker-compose.yml
@@ -1,5 +1,5 @@
simple:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
another:
image: busybox:latest
diff --git a/tests/fixtures/simple-composefile/pull-with-build.yml b/tests/fixtures/simple-composefile/pull-with-build.yml
deleted file mode 100644
index 261dc44d..00000000
--- a/tests/fixtures/simple-composefile/pull-with-build.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-version: "3"
-services:
- build_simple:
- image: simple
- build: .
- command: top
- from_simple:
- image: simple
- another:
- image: busybox:latest
- command: top
diff --git a/tests/fixtures/simple-dockerfile/Dockerfile b/tests/fixtures/simple-dockerfile/Dockerfile
index dd864b83..098ff3eb 100644
--- a/tests/fixtures/simple-dockerfile/Dockerfile
+++ b/tests/fixtures/simple-dockerfile/Dockerfile
@@ -1,3 +1,3 @@
-FROM busybox:latest
+FROM busybox:1.27.2
LABEL com.docker.compose.test_image=true
CMD echo "success"
diff --git a/tests/fixtures/v2-simple/docker-compose.yml b/tests/fixtures/v2-simple/docker-compose.yml
index c99ae02f..ac754eee 100644
--- a/tests/fixtures/v2-simple/docker-compose.yml
+++ b/tests/fixtures/v2-simple/docker-compose.yml
@@ -1,8 +1,8 @@
version: "2"
services:
simple:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
another:
- image: busybox:latest
+ image: busybox:1.27.2
command: top
diff --git a/tests/fixtures/v2-simple/one-container.yml b/tests/fixtures/v2-simple/one-container.yml
deleted file mode 100644
index 22cd9863..00000000
--- a/tests/fixtures/v2-simple/one-container.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-version: "2"
-services:
- simple:
- image: busybox:latest
- command: top
diff --git a/tests/integration/environment_test.py b/tests/integration/environment_test.py
deleted file mode 100644
index 07619eec..00000000
--- a/tests/integration/environment_test.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import tempfile
-
-from ddt import data
-from ddt import ddt
-
-from .. import mock
-from compose.cli.command import project_from_options
-from tests.integration.testcases import DockerClientTestCase
-
-
-@ddt
-class EnvironmentTest(DockerClientTestCase):
- @classmethod
- def setUpClass(cls):
- super(EnvironmentTest, cls).setUpClass()
- cls.compose_file = tempfile.NamedTemporaryFile(mode='w+b')
- cls.compose_file.write(bytes("""version: '3.2'
-services:
- svc:
- image: busybox:latest
- environment:
- TEST_VARIABLE: ${TEST_VARIABLE}""", encoding='utf-8'))
- cls.compose_file.flush()
-
- @classmethod
- def tearDownClass(cls):
- super(EnvironmentTest, cls).tearDownClass()
- cls.compose_file.close()
-
- @data('events',
- 'exec',
- 'kill',
- 'logs',
- 'pause',
- 'ps',
- 'restart',
- 'rm',
- 'start',
- 'stop',
- 'top',
- 'unpause')
- def _test_no_warning_on_missing_host_environment_var_on_silent_commands(self, cmd):
- options = {'COMMAND': cmd, '--file': [EnvironmentTest.compose_file.name]}
- with mock.patch('compose.config.environment.log') as fake_log:
- # Note that the warning silencing and the env variables check is
- # done in `project_from_options`
- # So no need to have a proper options map, the `COMMAND` key is enough
- project_from_options('.', options)
- assert fake_log.warn.call_count == 0
diff --git a/tests/integration/project_test.py b/tests/integration/project_test.py
index fe6ace90..57f3b707 100644
--- a/tests/integration/project_test.py
+++ b/tests/integration/project_test.py
@@ -1,7 +1,6 @@
from __future__ import absolute_import
from __future__ import unicode_literals
-import copy
import json
import os
import random
@@ -1497,60 +1496,6 @@ class ProjectTest(DockerClientTestCase):
output = container.logs()
assert output == b"This is the secret\n"
- @v3_only()
- def test_project_up_with_added_secrets(self):
- node = create_host_file(self.client, os.path.abspath('tests/fixtures/secrets/default'))
-
- config_input1 = {
- 'version': V3_1,
- 'services': [
- {
- 'name': 'web',
- 'image': 'busybox:latest',
- 'command': 'cat /run/secrets/special',
- 'environment': ['constraint:node=={}'.format(node if node is not None else '')]
- }
-
- ],
- 'secrets': {
- 'super': {
- 'file': os.path.abspath('tests/fixtures/secrets/default')
- }
- }
- }
- config_input2 = copy.deepcopy(config_input1)
- # Add the secret
- config_input2['services'][0]['secrets'] = [
- types.ServiceSecret.parse({'source': 'super', 'target': 'special'})
- ]
-
- config_data1 = build_config(**config_input1)
- config_data2 = build_config(**config_input2)
-
- # First up with non-secret
- project = Project.from_config(
- client=self.client,
- name='composetest',
- config_data=config_data1,
- )
- project.up()
-
- # Then up with secret
- project = Project.from_config(
- client=self.client,
- name='composetest',
- config_data=config_data2,
- )
- project.up()
- project.stop()
-
- containers = project.containers(stopped=True)
- assert len(containers) == 1
- container, = containers
-
- output = container.logs()
- assert output == b"This is the secret\n"
-
@v2_only()
def test_initialize_volumes_invalid_volume_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
diff --git a/tests/integration/service_test.py b/tests/integration/service_test.py
index b49ae710..000f6838 100644
--- a/tests/integration/service_test.py
+++ b/tests/integration/service_test.py
@@ -695,8 +695,8 @@ class ServiceTest(DockerClientTestCase):
new_container, = service.execute_convergence_plan(
ConvergencePlan('recreate', [old_container]))
- mock_log.warning.assert_called_once_with(mock.ANY)
- _, args, kwargs = mock_log.warning.mock_calls[0]
+ mock_log.warn.assert_called_once_with(mock.ANY)
+ _, args, kwargs = mock_log.warn.mock_calls[0]
assert "Service \"db\" is using volume \"/data\" from the previous container" in args[0]
assert [mount['Destination'] for mount in new_container.get('Mounts')] == ['/data']
@@ -1382,7 +1382,7 @@ class ServiceTest(DockerClientTestCase):
with pytest.raises(OperationFailedError):
service.scale(3)
- captured_output = mock_log.warning.call_args[0][0]
+ captured_output = mock_log.warn.call_args[0][0]
assert len(service.containers()) == 1
assert "Remove the custom name to scale the service." in captured_output
diff --git a/tests/integration/state_test.py b/tests/integration/state_test.py
index 0d69217c..b7d38a4b 100644
--- a/tests/integration/state_test.py
+++ b/tests/integration/state_test.py
@@ -5,8 +5,6 @@ by `docker-compose up`.
from __future__ import absolute_import
from __future__ import unicode_literals
-import copy
-
import py
from docker.errors import ImageNotFound
@@ -211,143 +209,6 @@ class ProjectWithDependenciesTest(ProjectTestCase):
}
-class ProjectWithDependsOnDependenciesTest(ProjectTestCase):
- def setUp(self):
- super(ProjectWithDependsOnDependenciesTest, self).setUp()
-
- self.cfg = {
- 'version': '2',
- 'services': {
- 'db': {
- 'image': 'busybox:latest',
- 'command': 'tail -f /dev/null',
- },
- 'web': {
- 'image': 'busybox:latest',
- 'command': 'tail -f /dev/null',
- 'depends_on': ['db'],
- },
- 'nginx': {
- 'image': 'busybox:latest',
- 'command': 'tail -f /dev/null',
- 'depends_on': ['web'],
- },
- }
- }
-
- def test_up(self):
- local_cfg = copy.deepcopy(self.cfg)
- containers = self.run_up(local_cfg)
- assert set(c.service for c in containers) == set(['db', 'web', 'nginx'])
-
- def test_change_leaf(self):
- local_cfg = copy.deepcopy(self.cfg)
- old_containers = self.run_up(local_cfg)
-
- local_cfg['services']['nginx']['environment'] = {'NEW_VAR': '1'}
- new_containers = self.run_up(local_cfg)
-
- assert set(c.service for c in new_containers - old_containers) == set(['nginx'])
-
- def test_change_middle(self):
- local_cfg = copy.deepcopy(self.cfg)
- old_containers = self.run_up(local_cfg)
-
- local_cfg['services']['web']['environment'] = {'NEW_VAR': '1'}
- new_containers = self.run_up(local_cfg)
-
- assert set(c.service for c in new_containers - old_containers) == set(['web'])
-
- def test_change_middle_always_recreate_deps(self):
- local_cfg = copy.deepcopy(self.cfg)
- old_containers = self.run_up(local_cfg, always_recreate_deps=True)
-
- local_cfg['services']['web']['environment'] = {'NEW_VAR': '1'}
- new_containers = self.run_up(local_cfg, always_recreate_deps=True)
-
- assert set(c.service for c in new_containers - old_containers) == set(['web', 'nginx'])
-
- def test_change_root(self):
- local_cfg = copy.deepcopy(self.cfg)
- old_containers = self.run_up(local_cfg)
-
- local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'}
- new_containers = self.run_up(local_cfg)
-
- assert set(c.service for c in new_containers - old_containers) == set(['db'])
-
- def test_change_root_always_recreate_deps(self):
- local_cfg = copy.deepcopy(self.cfg)
- old_containers = self.run_up(local_cfg, always_recreate_deps=True)
-
- local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'}
- new_containers = self.run_up(local_cfg, always_recreate_deps=True)
-
- assert set(c.service for c in new_containers - old_containers) == set(['db', 'web', 'nginx'])
-
- def test_change_root_no_recreate(self):
- local_cfg = copy.deepcopy(self.cfg)
- old_containers = self.run_up(local_cfg)
-
- local_cfg['services']['db']['environment'] = {'NEW_VAR': '1'}
- new_containers = self.run_up(
- local_cfg,
- strategy=ConvergenceStrategy.never)
-
- assert new_containers - old_containers == set()
-
- def test_service_removed_while_down(self):
- local_cfg = copy.deepcopy(self.cfg)
- next_cfg = copy.deepcopy(self.cfg)
- del next_cfg['services']['db']
- del next_cfg['services']['web']['depends_on']
-
- containers = self.run_up(local_cfg)
- assert set(c.service for c in containers) == set(['db', 'web', 'nginx'])
-
- project = self.make_project(local_cfg)
- project.stop(timeout=1)
-
- next_containers = self.run_up(next_cfg)
- assert set(c.service for c in next_containers) == set(['web', 'nginx'])
-
- def test_service_removed_while_up(self):
- local_cfg = copy.deepcopy(self.cfg)
- containers = self.run_up(local_cfg)
- assert set(c.service for c in containers) == set(['db', 'web', 'nginx'])
-
- del local_cfg['services']['db']
- del local_cfg['services']['web']['depends_on']
-
- containers = self.run_up(local_cfg)
- assert set(c.service for c in containers) == set(['web', 'nginx'])
-
- def test_dependency_removed(self):
- local_cfg = copy.deepcopy(self.cfg)
- next_cfg = copy.deepcopy(self.cfg)
- del next_cfg['services']['nginx']['depends_on']
-
- containers = self.run_up(local_cfg, service_names=['nginx'])
- assert set(c.service for c in containers) == set(['db', 'web', 'nginx'])
-
- project = self.make_project(local_cfg)
- project.stop(timeout=1)
-
- next_containers = self.run_up(next_cfg, service_names=['nginx'])
- assert set(c.service for c in next_containers if c.is_running) == set(['nginx'])
-
- def test_dependency_added(self):
- local_cfg = copy.deepcopy(self.cfg)
-
- del local_cfg['services']['nginx']['depends_on']
- containers = self.run_up(local_cfg, service_names=['nginx'])
- assert set(c.service for c in containers) == set(['nginx'])
-
- local_cfg['services']['nginx']['depends_on'] = ['db']
- containers = self.run_up(local_cfg, service_names=['nginx'])
- assert set(c.service for c in containers) == set(['nginx', 'db'])
-
-
class ServiceStateTest(DockerClientTestCase):
"""Test cases for Service.convergence_plan."""
diff --git a/tests/unit/bundle_test.py b/tests/unit/bundle_test.py
index 8faebb7f..88f75405 100644
--- a/tests/unit/bundle_test.py
+++ b/tests/unit/bundle_test.py
@@ -10,7 +10,6 @@ from compose import service
from compose.cli.errors import UserError
from compose.config.config import Config
from compose.const import COMPOSEFILE_V2_0 as V2_0
-from compose.service import NoSuchImageError
@pytest.fixture
@@ -36,16 +35,6 @@ def test_get_image_digest_image_uses_digest(mock_service):
assert not mock_service.image.called
-def test_get_image_digest_from_repository(mock_service):
- mock_service.options['image'] = 'abcd'
- mock_service.image_name = 'abcd'
- mock_service.image.side_effect = NoSuchImageError(None)
- mock_service.get_image_registry_data.return_value = {'Descriptor': {'digest': 'digest'}}
-
- digest = bundle.get_image_digest(mock_service)
- assert digest == 'abcd@digest'
-
-
def test_get_image_digest_no_image(mock_service):
with pytest.raises(UserError) as exc:
bundle.get_image_digest(service.Service(name='theservice'))
@@ -94,7 +83,7 @@ def test_to_bundle():
configs={}
)
- with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
+ with mock.patch('compose.bundle.log.warn', autospec=True) as mock_log:
output = bundle.to_bundle(config, image_digests)
assert mock_log.mock_calls == [
@@ -128,7 +117,7 @@ def test_convert_service_to_bundle():
'privileged': True,
}
- with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
+ with mock.patch('compose.bundle.log.warn', autospec=True) as mock_log:
config = bundle.convert_service_to_bundle(name, service_dict, image_digest)
mock_log.assert_called_once_with(
@@ -177,7 +166,7 @@ def test_make_service_networks_default():
name = 'theservice'
service_dict = {}
- with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
+ with mock.patch('compose.bundle.log.warn', autospec=True) as mock_log:
networks = bundle.make_service_networks(name, service_dict)
assert not mock_log.called
@@ -195,7 +184,7 @@ def test_make_service_networks():
},
}
- with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
+ with mock.patch('compose.bundle.log.warn', autospec=True) as mock_log:
networks = bundle.make_service_networks(name, service_dict)
mock_log.assert_called_once_with(
diff --git a/tests/unit/cli/docker_client_test.py b/tests/unit/cli/docker_client_test.py
index 772c136e..be91ea31 100644
--- a/tests/unit/cli/docker_client_test.py
+++ b/tests/unit/cli/docker_client_test.py
@@ -247,5 +247,5 @@ class TestGetTlsVersion(object):
environment = {'COMPOSE_TLS_VERSION': 'TLSv5_5'}
with mock.patch('compose.cli.docker_client.log') as mock_log:
tls_version = get_tls_version(environment)
- mock_log.warning.assert_called_once_with(mock.ANY)
+ mock_log.warn.assert_called_once_with(mock.ANY)
assert tls_version is None
diff --git a/tests/unit/cli/main_test.py b/tests/unit/cli/main_test.py
index eb6a99d7..2e97f2c8 100644
--- a/tests/unit/cli/main_test.py
+++ b/tests/unit/cli/main_test.py
@@ -63,7 +63,7 @@ class TestCLIMainTestCase(object):
with mock.patch('compose.cli.main.log') as fake_log:
warn_for_swarm_mode(mock_client)
- assert fake_log.warning.call_count == 1
+ assert fake_log.warn.call_count == 1
class TestSetupConsoleHandlerTestCase(object):
diff --git a/tests/unit/config/config_test.py b/tests/unit/config/config_test.py
index 163c9cd1..50d8e13a 100644
--- a/tests/unit/config/config_test.py
+++ b/tests/unit/config/config_test.py
@@ -329,7 +329,7 @@ class ConfigTest(unittest.TestCase):
)
assert 'Unexpected type for "version" key in "filename.yml"' \
- in mock_logging.warning.call_args[0][0]
+ in mock_logging.warn.call_args[0][0]
service_dicts = config_data.services
assert service_sort(service_dicts) == service_sort([
@@ -613,25 +613,6 @@ class ConfigTest(unittest.TestCase):
excinfo.exconly()
)
- def test_config_integer_service_name_raise_validation_error_v2_when_no_interpolate(self):
- with pytest.raises(ConfigurationError) as excinfo:
- config.load(
- build_config_details(
- {
- 'version': '2',
- 'services': {1: {'image': 'busybox'}}
- },
- 'working_dir',
- 'filename.yml'
- ),
- interpolate=False
- )
-
- assert (
- "In file 'filename.yml', the service name 1 must be a quoted string, i.e. '1'." in
- excinfo.exconly()
- )
-
def test_config_integer_service_property_raise_validation_error(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
@@ -3485,25 +3466,6 @@ class InterpolationTest(unittest.TestCase):
}
@mock.patch.dict(os.environ)
- def test_config_file_with_options_environment_file(self):
- project_dir = 'tests/fixtures/default-env-file'
- service_dicts = config.load(
- config.find(
- project_dir, None, Environment.from_env_file(project_dir, '.env2')
- )
- ).services
-
- assert service_dicts[0] == {
- 'name': 'web',
- 'image': 'alpine:latest',
- 'ports': [
- types.ServicePort.parse('5644')[0],
- types.ServicePort.parse('9998')[0]
- ],
- 'command': 'false'
- }
-
- @mock.patch.dict(os.environ)
def test_config_file_with_environment_variable(self):
project_dir = 'tests/fixtures/environment-interpolation'
os.environ.update(
@@ -3570,8 +3532,8 @@ class InterpolationTest(unittest.TestCase):
with mock.patch('compose.config.environment.log') as log:
config.load(config_details)
- assert 2 == log.warning.call_count
- warnings = sorted(args[0][0] for args in log.warning.call_args_list)
+ assert 2 == log.warn.call_count
+ warnings = sorted(args[0][0] for args in log.warn.call_args_list)
assert 'BAR' in warnings[0]
assert 'FOO' in warnings[1]
@@ -3601,8 +3563,8 @@ class InterpolationTest(unittest.TestCase):
with mock.patch('compose.config.config.log') as log:
config.load(config_details, compatibility=True)
- assert log.warning.call_count == 1
- warn_message = log.warning.call_args[0][0]
+ assert log.warn.call_count == 1
+ warn_message = log.warn.call_args[0][0]
assert warn_message.startswith(
'The following deploy sub-keys are not supported in compatibility mode'
)
@@ -3641,7 +3603,7 @@ class InterpolationTest(unittest.TestCase):
with mock.patch('compose.config.config.log') as log:
cfg = config.load(config_details, compatibility=True)
- assert log.warning.call_count == 0
+ assert log.warn.call_count == 0
service_dict = cfg.services[0]
assert service_dict == {
@@ -5365,28 +5327,6 @@ class SerializeTest(unittest.TestCase):
assert serialized_service['command'] == 'echo $$FOO'
assert serialized_service['entrypoint'][0] == '$$SHELL'
- def test_serialize_escape_dont_interpolate(self):
- cfg = {
- 'version': '2.2',
- 'services': {
- 'web': {
- 'image': 'busybox',
- 'command': 'echo $FOO',
- 'environment': {
- 'CURRENCY': '$'
- },
- 'entrypoint': ['$SHELL', '-c'],
- }
- }
- }
- config_dict = config.load(build_config_details(cfg), interpolate=False)
-
- serialized_config = yaml.load(serialize_config(config_dict, escape_dollar=False))
- serialized_service = serialized_config['services']['web']
- assert serialized_service['environment']['CURRENCY'] == '$'
- assert serialized_service['command'] == 'echo $FOO'
- assert serialized_service['entrypoint'][0] == '$SHELL'
-
def test_serialize_unicode_values(self):
cfg = {
'version': '2.3',
diff --git a/tests/unit/network_test.py b/tests/unit/network_test.py
index 82cfb3be..d7ffa289 100644
--- a/tests/unit/network_test.py
+++ b/tests/unit/network_test.py
@@ -165,6 +165,6 @@ class NetworkTest(unittest.TestCase):
with mock.patch('compose.network.log') as mock_log:
check_remote_network_config(remote, net)
- mock_log.warning.assert_called_once_with(mock.ANY)
- _, args, kwargs = mock_log.warning.mock_calls[0]
+ mock_log.warn.assert_called_once_with(mock.ANY)
+ _, args, kwargs = mock_log.warn.mock_calls[0]
assert 'label "com.project.touhou.character" has changed' in args[0]
diff --git a/tests/unit/project_test.py b/tests/unit/project_test.py
index 89b080d2..4aea91a0 100644
--- a/tests/unit/project_test.py
+++ b/tests/unit/project_test.py
@@ -15,8 +15,6 @@ from compose.config.types import VolumeFromSpec
from compose.const import COMPOSEFILE_V1 as V1
from compose.const import COMPOSEFILE_V2_0 as V2_0
from compose.const import COMPOSEFILE_V2_4 as V2_4
-from compose.const import COMPOSEFILE_V3_7 as V3_7
-from compose.const import DEFAULT_TIMEOUT
from compose.const import LABEL_SERVICE
from compose.container import Container
from compose.errors import OperationFailedError
@@ -767,34 +765,6 @@ class ProjectTest(unittest.TestCase):
)
assert project.get_service('web').platform == 'linux/s390x'
- def test_build_container_operation_with_timeout_func_does_not_mutate_options_with_timeout(self):
- config_data = Config(
- version=V3_7,
- services=[
- {'name': 'web', 'image': 'busybox:latest'},
- {'name': 'db', 'image': 'busybox:latest', 'stop_grace_period': '1s'},
- ],
- networks={}, volumes={}, secrets=None, configs=None,
- )
-
- project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
-
- stop_op = project.build_container_operation_with_timeout_func('stop', options={})
-
- web_container = mock.create_autospec(Container, service='web')
- db_container = mock.create_autospec(Container, service='db')
-
- # `stop_grace_period` is not set to 'web' service,
- # then it is stopped with the default timeout.
- stop_op(web_container)
- web_container.stop.assert_called_once_with(timeout=DEFAULT_TIMEOUT)
-
- # `stop_grace_period` is set to 'db' service,
- # then it is stopped with the specified timeout and
- # the value is not overridden by the previous function call.
- stop_op(db_container)
- db_container.stop.assert_called_once_with(timeout=1)
-
@mock.patch('compose.parallel.ParallelStreamWriter._write_noansi')
def test_error_parallel_pull(self, mock_write):
project = Project.from_config(
diff --git a/tests/unit/service_test.py b/tests/unit/service_test.py
index 8c381f15..8b3352fc 100644
--- a/tests/unit/service_test.py
+++ b/tests/unit/service_test.py
@@ -333,7 +333,7 @@ class ServiceTest(unittest.TestCase):
assert service.options['environment'] == environment
assert opts['labels'][LABEL_CONFIG_HASH] == \
- '689149e6041a85f6fb4945a2146a497ed43c8a5cbd8991753d875b165f1b4de4'
+ '2524a06fcb3d781aa2c981fc40bcfa08013bb318e4273bfa388df22023e6f2aa'
assert opts['environment'] == ['also=real']
def test_get_container_create_options_sets_affinity_with_binds(self):
@@ -516,8 +516,8 @@ class ServiceTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
service.create_container()
- assert mock_log.warning.called
- _, args, _ = mock_log.warning.mock_calls[0]
+ assert mock_log.warn.called
+ _, args, _ = mock_log.warn.mock_calls[0]
assert 'was built because it did not already exist' in args[0]
assert self.mock_client.build.call_count == 1
@@ -546,7 +546,7 @@ class ServiceTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
service.ensure_image_exists(do_build=BuildAction.force)
- assert not mock_log.warning.called
+ assert not mock_log.warn.called
assert self.mock_client.build.call_count == 1
self.mock_client.build.call_args[1]['tag'] == 'default_foo'
@@ -676,7 +676,6 @@ class ServiceTest(unittest.TestCase):
'options': {'image': 'example.com/foo'},
'links': [('one', 'one')],
'net': 'other',
- 'secrets': [],
'networks': {'default': None},
'volumes_from': [('two', 'rw')],
}
@@ -699,7 +698,6 @@ class ServiceTest(unittest.TestCase):
'options': {'image': 'example.com/foo'},
'links': [],
'networks': {},
- 'secrets': [],
'net': 'aaabbb',
'volumes_from': [],
}
@@ -847,13 +845,13 @@ class ServiceTest(unittest.TestCase):
ports=["8080:80"])
service.scale(0)
- assert not mock_log.warning.called
+ assert not mock_log.warn.called
service.scale(1)
- assert not mock_log.warning.called
+ assert not mock_log.warn.called
service.scale(2)
- mock_log.warning.assert_called_once_with(
+ mock_log.warn.assert_called_once_with(
'The "{}" service specifies a port on the host. If multiple containers '
'for this service are created on a single host, the port will clash.'.format(name))
@@ -1391,7 +1389,7 @@ class ServiceVolumesTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
warn_on_masked_volume(volumes_option, container_volumes, service)
- assert not mock_log.warning.called
+ assert not mock_log.warn.called
def test_warn_on_masked_volume_when_masked(self):
volumes_option = [VolumeSpec('/home/user', '/path', 'rw')]
@@ -1404,7 +1402,7 @@ class ServiceVolumesTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
warn_on_masked_volume(volumes_option, container_volumes, service)
- mock_log.warning.assert_called_once_with(mock.ANY)
+ mock_log.warn.assert_called_once_with(mock.ANY)
def test_warn_on_masked_no_warning_with_same_path(self):
volumes_option = [VolumeSpec('/home/user', '/path', 'rw')]
@@ -1414,7 +1412,7 @@ class ServiceVolumesTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
warn_on_masked_volume(volumes_option, container_volumes, service)
- assert not mock_log.warning.called
+ assert not mock_log.warn.called
def test_warn_on_masked_no_warning_with_container_only_option(self):
volumes_option = [VolumeSpec(None, '/path', 'rw')]
@@ -1426,7 +1424,7 @@ class ServiceVolumesTest(unittest.TestCase):
with mock.patch('compose.service.log', autospec=True) as mock_log:
warn_on_masked_volume(volumes_option, container_volumes, service)
- assert not mock_log.warning.called
+ assert not mock_log.warn.called
def test_create_with_special_volume_mode(self):
self.mock_client.inspect_image.return_value = {'Id': 'imageid'}
diff --git a/tox.ini b/tox.ini
index 57e57bc6..08efd4e6 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27,py37,pre-commit
+envlist = py27,py36,py37,pre-commit
[testenv]
usedevelop=True