summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAlexander Gerasiov <gerasiov@yandex-team.ru>2018-07-10 14:38:19 +0300
committerAlexander Gerasiov <gerasiov@yandex-team.ru>2018-07-10 14:38:19 +0300
commit5e06faab1ecaa2d30100f24b39f81e307329a19e (patch)
tree8845322ece509cd0bb82c81135923d48ca5404bf
parent6951127c68046ae46df6972a37c6cd80cae1d622 (diff)
parent598a95848dfa5d5ba13c99741bb54310697a2e78 (diff)
Merge tag 'upstream/3.4.1'
Upstream version 3.4.1
-rw-r--r--PKG-INFO3
-rw-r--r--docker.egg-info/PKG-INFO3
-rw-r--r--docker.egg-info/SOURCES.txt1
-rw-r--r--docker.egg-info/requires.txt2
-rw-r--r--docker/api/build.py31
-rw-r--r--docker/api/client.py6
-rw-r--r--docker/api/config.py8
-rw-r--r--docker/api/container.py17
-rw-r--r--docker/api/daemon.py4
-rw-r--r--docker/api/plugin.py15
-rw-r--r--docker/auth.py19
-rw-r--r--docker/client.py9
-rw-r--r--docker/models/containers.py18
-rw-r--r--docker/models/images.py4
-rw-r--r--docker/models/networks.py2
-rw-r--r--docker/models/services.py4
-rw-r--r--docker/transport/unixconn.py6
-rw-r--r--docker/types/daemon.py2
-rw-r--r--docker/types/services.py2
-rw-r--r--docker/utils/build.py214
-rw-r--r--docker/utils/fnmatch.py1
-rw-r--r--docker/utils/socket.py3
-rw-r--r--docker/version.py2
-rw-r--r--requirements.txt2
-rw-r--r--setup.py12
-rw-r--r--tests/helpers.py7
-rw-r--r--tests/integration/api_build_test.py92
-rw-r--r--tests/integration/api_client_test.py40
-rw-r--r--tests/integration/api_container_test.py28
-rw-r--r--tests/integration/api_plugin_test.py2
-rw-r--r--tests/integration/models_containers_test.py3
-rw-r--r--tests/unit/api_container_test.py4
-rw-r--r--tests/unit/api_test.py60
-rw-r--r--tests/unit/auth_test.py66
-rw-r--r--tests/unit/fake_api_client.py16
-rw-r--r--tests/unit/models_containers_test.py12
-rw-r--r--tests/unit/utils_build_test.py493
-rw-r--r--tests/unit/utils_test.py480
38 files changed, 983 insertions, 710 deletions
diff --git a/PKG-INFO b/PKG-INFO
index a02d191..d8c4409 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,12 +1,11 @@
Metadata-Version: 1.1
Name: docker
-Version: 3.2.1
+Version: 3.4.1
Summary: A Python library for the Docker Engine API.
Home-page: https://github.com/docker/docker-py
Author: Joffrey F
Author-email: joffrey@docker.com
License: Apache License 2.0
-Description-Content-Type: UNKNOWN
Description: Docker SDK for Python
=====================
diff --git a/docker.egg-info/PKG-INFO b/docker.egg-info/PKG-INFO
index a02d191..d8c4409 100644
--- a/docker.egg-info/PKG-INFO
+++ b/docker.egg-info/PKG-INFO
@@ -1,12 +1,11 @@
Metadata-Version: 1.1
Name: docker
-Version: 3.2.1
+Version: 3.4.1
Summary: A Python library for the Docker Engine API.
Home-page: https://github.com/docker/docker-py
Author: Joffrey F
Author-email: joffrey@docker.com
License: Apache License 2.0
-Description-Content-Type: UNKNOWN
Description: Docker SDK for Python
=====================
diff --git a/docker.egg-info/SOURCES.txt b/docker.egg-info/SOURCES.txt
index 9a773e6..02d2f19 100644
--- a/docker.egg-info/SOURCES.txt
+++ b/docker.egg-info/SOURCES.txt
@@ -120,6 +120,7 @@ tests/unit/models_resources_test.py
tests/unit/models_services_test.py
tests/unit/ssladapter_test.py
tests/unit/swarm_test.py
+tests/unit/utils_build_test.py
tests/unit/utils_config_test.py
tests/unit/utils_json_stream_test.py
tests/unit/utils_test.py
diff --git a/docker.egg-info/requires.txt b/docker.egg-info/requires.txt
index 623f4c7..e0b0763 100644
--- a/docker.egg-info/requires.txt
+++ b/docker.egg-info/requires.txt
@@ -1,7 +1,7 @@
requests!=2.18.0,>=2.14.2
six>=1.4.0
websocket-client>=0.32.0
-docker-pycreds>=0.2.2
+docker-pycreds>=0.3.0
[:python_version < "3.3"]
ipaddress>=1.0.16
diff --git a/docker/api/build.py b/docker/api/build.py
index d69985e..419255f 100644
--- a/docker/api/build.py
+++ b/docker/api/build.py
@@ -264,6 +264,23 @@ class BuildApiMixin(object):
return self._stream_helper(response, decode=decode)
+ @utils.minimum_version('1.31')
+ def prune_builds(self):
+ """
+ Delete the builder cache
+
+ Returns:
+ (dict): A dictionary containing information about the operation's
+ result. The ``SpaceReclaimed`` key indicates the amount of
+ bytes of disk space reclaimed.
+
+ Raises:
+ :py:class:`docker.errors.APIError`
+ If the server returns an error.
+ """
+ url = self._url("/build/prune")
+ return self._result(self._post(url), True)
+
def _set_auth_headers(self, headers):
log.debug('Looking for auth config')
@@ -285,7 +302,8 @@ class BuildApiMixin(object):
# credentials/native_store.go#L68-L83
for registry in self._auth_configs.get('auths', {}).keys():
auth_data[registry] = auth.resolve_authconfig(
- self._auth_configs, registry
+ self._auth_configs, registry,
+ credstore_env=self.credstore_env,
)
else:
auth_data = self._auth_configs.get('auths', {}).copy()
@@ -316,10 +334,17 @@ def process_dockerfile(dockerfile, path):
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
os.path.relpath(abs_dockerfile, path).startswith('..')):
+ # Dockerfile not in context - read data to insert into tar later
with open(abs_dockerfile, 'r') as df:
return (
'.dockerfile.{0:x}'.format(random.getrandbits(160)),
df.read()
)
- else:
- return (dockerfile, None)
+
+ # Dockerfile is inside the context - return path relative to context root
+ if dockerfile == abs_dockerfile:
+ # Only calculate relpath if necessary to avoid errors
+ # on Windows client -> Linux Docker
+ # see https://github.com/docker/compose/issues/5969
+ dockerfile = os.path.relpath(abs_dockerfile, path)
+ return (dockerfile, None)
diff --git a/docker/api/client.py b/docker/api/client.py
index 13c292a..91da1c8 100644
--- a/docker/api/client.py
+++ b/docker/api/client.py
@@ -83,6 +83,8 @@ class APIClient(
:py:class:`~docker.tls.TLSConfig` object to use custom
configuration.
user_agent (str): Set a custom user agent for requests to the server.
+ credstore_env (dict): Override environment variables when calling the
+ credential store process.
"""
__attrs__ = requests.Session.__attrs__ + ['_auth_configs',
@@ -93,7 +95,8 @@ class APIClient(
def __init__(self, base_url=None, version=None,
timeout=DEFAULT_TIMEOUT_SECONDS, tls=False,
- user_agent=DEFAULT_USER_AGENT, num_pools=DEFAULT_NUM_POOLS):
+ user_agent=DEFAULT_USER_AGENT, num_pools=DEFAULT_NUM_POOLS,
+ credstore_env=None):
super(APIClient, self).__init__()
if tls and not base_url:
@@ -109,6 +112,7 @@ class APIClient(
self._auth_configs = auth.load_config(
config_dict=self._general_configs
)
+ self.credstore_env = credstore_env
base_url = utils.parse_host(
base_url, IS_WINDOWS_PLATFORM, tls=bool(tls)
diff --git a/docker/api/config.py b/docker/api/config.py
index b46b09c..767bef2 100644
--- a/docker/api/config.py
+++ b/docker/api/config.py
@@ -6,7 +6,7 @@ from .. import utils
class ConfigApiMixin(object):
- @utils.minimum_version('1.25')
+ @utils.minimum_version('1.30')
def create_config(self, name, data, labels=None):
"""
Create a config
@@ -35,7 +35,7 @@ class ConfigApiMixin(object):
self._post_json(url, data=body), True
)
- @utils.minimum_version('1.25')
+ @utils.minimum_version('1.30')
@utils.check_resource('id')
def inspect_config(self, id):
"""
@@ -53,7 +53,7 @@ class ConfigApiMixin(object):
url = self._url('/configs/{0}', id)
return self._result(self._get(url), True)
- @utils.minimum_version('1.25')
+ @utils.minimum_version('1.30')
@utils.check_resource('id')
def remove_config(self, id):
"""
@@ -73,7 +73,7 @@ class ConfigApiMixin(object):
self._raise_for_status(res)
return True
- @utils.minimum_version('1.25')
+ @utils.minimum_version('1.30')
def configs(self, filters=None):
"""
List configs
diff --git a/docker/api/container.py b/docker/api/container.py
index cb97b79..d4f75f5 100644
--- a/docker/api/container.py
+++ b/docker/api/container.py
@@ -139,8 +139,9 @@ class ContainerApiMixin(object):
'changes': changes
}
u = self._url("/commit")
- return self._result(self._post_json(u, data=conf, params=params),
- json=True)
+ return self._result(
+ self._post_json(u, data=conf, params=params), json=True
+ )
def containers(self, quiet=False, all=False, trunc=False, latest=False,
since=None, before=None, limit=-1, size=False,
@@ -1018,7 +1019,10 @@ class ContainerApiMixin(object):
"""
params = {'t': timeout}
url = self._url("/containers/{0}/restart", container)
- res = self._post(url, params=params)
+ conn_timeout = self.timeout
+ if conn_timeout is not None:
+ conn_timeout += timeout
+ res = self._post(url, params=params, timeout=conn_timeout)
self._raise_for_status(res)
@utils.check_resource('container')
@@ -1107,9 +1111,10 @@ class ContainerApiMixin(object):
else:
params = {'t': timeout}
url = self._url("/containers/{0}/stop", container)
-
- res = self._post(url, params=params,
- timeout=(timeout + (self.timeout or 0)))
+ conn_timeout = self.timeout
+ if conn_timeout is not None:
+ conn_timeout += timeout
+ res = self._post(url, params=params, timeout=conn_timeout)
self._raise_for_status(res)
@utils.check_resource('container')
diff --git a/docker/api/daemon.py b/docker/api/daemon.py
index fc3692c..76a94cf 100644
--- a/docker/api/daemon.py
+++ b/docker/api/daemon.py
@@ -128,7 +128,9 @@ class DaemonApiMixin(object):
elif not self._auth_configs:
self._auth_configs = auth.load_config()
- authcfg = auth.resolve_authconfig(self._auth_configs, registry)
+ authcfg = auth.resolve_authconfig(
+ self._auth_configs, registry, credstore_env=self.credstore_env,
+ )
# If we found an existing auth config for this registry and username
# combination, we can return it immediately unless reauth is requested.
if authcfg and authcfg.get('username', None) == username \
diff --git a/docker/api/plugin.py b/docker/api/plugin.py
index 73f1852..f6c0b13 100644
--- a/docker/api/plugin.py
+++ b/docker/api/plugin.py
@@ -44,7 +44,10 @@ class PluginApiMixin(object):
"""
url = self._url('/plugins/create')
- with utils.create_archive(root=plugin_data_dir, gzip=gzip) as archv:
+ with utils.create_archive(
+ root=plugin_data_dir, gzip=gzip,
+ files=set(utils.build.walk(plugin_data_dir, []))
+ ) as archv:
res = self._post(url, params={'name': name}, data=archv)
self._raise_for_status(res)
return True
@@ -167,8 +170,16 @@ class PluginApiMixin(object):
'remote': name,
}
+ headers = {}
+ registry, repo_name = auth.resolve_repository_name(name)
+ header = auth.get_config_header(self, registry)
+ if header:
+ headers['X-Registry-Auth'] = header
+
url = self._url('/plugins/privileges')
- return self._result(self._get(url, params=params), True)
+ return self._result(
+ self._get(url, params=params, headers=headers), True
+ )
@utils.minimum_version('1.25')
@utils.check_resource('name')
diff --git a/docker/auth.py b/docker/auth.py
index 48fcd8b..9635f93 100644
--- a/docker/auth.py
+++ b/docker/auth.py
@@ -44,7 +44,9 @@ def get_config_header(client, registry):
"No auth config in memory - loading from filesystem"
)
client._auth_configs = load_config()
- authcfg = resolve_authconfig(client._auth_configs, registry)
+ authcfg = resolve_authconfig(
+ client._auth_configs, registry, credstore_env=client.credstore_env
+ )
# Do not fail here if no authentication exists for this
# specific registry as we can have a readonly pull. Just
# put the header if we can.
@@ -76,7 +78,7 @@ def get_credential_store(authconfig, registry):
)
-def resolve_authconfig(authconfig, registry=None):
+def resolve_authconfig(authconfig, registry=None, credstore_env=None):
"""
Returns the authentication data from the given auth configuration for a
specific registry. As with the Docker client, legacy entries in the config
@@ -91,7 +93,7 @@ def resolve_authconfig(authconfig, registry=None):
'Using credentials store "{0}"'.format(store_name)
)
cfg = _resolve_authconfig_credstore(
- authconfig, registry, store_name
+ authconfig, registry, store_name, env=credstore_env
)
if cfg is not None:
return cfg
@@ -115,13 +117,14 @@ def resolve_authconfig(authconfig, registry=None):
return None
-def _resolve_authconfig_credstore(authconfig, registry, credstore_name):
+def _resolve_authconfig_credstore(authconfig, registry, credstore_name,
+ env=None):
if not registry or registry == INDEX_NAME:
# The ecosystem is a little schizophrenic with index.docker.io VS
# docker.io - in that case, it seems the full URL is necessary.
registry = INDEX_URL
log.debug("Looking for auth entry for {0}".format(repr(registry)))
- store = dockerpycreds.Store(credstore_name)
+ store = dockerpycreds.Store(credstore_name, environment=env)
try:
data = store.get(registry)
res = {
@@ -267,7 +270,7 @@ def load_config(config_path=None, config_dict=None):
"Couldn't find auth-related section ; attempting to interpret"
"as auth-only file"
)
- return parse_auth(config_dict)
+ return {'auths': parse_auth(config_dict)}
def _load_legacy_config(config_file):
@@ -284,14 +287,14 @@ def _load_legacy_config(config_file):
)
username, password = decode_auth(data[0])
- return {
+ return {'auths': {
INDEX_NAME: {
'username': username,
'password': password,
'email': data[1],
'serveraddress': INDEX_URL,
}
- }
+ }}
except Exception as e:
log.debug(e)
pass
diff --git a/docker/client.py b/docker/client.py
index b4364c3..8d4a52b 100644
--- a/docker/client.py
+++ b/docker/client.py
@@ -33,6 +33,8 @@ class DockerClient(object):
:py:class:`~docker.tls.TLSConfig` object to use custom
configuration.
user_agent (str): Set a custom user agent for requests to the server.
+ credstore_env (dict): Override environment variables when calling the
+ credential store process.
"""
def __init__(self, *args, **kwargs):
self.api = APIClient(*args, **kwargs)
@@ -66,6 +68,8 @@ class DockerClient(object):
assert_hostname (bool): Verify the hostname of the server.
environment (dict): The environment to read environment variables
from. Default: the value of ``os.environ``
+ credstore_env (dict): Override environment variables when calling
+ the credential store process.
Example:
@@ -77,8 +81,9 @@ class DockerClient(object):
"""
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS)
version = kwargs.pop('version', None)
- return cls(timeout=timeout, version=version,
- **kwargs_from_env(**kwargs))
+ return cls(
+ timeout=timeout, version=version, **kwargs_from_env(**kwargs)
+ )
# Resources
@property
diff --git a/docker/models/containers.py b/docker/models/containers.py
index 1e06ed6..b33a718 100644
--- a/docker/models/containers.py
+++ b/docker/models/containers.py
@@ -6,7 +6,7 @@ from ..api import APIClient
from ..constants import DEFAULT_DATA_CHUNK_SIZE
from ..errors import (
ContainerError, DockerException, ImageNotFound,
- create_unexpected_kwargs_error
+ NotFound, create_unexpected_kwargs_error
)
from ..types import HostConfig
from ..utils import version_gte
@@ -844,7 +844,7 @@ class ContainerCollection(Collection):
return self.prepare_model(resp)
def list(self, all=False, before=None, filters=None, limit=-1, since=None,
- sparse=False):
+ sparse=False, ignore_removed=False):
"""
List containers. Similar to the ``docker ps`` command.
@@ -882,6 +882,10 @@ class ContainerCollection(Collection):
information, but guaranteed not to block. Use
:py:meth:`Container.reload` on resulting objects to retrieve
all attributes. Default: ``False``
+ ignore_removed (bool): Ignore failures due to missing containers
+ when attempting to inspect containers from the original list.
+ Set to ``True`` if race conditions are likely. Has no effect
+ if ``sparse=True``. Default: ``False``
Returns:
(list of :py:class:`Container`)
@@ -896,7 +900,15 @@ class ContainerCollection(Collection):
if sparse:
return [self.prepare_model(r) for r in resp]
else:
- return [self.get(r['Id']) for r in resp]
+ containers = []
+ for r in resp:
+ try:
+ containers.append(self.get(r['Id']))
+ # a container may have been removed while iterating
+ except NotFound:
+ if not ignore_removed:
+ raise
+ return containers
def prune(self, filters=None):
return self.client.api.prune_containers(filters=filters)
diff --git a/docker/models/images.py b/docker/models/images.py
index d4893bb..41632c6 100644
--- a/docker/models/images.py
+++ b/docker/models/images.py
@@ -432,6 +432,10 @@ class ImageCollection(Collection):
return self.client.api.prune_images(filters=filters)
prune.__doc__ = APIClient.prune_images.__doc__
+ def prune_builds(self, *args, **kwargs):
+ return self.client.api.prune_builds(*args, **kwargs)
+ prune_builds.__doc__ = APIClient.prune_builds.__doc__
+
def normalize_platform(platform, engine_info):
if platform is None:
diff --git a/docker/models/networks.py b/docker/models/networks.py
index 1c2fbf2..be3291a 100644
--- a/docker/models/networks.py
+++ b/docker/models/networks.py
@@ -211,5 +211,5 @@ class NetworkCollection(Collection):
return networks
def prune(self, filters=None):
- self.client.api.prune_networks(filters=filters)
+ return self.client.api.prune_networks(filters=filters)
prune.__doc__ = APIClient.prune_networks.__doc__
diff --git a/docker/models/services.py b/docker/models/services.py
index 125896b..458d2c8 100644
--- a/docker/models/services.py
+++ b/docker/models/services.py
@@ -126,7 +126,7 @@ class Service(Model):
service_mode = ServiceMode('replicated', replicas)
return self.client.api.update_service(self.id, self.version,
- service_mode,
+ mode=service_mode,
fetch_current_spec=True)
def force_update(self):
@@ -276,7 +276,7 @@ CONTAINER_SPEC_KWARGS = [
'labels',
'mounts',
'open_stdin',
- 'privileges'
+ 'privileges',
'read_only',
'secrets',
'stop_grace_period',
diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py
index cc35d00..c59821a 100644
--- a/docker/transport/unixconn.py
+++ b/docker/transport/unixconn.py
@@ -1,14 +1,10 @@
import six
import requests.adapters
import socket
+from six.moves import http_client as httplib
from .. import constants
-if six.PY3:
- import http.client as httplib
-else:
- import httplib
-
try:
import requests.packages.urllib3 as urllib3
except ImportError:
diff --git a/docker/types/daemon.py b/docker/types/daemon.py
index 852f3d8..ee8624e 100644
--- a/docker/types/daemon.py
+++ b/docker/types/daemon.py
@@ -57,6 +57,8 @@ class CancellableStream(object):
else:
sock = sock_fp._sock
+ if isinstance(sock, urllib3.contrib.pyopenssl.WrappedSocket):
+ sock = sock.socket
sock.shutdown(socket.SHUT_RDWR)
sock.close()
diff --git a/docker/types/services.py b/docker/types/services.py
index 09eb05e..31f4750 100644
--- a/docker/types/services.py
+++ b/docker/types/services.py
@@ -82,7 +82,7 @@ class ContainerSpec(dict):
args (:py:class:`list`): Arguments to the command.
hostname (string): The hostname to set on the container.
env (dict): Environment variables.
- dir (string): The working directory for commands to run in.
+ workdir (string): The working directory for commands to run in.
user (string): The user inside the container.
labels (dict): A map of labels to associate with the service.
mounts (:py:class:`list`): A list of specifications for mounts to be
diff --git a/docker/utils/build.py b/docker/utils/build.py
index b644c9f..4fa5751 100644
--- a/docker/utils/build.py
+++ b/docker/utils/build.py
@@ -1,13 +1,13 @@
import io
import os
import re
-import six
import tarfile
import tempfile
+import six
+
+from .fnmatch import fnmatch
from ..constants import IS_WINDOWS_PLATFORM
-from fnmatch import fnmatch
-from itertools import chain
_SEP = re.compile('/|\\\\') if IS_WINDOWS_PLATFORM else re.compile('/')
@@ -44,92 +44,9 @@ def exclude_paths(root, patterns, dockerfile=None):
if dockerfile is None:
dockerfile = 'Dockerfile'
- def split_path(p):
- return [pt for pt in re.split(_SEP, p) if pt and pt != '.']
-
- def normalize(p):
- # Leading and trailing slashes are not relevant. Yes,
- # "foo.py/" must exclude the "foo.py" regular file. "."
- # components are not relevant either, even if the whole
- # pattern is only ".", as the Docker reference states: "For
- # historical reasons, the pattern . is ignored."
- # ".." component must be cleared with the potential previous
- # component, regardless of whether it exists: "A preprocessing
- # step [...] eliminates . and .. elements using Go's
- # filepath.".
- i = 0
- split = split_path(p)
- while i < len(split):
- if split[i] == '..':
- del split[i]
- if i > 0:
- del split[i - 1]
- i -= 1
- else:
- i += 1
- return split
-
- patterns = (
- (True, normalize(p[1:]))
- if p.startswith('!') else
- (False, normalize(p))
- for p in patterns)
- patterns = list(reversed(list(chain(
- # Exclude empty patterns such as "." or the empty string.
- filter(lambda p: p[1], patterns),
- # Always include the Dockerfile and .dockerignore
- [(True, split_path(dockerfile)), (True, ['.dockerignore'])]))))
- return set(walk(root, patterns))
-
-
-def walk(root, patterns, default=True):
- """
- A collection of file lying below root that should be included according to
- patterns.
- """
-
- def match(p):
- if p[1][0] == '**':
- rec = (p[0], p[1][1:])
- return [p] + (match(rec) if rec[1] else [rec])
- elif fnmatch(f, p[1][0]):
- return [(p[0], p[1][1:])]
- else:
- return []
-
- for f in os.listdir(root):
- cur = os.path.join(root, f)
- # The patterns if recursing in that directory.
- sub = list(chain(*(match(p) for p in patterns)))
- # Whether this file is explicitely included / excluded.
- hit = next((p[0] for p in sub if not p[1]), None)
- # Whether this file is implicitely included / excluded.
- matched = default if hit is None else hit
- sub = list(filter(lambda p: p[1], sub))
- if os.path.isdir(cur) and not os.path.islink(cur):
- # Entirely skip directories if there are no chance any subfile will
- # be included.
- if all(not p[0] for p in sub) and not matched:
- continue
- # I think this would greatly speed up dockerignore handling by not
- # recursing into directories we are sure would be entirely
- # included, and only yielding the directory itself, which will be
- # recursively archived anyway. However the current unit test expect
- # the full list of subfiles and I'm not 100% sure it would make no
- # difference yet.
- # if all(p[0] for p in sub) and matched:
- # yield f
- # continue
- children = False
- for r in (os.path.join(f, p) for p in walk(cur, sub, matched)):
- yield r
- children = True
- # The current unit tests expect directories only under those
- # conditions. It might be simplifiable though.
- if (not sub or not children) and hit or hit is None and default:
- yield f
- elif matched:
- yield f
+ patterns.append('!' + dockerfile)
+ pm = PatternMatcher(patterns)
+ return set(pm.walk(root))
def build_file_list(root):
@@ -217,3 +134,122 @@ def mkbuildcontext(dockerfile):
t.close()
f.seek(0)
return f
+
+
+def split_path(p):
+ return [pt for pt in re.split(_SEP, p) if pt and pt != '.']
+
+
+def normalize_slashes(p):
+ if IS_WINDOWS_PLATFORM:
+ return '/'.join(split_path(p))
+ return p
+
+
+def walk(root, patterns, default=True):
+ pm = PatternMatcher(patterns)
+ return pm.walk(root)
+
+
+# Heavily based on
+# https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go
+class PatternMatcher(object):
+ def __init__(self, patterns):
+ self.patterns = list(filter(
+ lambda p: p.dirs, [Pattern(p) for p in patterns]
+ ))
+ self.patterns.append(Pattern('!.dockerignore'))
+
+ def matches(self, filepath):
+ matched = False
+ parent_path = os.path.dirname(filepath)
+ parent_path_dirs = split_path(parent_path)
+
+ for pattern in self.patterns:
+ negative = pattern.exclusion
+ match = pattern.match(filepath)
+ if not match and parent_path != '':
+ if len(pattern.dirs) <= len(parent_path_dirs):
+ match = pattern.match(
+ os.path.sep.join(parent_path_dirs[:len(pattern.dirs)])
+ )
+
+ if match:
+ matched = not negative
+
+ return matched
+
+ def walk(self, root):
+ def rec_walk(current_dir):
+ for f in os.listdir(current_dir):
+ fpath = os.path.join(
+ os.path.relpath(current_dir, root), f
+ )
+ if fpath.startswith('.' + os.path.sep):
+ fpath = fpath[2:]
+ match = self.matches(fpath)
+ if not match:
+ yield fpath
+
+ cur = os.path.join(root, fpath)
+ if not os.path.isdir(cur) or os.path.islink(cur):
+ continue
+
+ if match:
+ # If we want to skip this file and it's a directory
+ # then we should first check to see if there's an
+ # excludes pattern (e.g. !dir/file) that starts with this
+ # dir. If so then we can't skip this dir.
+ skip = True
+
+ for pat in self.patterns:
+ if not pat.exclusion:
+ continue
+ if pat.cleaned_pattern.startswith(
+ normalize_slashes(fpath)):
+ skip = False
+ break
+ if skip:
+ continue
+ for sub in rec_walk(cur):
+ yield sub
+
+ return rec_walk(root)
+
+
+class Pattern(object):
+ def __init__(self, pattern_str):
+ self.exclusion = False
+ if pattern_str.startswith('!'):
+ self.exclusion = True
+ pattern_str = pattern_str[1:]
+
+ self.dirs = self.normalize(pattern_str)
+ self.cleaned_pattern = '/'.join(self.dirs)
+
+ @classmethod
+ def normalize(cls, p):
+
+ # Leading and trailing slashes are not relevant. Yes,
+ # "foo.py/" must exclude the "foo.py" regular file. "."
+ # components are not relevant either, even if the whole
+ # pattern is only ".", as the Docker reference states: "For
+ # historical reasons, the pattern . is ignored."
+ # ".." component must be cleared with the potential previous
+ # component, regardless of whether it exists: "A preprocessing
+ # step [...] eliminates . and .. elements using Go's
+ # filepath.".
+ i = 0
+ split = split_path(p)
+ while i < len(split):
+ if split[i] == '..':
+ del split[i]
+ if i > 0:
+ del split[i - 1]
+ i -= 1
+ else:
+ i += 1
+ return split
+
+ def match(self, filepath):
+ return fnmatch(normalize_slashes(filepath), self.cleaned_pattern)
diff --git a/docker/utils/fnmatch.py b/docker/utils/fnmatch.py
index 42461dd..cc940a2 100644
--- a/docker/utils/fnmatch.py
+++ b/docker/utils/fnmatch.py
@@ -111,4 +111,5 @@ def translate(pat):
res = '%s[%s]' % (res, stuff)
else:
res = res + re.escape(c)
+
return res + '$'
diff --git a/docker/utils/socket.py b/docker/utils/socket.py
index 0945f0a..7b96d4f 100644
--- a/docker/utils/socket.py
+++ b/docker/utils/socket.py
@@ -1,6 +1,7 @@
import errno
import os
import select
+import socket as pysocket
import struct
import six
@@ -28,6 +29,8 @@ def read(socket, n=4096):
try:
if hasattr(socket, 'recv'):
return socket.recv(n)
+ if six.PY3 and isinstance(socket, getattr(pysocket, 'SocketIO')):
+ return socket.read(n)
return os.read(socket.fileno(), n)
except EnvironmentError as e:
if e.errno not in recoverable_errors:
diff --git a/docker/version.py b/docker/version.py
index 28dd1ea..d451374 100644
--- a/docker/version.py
+++ b/docker/version.py
@@ -1,2 +1,2 @@
-version = "3.2.1"
+version = "3.4.1"
version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
diff --git a/requirements.txt b/requirements.txt
index 2b281ae..6c5e7d0 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@ asn1crypto==0.22.0
backports.ssl-match-hostname==3.5.0.1
cffi==1.10.0
cryptography==1.9
-docker-pycreds==0.2.2
+docker-pycreds==0.3.0
enum34==1.1.6
idna==2.5
ipaddress==1.0.18
diff --git a/setup.py b/setup.py
index 271d94f..57b2b5a 100644
--- a/setup.py
+++ b/setup.py
@@ -3,19 +3,9 @@ from __future__ import print_function
import codecs
import os
-import sys
-
-import pip
from setuptools import setup, find_packages
-if 'docker-py' in [x.project_name for x in pip.get_installed_distributions()]:
- print(
- 'ERROR: "docker-py" needs to be uninstalled before installing this'
- ' package:\npip uninstall docker-py', file=sys.stderr
- )
- sys.exit(1)
-
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
@@ -23,7 +13,7 @@ requirements = [
'requests >= 2.14.2, != 2.18.0',
'six >= 1.4.0',
'websocket-client >= 0.32.0',
- 'docker-pycreds >= 0.2.2'
+ 'docker-pycreds >= 0.3.0'
]
extras_require = {
diff --git a/tests/helpers.py b/tests/helpers.py
index b6b493b..b36d6d7 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -123,7 +123,12 @@ def assert_cat_socket_detached_with_keys(sock, inputs):
sock.sendall(b'make sure the socket is closed\n')
else:
sock.sendall(b"make sure the socket is closed\n")
- assert sock.recv(32) == b''
+ data = sock.recv(128)
+ # New in 18.06: error message is broadcast over the socket when reading
+ # after detach
+ assert data == b'' or data.startswith(
+ b'exec attach failed: error on attach stdin: read escape sequence'
+ )
def ctrl_with(char):
diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py
index 8910eb7..baaf33e 100644
--- a/tests/integration/api_build_test.py
+++ b/tests/integration/api_build_test.py
@@ -415,18 +415,20 @@ class BuildTest(BaseAPIIntegrationTest):
f.write('hello world')
with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
f.write('.dockerignore\n')
- df = tempfile.NamedTemporaryFile()
- self.addCleanup(df.close)
- df.write(('\n'.join([
- 'FROM busybox',
- 'COPY . /src',
- 'WORKDIR /src',
- ])).encode('utf-8'))
- df.flush()
+ df_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, df_dir)
+ df_name = os.path.join(df_dir, 'Dockerfile')
+ with open(df_name, 'wb') as df:
+ df.write(('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ])).encode('utf-8'))
+ df.flush()
img_name = random_name()
self.tmp_imgs.append(img_name)
stream = self.client.build(
- path=base_dir, dockerfile=df.name, tag=img_name,
+ path=base_dir, dockerfile=df_name, tag=img_name,
decode=True
)
lines = []
@@ -452,7 +454,6 @@ class BuildTest(BaseAPIIntegrationTest):
'COPY . /src',
'WORKDIR /src',
]))
- print(os.path.join(base_dir, 'custom.dockerfile'))
img_name = random_name()
self.tmp_imgs.append(img_name)
stream = self.client.build(
@@ -472,3 +473,74 @@ class BuildTest(BaseAPIIntegrationTest):
assert sorted(
[b'.', b'..', b'file.txt', b'custom.dockerfile']
) == sorted(lsdata)
+
+ def test_build_in_context_nested_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ subdir = os.path.join(base_dir, 'hello', 'world')
+ os.makedirs(subdir)
+ with open(os.path.join(subdir, 'custom.dockerfile'), 'w') as df:
+ df.write('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ]))
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile='hello/world/custom.dockerfile',
+ tag=img_name, decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 4
+ assert sorted(
+ [b'.', b'..', b'file.txt', b'hello']
+ ) == sorted(lsdata)
+
+ def test_build_in_context_abs_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ abs_dockerfile_path = os.path.join(base_dir, 'custom.dockerfile')
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ with open(abs_dockerfile_path, 'w') as df:
+ df.write('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ]))
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile=abs_dockerfile_path, tag=img_name,
+ decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 4
+ assert sorted(
+ [b'.', b'..', b'file.txt', b'custom.dockerfile']
+ ) == sorted(lsdata)
+
+ @requires_api_version('1.31')
+ def test_prune_builds(self):
+ prune_result = self.client.prune_builds()
+ assert 'SpaceReclaimed' in prune_result
+ assert isinstance(prune_result['SpaceReclaimed'], int)
diff --git a/tests/integration/api_client_test.py b/tests/integration/api_client_test.py
index 05281f8..905e064 100644
--- a/tests/integration/api_client_test.py
+++ b/tests/integration/api_client_test.py
@@ -1,6 +1,3 @@
-import base64
-import os
-import tempfile
import time
import unittest
import warnings
@@ -24,43 +21,6 @@ class InformationTest(BaseAPIIntegrationTest):
assert 'Debug' in res
-class LoadConfigTest(BaseAPIIntegrationTest):
- def test_load_legacy_config(self):
- folder = tempfile.mkdtemp()
- self.tmp_folders.append(folder)
- cfg_path = os.path.join(folder, '.dockercfg')
- f = open(cfg_path, 'w')
- auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
- f.write('auth = {0}\n'.format(auth_))
- f.write('email = sakuya@scarlet.net')
- f.close()
- cfg = docker.auth.load_config(cfg_path)
- assert cfg[docker.auth.INDEX_NAME] is not None
- cfg = cfg[docker.auth.INDEX_NAME]
- assert cfg['username'] == 'sakuya'
- assert cfg['password'] == 'izayoi'
- assert cfg['email'] == 'sakuya@scarlet.net'
- assert cfg.get('Auth') is None
-
- def test_load_json_config(self):
- folder = tempfile.mkdtemp()
- self.tmp_folders.append(folder)
- cfg_path = os.path.join(folder, '.dockercfg')
- f = open(os.path.join(folder, '.dockercfg'), 'w')
- auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
- email_ = 'sakuya@scarlet.net'
- f.write('{{"{0}": {{"auth": "{1}", "email": "{2}"}}}}\n'.format(
- docker.auth.INDEX_URL, auth_, email_))
- f.close()
- cfg = docker.auth.load_config(cfg_path)
- assert cfg[docker.auth.INDEX_URL] is not None
- cfg = cfg[docker.auth.INDEX_URL]
- assert cfg['username'] == 'sakuya'
- assert cfg['password'] == 'izayoi'
- assert cfg['email'] == 'sakuya@scarlet.net'
- assert cfg.get('Auth') is None
-
-
class AutoDetectVersionTest(unittest.TestCase):
def test_client_init(self):
client = docker.APIClient(version='auto', **kwargs_from_env())
diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py
index e212518..ff70148 100644
--- a/tests/integration/api_container_test.py
+++ b/tests/integration/api_container_test.py
@@ -491,6 +491,9 @@ class CreateContainerTest(BaseAPIIntegrationTest):
assert rule in self.client.logs(ctnr).decode('utf-8')
+@pytest.mark.xfail(
+ IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform'
+)
class VolumeBindTest(BaseAPIIntegrationTest):
def setUp(self):
super(VolumeBindTest, self).setUp()
@@ -507,9 +510,6 @@ class VolumeBindTest(BaseAPIIntegrationTest):
['touch', os.path.join(self.mount_dest, self.filename)],
)
- @pytest.mark.xfail(
- IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform'
- )
def test_create_with_binds_rw(self):
container = self.run_with_volume(
@@ -525,9 +525,6 @@ class VolumeBindTest(BaseAPIIntegrationTest):
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, True)
- @pytest.mark.xfail(
- IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform'
- )
def test_create_with_binds_ro(self):
self.run_with_volume(
False,
@@ -548,9 +545,6 @@ class VolumeBindTest(BaseAPIIntegrationTest):
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, False)
- @pytest.mark.xfail(
- IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform'
- )
@requires_api_version('1.30')
def test_create_with_mounts(self):
mount = docker.types.Mount(
@@ -569,9 +563,6 @@ class VolumeBindTest(BaseAPIIntegrationTest):
inspect_data = self.client.inspect_container(container)
self.check_container_data(inspect_data, True)
- @pytest.mark.xfail(
- IS_WINDOWS_PLATFORM, reason='Test not designed for Windows platform'
- )
@requires_api_version('1.30')
def test_create_with_mounts_ro(self):
mount = docker.types.Mount(
@@ -1116,9 +1107,7 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container)
res = self.client.top(container)
- if IS_WINDOWS_PLATFORM:
- assert res['Titles'] == ['PID', 'USER', 'TIME', 'COMMAND']
- else:
+ if not IS_WINDOWS_PLATFORM:
assert res['Titles'] == [
'UID', 'PID', 'PPID', 'C', 'STIME', 'TTY', 'TIME', 'CMD'
]
@@ -1165,6 +1154,15 @@ class RestartContainerTest(BaseAPIIntegrationTest):
assert info2['State']['Running'] is True
self.client.kill(id)
+ def test_restart_with_low_timeout(self):
+ container = self.client.create_container(BUSYBOX, ['sleep', '9999'])
+ self.client.start(container)
+ self.client.timeout = 1
+ self.client.restart(container, timeout=3)
+ self.client.timeout = None
+ self.client.restart(container, timeout=3)
+ self.client.kill(container)
+
def test_restart_with_dict_instead_of_id(self):
container = self.client.create_container(BUSYBOX, ['sleep', '9999'])
assert 'Id' in container
diff --git a/tests/integration/api_plugin_test.py b/tests/integration/api_plugin_test.py
index 433d44d..1150b09 100644
--- a/tests/integration/api_plugin_test.py
+++ b/tests/integration/api_plugin_test.py
@@ -135,7 +135,7 @@ class PluginTest(BaseAPIIntegrationTest):
def test_create_plugin(self):
plugin_data_dir = os.path.join(
- os.path.dirname(__file__), 'testdata/dummy-plugin'
+ os.path.dirname(__file__), os.path.join('testdata', 'dummy-plugin')
)
assert self.client.create_plugin(
'docker-sdk-py/dummy', plugin_data_dir
diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py
index 6ddb034..ab41ea5 100644
--- a/tests/integration/models_containers_test.py
+++ b/tests/integration/models_containers_test.py
@@ -36,6 +36,9 @@ class ContainerCollectionTest(BaseIntegrationTest):
with pytest.raises(docker.errors.ImageNotFound):
client.containers.run("dockerpytest_does_not_exist")
+ @pytest.mark.skipif(
+ docker.constants.IS_WINDOWS_PLATFORM, reason="host mounts on Windows"
+ )
def test_run_with_volume(self):
client = docker.from_env(version=TEST_API_VERSION)
path = tempfile.mkdtemp()
diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py
index c33f129..a7e183c 100644
--- a/tests/unit/api_container_test.py
+++ b/tests/unit/api_container_test.py
@@ -1335,7 +1335,7 @@ class ContainerTest(BaseAPIClientTest):
'POST',
url_prefix + 'containers/3cc2351ab11b/restart',
params={'t': 2},
- timeout=DEFAULT_TIMEOUT_SECONDS
+ timeout=(DEFAULT_TIMEOUT_SECONDS + 2)
)
def test_restart_container_with_dict_instead_of_id(self):
@@ -1345,7 +1345,7 @@ class ContainerTest(BaseAPIClientTest):
'POST',
url_prefix + 'containers/3cc2351ab11b/restart',
params={'t': 2},
- timeout=DEFAULT_TIMEOUT_SECONDS
+ timeout=(DEFAULT_TIMEOUT_SECONDS + 2)
)
def test_remove_container(self):
diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
index 46cbd68..af2bb1c 100644
--- a/tests/unit/api_test.py
+++ b/tests/unit/api_test.py
@@ -44,7 +44,7 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
return res
-def fake_resolve_authconfig(authconfig, registry=None):
+def fake_resolve_authconfig(authconfig, registry=None, *args, **kwargs):
return None
@@ -365,7 +365,7 @@ class DockerApiTest(BaseAPIClientTest):
assert result == content
-class StreamTest(unittest.TestCase):
+class UnixSocketStreamTest(unittest.TestCase):
def setUp(self):
socket_dir = tempfile.mkdtemp()
self.build_context = tempfile.mkdtemp()
@@ -462,7 +462,61 @@ class StreamTest(unittest.TestCase):
raise e
assert list(stream) == [
- str(i).encode() for i in range(50)]
+ str(i).encode() for i in range(50)
+ ]
+
+
+class TCPSocketStreamTest(unittest.TestCase):
+ text_data = b'''
+ Now, those children out there, they're jumping through the
+ flames in the hope that the god of the fire will make them fruitful.
+ Really, you can't blame them. After all, what girl would not prefer the
+ child of a god to that of some acne-scarred artisan?
+ '''
+
+ def setUp(self):
+
+ self.server = six.moves.socketserver.ThreadingTCPServer(
+ ('', 0), self.get_handler_class()
+ )
+ self.thread = threading.Thread(target=self.server.serve_forever)
+ self.thread.setDaemon(True)
+ self.thread.start()
+ self.address = 'http://{}:{}'.format(
+ socket.gethostname(), self.server.server_address[1]
+ )
+
+ def tearDown(self):
+ self.server.shutdown()
+ self.server.server_close()
+ self.thread.join()
+
+ def get_handler_class(self):
+ text_data = self.text_data
+
+ class Handler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler, object):
+ def do_POST(self):
+ self.send_response(101)
+ self.send_header(
+ 'Content-Type', 'application/vnd.docker.raw-stream'
+ )
+ self.send_header('Connection', 'Upgrade')
+ self.send_header('Upgrade', 'tcp')
+ self.end_headers()
+ self.wfile.flush()
+ time.sleep(0.2)
+ self.wfile.write(text_data)
+ self.wfile.flush()
+
+ return Handler
+
+ def test_read_from_socket(self):
+ with APIClient(base_url=self.address) as client:
+ resp = client._post(client._url('/dummy'), stream=True)
+ data = client._read_from_socket(resp, stream=True, tty=True)
+ results = b''.join(data)
+
+ assert results == self.text_data
class UserAgentTest(unittest.TestCase):
diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py
index ee32ca0..947d680 100644
--- a/tests/unit/auth_test.py
+++ b/tests/unit/auth_test.py
@@ -282,22 +282,64 @@ class LoadConfigTest(unittest.TestCase):
cfg = auth.load_config(folder)
assert cfg is not None
- def test_load_config(self):
+ def test_load_legacy_config(self):
folder = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, folder)
- dockercfg_path = os.path.join(folder, '.dockercfg')
- with open(dockercfg_path, 'w') as f:
- auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
+ cfg_path = os.path.join(folder, '.dockercfg')
+ auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
+ with open(cfg_path, 'w') as f:
f.write('auth = {0}\n'.format(auth_))
f.write('email = sakuya@scarlet.net')
- cfg = auth.load_config(dockercfg_path)
- assert auth.INDEX_NAME in cfg
- assert cfg[auth.INDEX_NAME] is not None
- cfg = cfg[auth.INDEX_NAME]
+
+ cfg = auth.load_config(cfg_path)
+ assert auth.resolve_authconfig(cfg) is not None
+ assert cfg['auths'][auth.INDEX_NAME] is not None
+ cfg = cfg['auths'][auth.INDEX_NAME]
assert cfg['username'] == 'sakuya'
assert cfg['password'] == 'izayoi'
assert cfg['email'] == 'sakuya@scarlet.net'
- assert cfg.get('auth') is None
+ assert cfg.get('Auth') is None
+
+ def test_load_json_config(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ cfg_path = os.path.join(folder, '.dockercfg')
+ auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
+ email = 'sakuya@scarlet.net'
+ with open(cfg_path, 'w') as f:
+ json.dump(
+ {auth.INDEX_URL: {'auth': auth_, 'email': email}}, f
+ )
+ cfg = auth.load_config(cfg_path)
+ assert auth.resolve_authconfig(cfg) is not None
+ assert cfg['auths'][auth.INDEX_URL] is not None
+ cfg = cfg['auths'][auth.INDEX_URL]
+ assert cfg['username'] == 'sakuya'
+ assert cfg['password'] == 'izayoi'
+ assert cfg['email'] == email
+ assert cfg.get('Auth') is None
+
+ def test_load_modern_json_config(self):
+ folder = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, folder)
+ cfg_path = os.path.join(folder, 'config.json')
+ auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
+ email = 'sakuya@scarlet.net'
+ with open(cfg_path, 'w') as f:
+ json.dump({
+ 'auths': {
+ auth.INDEX_URL: {
+ 'auth': auth_, 'email': email
+ }
+ }
+ }, f)
+ cfg = auth.load_config(cfg_path)
+ assert auth.resolve_authconfig(cfg) is not None
+ assert cfg['auths'][auth.INDEX_URL] is not None
+ cfg = cfg['auths'][auth.INDEX_URL]
+ assert cfg['username'] == 'sakuya'
+ assert cfg['password'] == 'izayoi'
+ assert cfg['email'] == email
def test_load_config_with_random_name(self):
folder = tempfile.mkdtemp()
@@ -318,7 +360,7 @@ class LoadConfigTest(unittest.TestCase):
with open(dockercfg_path, 'w') as f:
json.dump(config, f)
- cfg = auth.load_config(dockercfg_path)
+ cfg = auth.load_config(dockercfg_path)['auths']
assert registry in cfg
assert cfg[registry] is not None
cfg = cfg[registry]
@@ -345,7 +387,7 @@ class LoadConfigTest(unittest.TestCase):
json.dump(config, f)
with mock.patch.dict(os.environ, {'DOCKER_CONFIG': folder}):
- cfg = auth.load_config(None)
+ cfg = auth.load_config(None)['auths']
assert registry in cfg
assert cfg[registry] is not None
cfg = cfg[registry]
@@ -422,7 +464,7 @@ class LoadConfigTest(unittest.TestCase):
json.dump(config, f)
cfg = auth.load_config(dockercfg_path)
- assert cfg == {}
+ assert cfg == {'auths': {}}
def test_load_config_invalid_auth_dict(self):
folder = tempfile.mkdtemp()
diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py
index 15b60ea..2147bfd 100644
--- a/tests/unit/fake_api_client.py
+++ b/tests/unit/fake_api_client.py
@@ -20,15 +20,18 @@ class CopyReturnMagicMock(mock.MagicMock):
return ret
-def make_fake_api_client():
+def make_fake_api_client(overrides=None):
"""
Returns non-complete fake APIClient.
This returns most of the default cases correctly, but most arguments that
change behaviour will not work.
"""
+
+ if overrides is None:
+ overrides = {}
api_client = docker.APIClient()
- mock_client = CopyReturnMagicMock(**{
+ mock_attrs = {
'build.return_value': fake_api.FAKE_IMAGE_ID,
'commit.return_value': fake_api.post_fake_commit()[1],
'containers.return_value': fake_api.get_fake_containers()[1],
@@ -47,15 +50,18 @@ def make_fake_api_client():
'networks.return_value': fake_api.get_fake_network_list()[1],
'start.return_value': None,
'wait.return_value': {'StatusCode': 0},
- })
+ }
+ mock_attrs.update(overrides)
+ mock_client = CopyReturnMagicMock(**mock_attrs)
+
mock_client._version = docker.constants.DEFAULT_DOCKER_API_VERSION
return mock_client
-def make_fake_client():
+def make_fake_client(overrides=None):
"""
Returns a Client with a fake APIClient.
"""
client = docker.DockerClient()
- client.api = make_fake_api_client()
+ client.api = make_fake_api_client(overrides)
return client
diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py
index 2b0b499..48a5288 100644
--- a/tests/unit/models_containers_test.py
+++ b/tests/unit/models_containers_test.py
@@ -359,6 +359,18 @@ class ContainerCollectionTest(unittest.TestCase):
assert isinstance(containers[0], Container)
assert containers[0].id == FAKE_CONTAINER_ID
+ def test_list_ignore_removed(self):
+ def side_effect(*args, **kwargs):
+ raise docker.errors.NotFound('Container not found')
+ client = make_fake_client({
+ 'inspect_container.side_effect': side_effect
+ })
+
+ with pytest.raises(docker.errors.NotFound):
+ client.containers.list(all=True, ignore_removed=False)
+
+ assert client.containers.list(all=True, ignore_removed=True) == []
+
class ContainerTest(unittest.TestCase):
def test_name(self):
diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py
new file mode 100644
index 0000000..012f15b
--- /dev/null
+++ b/tests/unit/utils_build_test.py
@@ -0,0 +1,493 @@
+# -*- coding: utf-8 -*-
+
+import os
+import os.path
+import shutil
+import socket
+import tarfile
+import tempfile
+import unittest
+
+
+from docker.constants import IS_WINDOWS_PLATFORM
+from docker.utils import exclude_paths, tar
+
+import pytest
+
+from ..helpers import make_tree
+
+
+def convert_paths(collection):
+ return set(map(convert_path, collection))
+
+
+def convert_path(path):
+ return path.replace('/', os.path.sep)
+
+
+class ExcludePathsTest(unittest.TestCase):
+ dirs = [
+ 'foo',
+ 'foo/bar',
+ 'bar',
+ 'target',
+ 'target/subdir',
+ 'subdir',
+ 'subdir/target',
+ 'subdir/target/subdir',
+ 'subdir/subdir2',
+ 'subdir/subdir2/target',
+ 'subdir/subdir2/target/subdir'
+ ]
+
+ files = [
+ 'Dockerfile',
+ 'Dockerfile.alt',
+ '.dockerignore',
+ 'a.py',
+ 'a.go',
+ 'b.py',
+ 'cde.py',
+ 'foo/a.py',
+ 'foo/b.py',
+ 'foo/bar/a.py',
+ 'bar/a.py',
+ 'foo/Dockerfile3',
+ 'target/file.txt',
+ 'target/subdir/file.txt',
+ 'subdir/file.txt',
+ 'subdir/target/file.txt',
+ 'subdir/target/subdir/file.txt',
+ 'subdir/subdir2/file.txt',
+ 'subdir/subdir2/target/file.txt',
+ 'subdir/subdir2/target/subdir/file.txt',
+ ]
+
+ all_paths = set(dirs + files)
+
+ def setUp(self):
+ self.base = make_tree(self.dirs, self.files)
+
+ def tearDown(self):
+ shutil.rmtree(self.base)
+
+ def exclude(self, patterns, dockerfile=None):
+ return set(exclude_paths(self.base, patterns, dockerfile=dockerfile))
+
+ def test_no_excludes(self):
+ assert self.exclude(['']) == convert_paths(self.all_paths)
+
+ def test_no_dupes(self):
+ paths = exclude_paths(self.base, ['!a.py'])
+ assert sorted(paths) == sorted(set(paths))
+
+ def test_wildcard_exclude(self):
+ assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore'])
+
+ def test_exclude_dockerfile_dockerignore(self):
+ """
+ Even if the .dockerignore file explicitly says to exclude
+ Dockerfile and/or .dockerignore, don't exclude them from
+ the actual tar file.
+ """
+ assert self.exclude(['Dockerfile', '.dockerignore']) == convert_paths(
+ self.all_paths
+ )
+
+ def test_exclude_custom_dockerfile(self):
+ """
+ If we're using a custom Dockerfile, make sure that's not
+ excluded.
+ """
+ assert self.exclude(['*'], dockerfile='Dockerfile.alt') == set(
+ ['Dockerfile.alt', '.dockerignore']
+ )
+
+ assert self.exclude(
+ ['*'], dockerfile='foo/Dockerfile3'
+ ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
+
+ # https://github.com/docker/docker-py/issues/1956
+ assert self.exclude(
+ ['*'], dockerfile='./foo/Dockerfile3'
+ ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
+
+ def test_exclude_dockerfile_child(self):
+ includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3')
+ assert convert_path('foo/Dockerfile3') in includes
+ assert convert_path('foo/a.py') not in includes
+
+ def test_single_filename(self):
+ assert self.exclude(['a.py']) == convert_paths(
+ self.all_paths - set(['a.py'])
+ )
+
+ def test_single_filename_leading_dot_slash(self):
+ assert self.exclude(['./a.py']) == convert_paths(
+ self.all_paths - set(['a.py'])
+ )
+
+ # As odd as it sounds, a filename pattern with a trailing slash on the
+ # end *will* result in that file being excluded.
+ def test_single_filename_trailing_slash(self):
+ assert self.exclude(['a.py/']) == convert_paths(
+ self.all_paths - set(['a.py'])
+ )
+
+ def test_wildcard_filename_start(self):
+ assert self.exclude(['*.py']) == convert_paths(
+ self.all_paths - set(['a.py', 'b.py', 'cde.py'])
+ )
+
+ def test_wildcard_with_exception(self):
+ assert self.exclude(['*.py', '!b.py']) == convert_paths(
+ self.all_paths - set(['a.py', 'cde.py'])
+ )
+
+ def test_wildcard_with_wildcard_exception(self):
+ assert self.exclude(['*.*', '!*.go']) == convert_paths(
+ self.all_paths - set([
+ 'a.py', 'b.py', 'cde.py', 'Dockerfile.alt',
+ ])
+ )
+
+ def test_wildcard_filename_end(self):
+ assert self.exclude(['a.*']) == convert_paths(
+ self.all_paths - set(['a.py', 'a.go'])
+ )
+
+ def test_question_mark(self):
+ assert self.exclude(['?.py']) == convert_paths(
+ self.all_paths - set(['a.py', 'b.py'])
+ )
+
+ def test_single_subdir_single_filename(self):
+ assert self.exclude(['foo/a.py']) == convert_paths(
+ self.all_paths - set(['foo/a.py'])
+ )
+
+ def test_single_subdir_single_filename_leading_slash(self):
+ assert self.exclude(['/foo/a.py']) == convert_paths(
+ self.all_paths - set(['foo/a.py'])
+ )
+
+ def test_exclude_include_absolute_path(self):
+ base = make_tree([], ['a.py', 'b.py'])
+ assert exclude_paths(
+ base,
+ ['/*', '!/*.py']
+ ) == set(['a.py', 'b.py'])
+
+ def test_single_subdir_with_path_traversal(self):
+ assert self.exclude(['foo/whoops/../a.py']) == convert_paths(
+ self.all_paths - set(['foo/a.py'])
+ )
+
+ def test_single_subdir_wildcard_filename(self):
+ assert self.exclude(['foo/*.py']) == convert_paths(
+ self.all_paths - set(['foo/a.py', 'foo/b.py'])
+ )
+
+ def test_wildcard_subdir_single_filename(self):
+ assert self.exclude(['*/a.py']) == convert_paths(
+ self.all_paths - set(['foo/a.py', 'bar/a.py'])
+ )
+
+ def test_wildcard_subdir_wildcard_filename(self):
+ assert self.exclude(['*/*.py']) == convert_paths(
+ self.all_paths - set(['foo/a.py', 'foo/b.py', 'bar/a.py'])
+ )
+
+ def test_directory(self):
+ assert self.exclude(['foo']) == convert_paths(
+ self.all_paths - set([
+ 'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py',
+ 'foo/Dockerfile3'
+ ])
+ )
+
+ def test_directory_with_trailing_slash(self):
+ assert self.exclude(['foo/']) == convert_paths(
+ self.all_paths - set([
+ 'foo', 'foo/a.py', 'foo/b.py',
+ 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3'
+ ])
+ )
+
+ def test_directory_with_single_exception(self):
+ assert self.exclude(['foo', '!foo/bar/a.py']) == convert_paths(
+ self.all_paths - set([
+ 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar',
+ 'foo/Dockerfile3'
+ ])
+ )
+
+ def test_directory_with_subdir_exception(self):
+ assert self.exclude(['foo', '!foo/bar']) == convert_paths(
+ self.all_paths - set([
+ 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
+ ])
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Backslash patterns only on Windows'
+ )
+ def test_directory_with_subdir_exception_win32_pathsep(self):
+ assert self.exclude(['foo', '!foo\\bar']) == convert_paths(
+ self.all_paths - set([
+ 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
+ ])
+ )
+
+ def test_directory_with_wildcard_exception(self):
+ assert self.exclude(['foo', '!foo/*.py']) == convert_paths(
+ self.all_paths - set([
+ 'foo/bar', 'foo/bar/a.py', 'foo', 'foo/Dockerfile3'
+ ])
+ )
+
+ def test_subdirectory(self):
+ assert self.exclude(['foo/bar']) == convert_paths(
+ self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Backslash patterns only on Windows'
+ )
+ def test_subdirectory_win32_pathsep(self):
+ assert self.exclude(['foo\\bar']) == convert_paths(
+ self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ )
+
+ def test_double_wildcard(self):
+ assert self.exclude(['**/a.py']) == convert_paths(
+ self.all_paths - set(
+ ['a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py']
+ )
+ )
+
+ assert self.exclude(['foo/**/bar']) == convert_paths(
+ self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ )
+
+ def test_single_and_double_wildcard(self):
+ assert self.exclude(['**/target/*/*']) == convert_paths(
+ self.all_paths - set(
+ ['target/subdir/file.txt',
+ 'subdir/target/subdir/file.txt',
+ 'subdir/subdir2/target/subdir/file.txt']
+ )
+ )
+
+ def test_trailing_double_wildcard(self):
+ assert self.exclude(['subdir/**']) == convert_paths(
+ self.all_paths - set(
+ ['subdir/file.txt',
+ 'subdir/target/file.txt',
+ 'subdir/target/subdir/file.txt',
+ 'subdir/subdir2/file.txt',
+ 'subdir/subdir2/target/file.txt',
+ 'subdir/subdir2/target/subdir/file.txt',
+ 'subdir/target',
+ 'subdir/target/subdir',
+ 'subdir/subdir2',
+ 'subdir/subdir2/target',
+ 'subdir/subdir2/target/subdir']
+ )
+ )
+
+ def test_double_wildcard_with_exception(self):
+ assert self.exclude(['**', '!bar', '!foo/bar']) == convert_paths(
+ set([
+ 'foo/bar', 'foo/bar/a.py', 'bar', 'bar/a.py', 'Dockerfile',
+ '.dockerignore',
+ ])
+ )
+
+ def test_include_wildcard(self):
+ # This may be surprising but it matches the CLI's behavior
+ # (tested with 18.05.0-ce on linux)
+ base = make_tree(['a'], ['a/b.py'])
+ assert exclude_paths(
+ base,
+ ['*', '!*/b.py']
+ ) == set()
+
+ def test_last_line_precedence(self):
+ base = make_tree(
+ [],
+ ['garbage.md',
+ 'trash.md',
+ 'README.md',
+ 'README-bis.md',
+ 'README-secret.md'])
+ assert exclude_paths(
+ base,
+ ['*.md', '!README*.md', 'README-secret.md']
+ ) == set(['README.md', 'README-bis.md'])
+
+ def test_parent_directory(self):
+ base = make_tree(
+ [],
+ ['a.py',
+ 'b.py',
+ 'c.py'])
+ # Dockerignore reference stipulates that absolute paths are
+ # equivalent to relative paths, hence /../foo should be
+ # equivalent to ../foo. It also stipulates that paths are run
+ # through Go's filepath.Clean, which explicitly "replace
+ # "/.." by "/" at the beginning of a path".
+ assert exclude_paths(
+ base,
+ ['../a.py', '/../b.py']
+ ) == set(['c.py'])
+
+
+class TarTest(unittest.TestCase):
+ def test_tar_with_excludes(self):
+ dirs = [
+ 'foo',
+ 'foo/bar',
+ 'bar',
+ ]
+
+ files = [
+ 'Dockerfile',
+ 'Dockerfile.alt',
+ '.dockerignore',
+ 'a.py',
+ 'a.go',
+ 'b.py',
+ 'cde.py',
+ 'foo/a.py',
+ 'foo/b.py',
+ 'foo/bar/a.py',
+ 'bar/a.py',
+ ]
+
+ exclude = [
+ '*.py',
+ '!b.py',
+ '!a.go',
+ 'foo',
+ 'Dockerfile*',
+ '.dockerignore',
+ ]
+
+ expected_names = set([
+ 'Dockerfile',
+ '.dockerignore',
+ 'a.go',
+ 'b.py',
+ 'bar',
+ 'bar/a.py',
+ ])
+
+ base = make_tree(dirs, files)
+ self.addCleanup(shutil.rmtree, base)
+
+ with tar(base, exclude=exclude) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ assert sorted(tar_data.getnames()) == sorted(expected_names)
+
+ def test_tar_with_empty_directory(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ for d in ['foo', 'bar']:
+ os.makedirs(os.path.join(base, d))
+ with tar(base) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ assert sorted(tar_data.getnames()) == ['bar', 'foo']
+
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM or os.geteuid() == 0,
+ reason='root user always has access ; no chmod on Windows'
+ )
+ def test_tar_with_inaccessible_file(self):
+ base = tempfile.mkdtemp()
+ full_path = os.path.join(base, 'foo')
+ self.addCleanup(shutil.rmtree, base)
+ with open(full_path, 'w') as f:
+ f.write('content')
+ os.chmod(full_path, 0o222)
+ with pytest.raises(IOError) as ei:
+ tar(base)
+
+ assert 'Can not read file in context: {}'.format(full_path) in (
+ ei.exconly()
+ )
+
+ @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
+ def test_tar_with_file_symlinks(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ with open(os.path.join(base, 'foo'), 'w') as f:
+ f.write("content")
+ os.makedirs(os.path.join(base, 'bar'))
+ os.symlink('../foo', os.path.join(base, 'bar/foo'))
+ with tar(base) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
+
+ @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
+ def test_tar_with_directory_symlinks(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ for d in ['foo', 'bar']:
+ os.makedirs(os.path.join(base, d))
+ os.symlink('../foo', os.path.join(base, 'bar/foo'))
+ with tar(base) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
+
+ @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
+ def test_tar_with_broken_symlinks(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ for d in ['foo', 'bar']:
+ os.makedirs(os.path.join(base, d))
+
+ os.symlink('../baz', os.path.join(base, 'bar/foo'))
+ with tar(base) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
+
+ @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No UNIX sockets on Win32')
+ def test_tar_socket_file(self):
+ base = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base)
+ for d in ['foo', 'bar']:
+ os.makedirs(os.path.join(base, d))
+ sock = socket.socket(socket.AF_UNIX)
+ self.addCleanup(sock.close)
+ sock.bind(os.path.join(base, 'test.sock'))
+ with tar(base) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ assert sorted(tar_data.getnames()) == ['bar', 'foo']
+
+ def test_tar_negative_mtime_bug(self):
+ base = tempfile.mkdtemp()
+ filename = os.path.join(base, 'th.txt')
+ self.addCleanup(shutil.rmtree, base)
+ with open(filename, 'w') as f:
+ f.write('Invisible Full Moon')
+ os.utime(filename, (12345, -3600.0))
+ with tar(base) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ assert tar_data.getnames() == ['th.txt']
+ assert tar_data.getmember('th.txt').mtime == -3600
+
+ @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
+ def test_tar_directory_link(self):
+ dirs = ['a', 'b', 'a/c']
+ files = ['a/hello.py', 'b/utils.py', 'a/c/descend.py']
+ base = make_tree(dirs, files)
+ self.addCleanup(shutil.rmtree, base)
+ os.symlink(os.path.join(base, 'b'), os.path.join(base, 'a/c/b'))
+ with tar(base) as archive:
+ tar_data = tarfile.open(fileobj=archive)
+ names = tar_data.getnames()
+ for member in dirs + files:
+ assert member in names
+ assert 'a/c/b' in names
+ assert 'a/c/b/utils.py' not in names
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 00456e8..8880cfe 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -5,29 +5,25 @@ import json
import os
import os.path
import shutil
-import socket
import sys
-import tarfile
import tempfile
import unittest
-import pytest
-import six
from docker.api.client import APIClient
-from docker.constants import IS_WINDOWS_PLATFORM
from docker.errors import DockerException
from docker.utils import (
- parse_repository_tag, parse_host, convert_filters, kwargs_from_env,
- parse_bytes, parse_env_file, exclude_paths, convert_volume_binds,
- decode_json_header, tar, split_command, parse_devices, update_headers,
+ convert_filters, convert_volume_binds, decode_json_header, kwargs_from_env,
+ parse_bytes, parse_devices, parse_env_file, parse_host,
+ parse_repository_tag, split_command, update_headers,
)
from docker.utils.ports import build_port_bindings, split_port
from docker.utils.utils import format_environment
-from ..helpers import make_tree
+import pytest
+import six
TEST_CERT_DIR = os.path.join(
os.path.dirname(__file__),
@@ -608,472 +604,6 @@ class PortsTest(unittest.TestCase):
assert port_bindings["2000"] == [("127.0.0.1", "2000")]
-def convert_paths(collection):
- return set(map(convert_path, collection))
-
-
-def convert_path(path):
- return path.replace('/', os.path.sep)
-
-
-class ExcludePathsTest(unittest.TestCase):
- dirs = [
- 'foo',
- 'foo/bar',
- 'bar',
- 'target',
- 'target/subdir',
- 'subdir',
- 'subdir/target',
- 'subdir/target/subdir',
- 'subdir/subdir2',
- 'subdir/subdir2/target',
- 'subdir/subdir2/target/subdir'
- ]
-
- files = [
- 'Dockerfile',
- 'Dockerfile.alt',
- '.dockerignore',
- 'a.py',
- 'a.go',
- 'b.py',
- 'cde.py',
- 'foo/a.py',
- 'foo/b.py',
- 'foo/bar/a.py',
- 'bar/a.py',
- 'foo/Dockerfile3',
- 'target/file.txt',
- 'target/subdir/file.txt',
- 'subdir/file.txt',
- 'subdir/target/file.txt',
- 'subdir/target/subdir/file.txt',
- 'subdir/subdir2/file.txt',
- 'subdir/subdir2/target/file.txt',
- 'subdir/subdir2/target/subdir/file.txt',
- ]
-
- all_paths = set(dirs + files)
-
- def setUp(self):
- self.base = make_tree(self.dirs, self.files)
-
- def tearDown(self):
- shutil.rmtree(self.base)
-
- def exclude(self, patterns, dockerfile=None):
- return set(exclude_paths(self.base, patterns, dockerfile=dockerfile))
-
- def test_no_excludes(self):
- assert self.exclude(['']) == convert_paths(self.all_paths)
-
- def test_no_dupes(self):
- paths = exclude_paths(self.base, ['!a.py'])
- assert sorted(paths) == sorted(set(paths))
-
- def test_wildcard_exclude(self):
- assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore'])
-
- def test_exclude_dockerfile_dockerignore(self):
- """
- Even if the .dockerignore file explicitly says to exclude
- Dockerfile and/or .dockerignore, don't exclude them from
- the actual tar file.
- """
- assert self.exclude(['Dockerfile', '.dockerignore']) == convert_paths(
- self.all_paths
- )
-
- def test_exclude_custom_dockerfile(self):
- """
- If we're using a custom Dockerfile, make sure that's not
- excluded.
- """
- assert self.exclude(['*'], dockerfile='Dockerfile.alt') == set(
- ['Dockerfile.alt', '.dockerignore']
- )
-
- assert self.exclude(
- ['*'], dockerfile='foo/Dockerfile3'
- ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
-
- # https://github.com/docker/docker-py/issues/1956
- assert self.exclude(
- ['*'], dockerfile='./foo/Dockerfile3'
- ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
-
- def test_exclude_dockerfile_child(self):
- includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3')
- assert convert_path('foo/Dockerfile3') in includes
- assert convert_path('foo/a.py') not in includes
-
- def test_single_filename(self):
- assert self.exclude(['a.py']) == convert_paths(
- self.all_paths - set(['a.py'])
- )
-
- def test_single_filename_leading_dot_slash(self):
- assert self.exclude(['./a.py']) == convert_paths(
- self.all_paths - set(['a.py'])
- )
-
- # As odd as it sounds, a filename pattern with a trailing slash on the
- # end *will* result in that file being excluded.
- def test_single_filename_trailing_slash(self):
- assert self.exclude(['a.py/']) == convert_paths(
- self.all_paths - set(['a.py'])
- )
-
- def test_wildcard_filename_start(self):
- assert self.exclude(['*.py']) == convert_paths(
- self.all_paths - set(['a.py', 'b.py', 'cde.py'])
- )
-
- def test_wildcard_with_exception(self):
- assert self.exclude(['*.py', '!b.py']) == convert_paths(
- self.all_paths - set(['a.py', 'cde.py'])
- )
-
- def test_wildcard_with_wildcard_exception(self):
- assert self.exclude(['*.*', '!*.go']) == convert_paths(
- self.all_paths - set([
- 'a.py', 'b.py', 'cde.py', 'Dockerfile.alt',
- ])
- )
-
- def test_wildcard_filename_end(self):
- assert self.exclude(['a.*']) == convert_paths(
- self.all_paths - set(['a.py', 'a.go'])
- )
-
- def test_question_mark(self):
- assert self.exclude(['?.py']) == convert_paths(
- self.all_paths - set(['a.py', 'b.py'])
- )
-
- def test_single_subdir_single_filename(self):
- assert self.exclude(['foo/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
- )
-
- def test_single_subdir_single_filename_leading_slash(self):
- assert self.exclude(['/foo/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
- )
-
- def test_exclude_include_absolute_path(self):
- base = make_tree([], ['a.py', 'b.py'])
- assert exclude_paths(
- base,
- ['/*', '!/*.py']
- ) == set(['a.py', 'b.py'])
-
- def test_single_subdir_with_path_traversal(self):
- assert self.exclude(['foo/whoops/../a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
- )
-
- def test_single_subdir_wildcard_filename(self):
- assert self.exclude(['foo/*.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'foo/b.py'])
- )
-
- def test_wildcard_subdir_single_filename(self):
- assert self.exclude(['*/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'bar/a.py'])
- )
-
- def test_wildcard_subdir_wildcard_filename(self):
- assert self.exclude(['*/*.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'foo/b.py', 'bar/a.py'])
- )
-
- def test_directory(self):
- assert self.exclude(['foo']) == convert_paths(
- self.all_paths - set([
- 'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py',
- 'foo/Dockerfile3'
- ])
- )
-
- def test_directory_with_trailing_slash(self):
- assert self.exclude(['foo']) == convert_paths(
- self.all_paths - set([
- 'foo', 'foo/a.py', 'foo/b.py',
- 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3'
- ])
- )
-
- def test_directory_with_single_exception(self):
- assert self.exclude(['foo', '!foo/bar/a.py']) == convert_paths(
- self.all_paths - set([
- 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar',
- 'foo/Dockerfile3'
- ])
- )
-
- def test_directory_with_subdir_exception(self):
- assert self.exclude(['foo', '!foo/bar']) == convert_paths(
- self.all_paths - set([
- 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
- ])
- )
-
- @pytest.mark.skipif(
- not IS_WINDOWS_PLATFORM, reason='Backslash patterns only on Windows'
- )
- def test_directory_with_subdir_exception_win32_pathsep(self):
- assert self.exclude(['foo', '!foo\\bar']) == convert_paths(
- self.all_paths - set([
- 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
- ])
- )
-
- def test_directory_with_wildcard_exception(self):
- assert self.exclude(['foo', '!foo/*.py']) == convert_paths(
- self.all_paths - set([
- 'foo/bar', 'foo/bar/a.py', 'foo', 'foo/Dockerfile3'
- ])
- )
-
- def test_subdirectory(self):
- assert self.exclude(['foo/bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
- )
-
- @pytest.mark.skipif(
- not IS_WINDOWS_PLATFORM, reason='Backslash patterns only on Windows'
- )
- def test_subdirectory_win32_pathsep(self):
- assert self.exclude(['foo\\bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
- )
-
- def test_double_wildcard(self):
- assert self.exclude(['**/a.py']) == convert_paths(
- self.all_paths - set(
- ['a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py']
- )
- )
-
- assert self.exclude(['foo/**/bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
- )
-
- def test_single_and_double_wildcard(self):
- assert self.exclude(['**/target/*/*']) == convert_paths(
- self.all_paths - set(
- ['target/subdir/file.txt',
- 'subdir/target/subdir/file.txt',
- 'subdir/subdir2/target/subdir/file.txt']
- )
- )
-
- def test_trailing_double_wildcard(self):
- assert self.exclude(['subdir/**']) == convert_paths(
- self.all_paths - set(
- ['subdir/file.txt',
- 'subdir/target/file.txt',
- 'subdir/target/subdir/file.txt',
- 'subdir/subdir2/file.txt',
- 'subdir/subdir2/target/file.txt',
- 'subdir/subdir2/target/subdir/file.txt',
- 'subdir/target',
- 'subdir/target/subdir',
- 'subdir/subdir2',
- 'subdir/subdir2/target',
- 'subdir/subdir2/target/subdir']
- )
- )
-
- def test_include_wildcard(self):
- base = make_tree(['a'], ['a/b.py'])
- assert exclude_paths(
- base,
- ['*', '!*/b.py']
- ) == convert_paths(['a/b.py'])
-
- def test_last_line_precedence(self):
- base = make_tree(
- [],
- ['garbage.md',
- 'thrash.md',
- 'README.md',
- 'README-bis.md',
- 'README-secret.md'])
- assert exclude_paths(
- base,
- ['*.md', '!README*.md', 'README-secret.md']
- ) == set(['README.md', 'README-bis.md'])
-
- def test_parent_directory(self):
- base = make_tree(
- [],
- ['a.py',
- 'b.py',
- 'c.py'])
- # Dockerignore reference stipulates that absolute paths are
- # equivalent to relative paths, hence /../foo should be
- # equivalent to ../foo. It also stipulates that paths are run
- # through Go's filepath.Clean, which explicitely "replace
- # "/.." by "/" at the beginning of a path".
- assert exclude_paths(
- base,
- ['../a.py', '/../b.py']
- ) == set(['c.py'])
-
-
-class TarTest(unittest.TestCase):
- def test_tar_with_excludes(self):
- dirs = [
- 'foo',
- 'foo/bar',
- 'bar',
- ]
-
- files = [
- 'Dockerfile',
- 'Dockerfile.alt',
- '.dockerignore',
- 'a.py',
- 'a.go',
- 'b.py',
- 'cde.py',
- 'foo/a.py',
- 'foo/b.py',
- 'foo/bar/a.py',
- 'bar/a.py',
- ]
-
- exclude = [
- '*.py',
- '!b.py',
- '!a.go',
- 'foo',
- 'Dockerfile*',
- '.dockerignore',
- ]
-
- expected_names = set([
- 'Dockerfile',
- '.dockerignore',
- 'a.go',
- 'b.py',
- 'bar',
- 'bar/a.py',
- ])
-
- base = make_tree(dirs, files)
- self.addCleanup(shutil.rmtree, base)
-
- with tar(base, exclude=exclude) as archive:
- tar_data = tarfile.open(fileobj=archive)
- assert sorted(tar_data.getnames()) == sorted(expected_names)
-
- def test_tar_with_empty_directory(self):
- base = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, base)
- for d in ['foo', 'bar']:
- os.makedirs(os.path.join(base, d))
- with tar(base) as archive:
- tar_data = tarfile.open(fileobj=archive)
- assert sorted(tar_data.getnames()) == ['bar', 'foo']
-
- @pytest.mark.skipif(
- IS_WINDOWS_PLATFORM or os.geteuid() == 0,
- reason='root user always has access ; no chmod on Windows'
- )
- def test_tar_with_inaccessible_file(self):
- base = tempfile.mkdtemp()
- full_path = os.path.join(base, 'foo')
- self.addCleanup(shutil.rmtree, base)
- with open(full_path, 'w') as f:
- f.write('content')
- os.chmod(full_path, 0o222)
- with pytest.raises(IOError) as ei:
- tar(base)
-
- assert 'Can not read file in context: {}'.format(full_path) in (
- ei.exconly()
- )
-
- @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
- def test_tar_with_file_symlinks(self):
- base = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, base)
- with open(os.path.join(base, 'foo'), 'w') as f:
- f.write("content")
- os.makedirs(os.path.join(base, 'bar'))
- os.symlink('../foo', os.path.join(base, 'bar/foo'))
- with tar(base) as archive:
- tar_data = tarfile.open(fileobj=archive)
- assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
-
- @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
- def test_tar_with_directory_symlinks(self):
- base = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, base)
- for d in ['foo', 'bar']:
- os.makedirs(os.path.join(base, d))
- os.symlink('../foo', os.path.join(base, 'bar/foo'))
- with tar(base) as archive:
- tar_data = tarfile.open(fileobj=archive)
- assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
-
- @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
- def test_tar_with_broken_symlinks(self):
- base = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, base)
- for d in ['foo', 'bar']:
- os.makedirs(os.path.join(base, d))
-
- os.symlink('../baz', os.path.join(base, 'bar/foo'))
- with tar(base) as archive:
- tar_data = tarfile.open(fileobj=archive)
- assert sorted(tar_data.getnames()) == ['bar', 'bar/foo', 'foo']
-
- @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No UNIX sockets on Win32')
- def test_tar_socket_file(self):
- base = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, base)
- for d in ['foo', 'bar']:
- os.makedirs(os.path.join(base, d))
- sock = socket.socket(socket.AF_UNIX)
- self.addCleanup(sock.close)
- sock.bind(os.path.join(base, 'test.sock'))
- with tar(base) as archive:
- tar_data = tarfile.open(fileobj=archive)
- assert sorted(tar_data.getnames()) == ['bar', 'foo']
-
- def tar_test_negative_mtime_bug(self):
- base = tempfile.mkdtemp()
- filename = os.path.join(base, 'th.txt')
- self.addCleanup(shutil.rmtree, base)
- with open(filename, 'w') as f:
- f.write('Invisible Full Moon')
- os.utime(filename, (12345, -3600.0))
- with tar(base) as archive:
- tar_data = tarfile.open(fileobj=archive)
- assert tar_data.getnames() == ['th.txt']
- assert tar_data.getmember('th.txt').mtime == -3600
-
- @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
- def test_tar_directory_link(self):
- dirs = ['a', 'b', 'a/c']
- files = ['a/hello.py', 'b/utils.py', 'a/c/descend.py']
- base = make_tree(dirs, files)
- self.addCleanup(shutil.rmtree, base)
- os.symlink(os.path.join(base, 'b'), os.path.join(base, 'a/c/b'))
- with tar(base) as archive:
- tar_data = tarfile.open(fileobj=archive)
- names = tar_data.getnames()
- for member in dirs + files:
- assert member in names
- assert 'a/c/b' in names
- assert 'a/c/b/utils.py' not in names
-
-
class FormatEnvironmentTest(unittest.TestCase):
def test_format_env_binary_unicode_value(self):
env_dict = {