author     Ondřej Nový <onovy@debian.org>  2016-09-08 14:23:57 +0200
committer  Ondřej Nový <onovy@debian.org>  2016-09-08 14:23:57 +0200
commit     08afb27969ed9291f7aecb24e811c9f135b7c08d (patch)
tree       cf3521e7f299cba6882f55332318359b13a8be24
parent     8f8b69b1c9f46feef38b9c540f3d39998e23add0 (diff)
parent     be343351f9237abe05da5f9f57e5409fdcf531f4 (diff)

merge patched into master
-rw-r--r--  MANIFEST.in | 2
-rw-r--r--  PKG-INFO | 5
-rw-r--r--  README.md | 8
-rw-r--r--  README.rst | 37
-rw-r--r--  debian/.git-dpm | 6
-rw-r--r--  debian/patches/0002-Lower-Docker-API-version-to-1.20-for-Docker-1.8.3-co.patch | 11
-rw-r--r--  debian/patches/requirements.patch | 37
-rw-r--r--  docker/__init__.py | 2
-rw-r--r--  docker/api/build.py | 11
-rw-r--r--  docker/api/container.py | 46
-rw-r--r--  docker/api/daemon.py | 4
-rw-r--r--  docker/api/image.py | 8
-rw-r--r--  docker/api/network.py | 25
-rw-r--r--  docker/auth/auth.py | 15
-rw-r--r--  docker/client.py | 67
-rw-r--r--  docker/constants.py | 7
-rw-r--r--  docker/ssladapter/ssladapter.py | 9
-rw-r--r--  docker/tls.py | 7
-rw-r--r--  docker/transport/__init__.py | 6
-rw-r--r--  docker/transport/npipeconn.py | 80
-rw-r--r--  docker/transport/npipesocket.py | 191
-rw-r--r--  docker/transport/unixconn.py (renamed from docker/unixconn/unixconn.py) | 8
-rw-r--r--  docker/unixconn/__init__.py | 1
-rw-r--r--  docker/utils/utils.py | 258
-rw-r--r--  docker/version.py | 2
-rw-r--r--  docker_py.egg-info/PKG-INFO | 5
-rw-r--r--  docker_py.egg-info/SOURCES.txt | 15
-rw-r--r--  docker_py.egg-info/pbr.json | 1
-rw-r--r--  docker_py.egg-info/requires.txt | 8
-rw-r--r--  requirements.txt | 4
-rw-r--r--  setup.py | 17
-rw-r--r--  test-requirements.txt | 2
-rw-r--r--  tests/integration/api_test.py | 2
-rw-r--r--  tests/integration/build_test.py | 28
-rw-r--r--  tests/integration/container_test.py | 92
-rw-r--r--  tests/integration/image_test.py | 14
-rw-r--r--  tests/integration/network_test.py | 141
-rw-r--r--  tests/unit/api_test.py | 30
-rw-r--r--  tests/unit/auth_test.py | 5
-rw-r--r--  tests/unit/client_test.py | 70
-rw-r--r--  tests/unit/container_test.py | 129
-rw-r--r--  tests/unit/fake_api.py | 7
-rw-r--r--  tests/unit/image_test.py | 3
-rw-r--r--  tests/unit/ssladapter_test.py | 79
-rw-r--r--  tests/unit/testdata/certs/ca.pem | 0
-rw-r--r--  tests/unit/testdata/certs/cert.pem | 0
-rw-r--r--  tests/unit/testdata/certs/key.pem | 0
-rw-r--r--  tests/unit/utils_test.py | 174
48 files changed, 1421 insertions, 258 deletions
diff --git a/MANIFEST.in b/MANIFEST.in
index ab64732..ee6cdbb 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,5 +1,7 @@
include test-requirements.txt
include requirements.txt
include README.md
+include README.rst
include LICENSE
recursive-include tests *.py
+recursive-include tests/unit/testdata *
diff --git a/PKG-INFO b/PKG-INFO
index fd5c3e7..79c1598 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: docker-py
-Version: 1.7.2
+Version: 1.9.0
Summary: Python client for Docker.
Home-page: https://github.com/docker/docker-py/
Author: UNKNOWN
@@ -13,9 +13,12 @@ Classifier: Environment :: Other Environment
Classifier: Intended Audience :: Developers
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
Classifier: Topic :: Utilities
Classifier: License :: OSI Approved :: Apache Software License
diff --git a/README.md b/README.md
index 385193a..bdec785 100644
--- a/README.md
+++ b/README.md
@@ -3,12 +3,12 @@ docker-py
[![Build Status](https://travis-ci.org/docker/docker-py.png)](https://travis-ci.org/docker/docker-py)
-An API client for docker written in Python
+A Python library for the Docker Remote API. It does everything the `docker` command does, but from within Python – run containers, manage them, pull/push images, etc.
Installation
------------
-Our latest stable is always available on PyPi.
+The latest stable version is always available on PyPi.
pip install docker-py
@@ -17,8 +17,8 @@ Documentation
[![Documentation Status](https://readthedocs.org/projects/docker-py/badge/?version=latest)](https://readthedocs.org/projects/docker-py/?badge=latest)
-Full documentation is hosted on [ReadTheDocs](http://docker-py.readthedocs.org/en/latest/).
-Sources are available in the `docs/` directory.
+[Read the full documentation here](https://docker-py.readthedocs.io/en/latest/).
+The source is available in the `docs/` directory.
License
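
For orientation, the workflow the updated README describes (run containers, manage them, pull/push images) looks roughly like the following with this release. A minimal sketch, assuming a daemon on the default UNIX socket; the image and command are illustrative:

    import docker

    client = docker.Client(base_url='unix://var/run/docker.sock')
    client.pull('busybox', tag='latest')
    container = client.create_container('busybox', 'echo hello world')
    client.start(container)
    client.wait(container)
    print(client.logs(container))
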
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..757b82c
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,37 @@
+docker-py
+=========
+
+|Build Status|
+
+A Python library for the Docker Remote API. It does everything the
+``docker`` command does, but from within Python – run containers, manage
+them, pull/push images, etc.
+
+Installation
+------------
+
+The latest stable version is always available on PyPi.
+
+::
+
+ pip install docker-py
+
+Documentation
+-------------
+
+|Documentation Status|
+
+`Read the full documentation
+here <https://docker-py.readthedocs.io/en/latest/>`__. The source is
+available in the ``docs/`` directory.
+
+License
+-------
+
+Docker is licensed under the Apache License, Version 2.0. See LICENSE
+for full license text
+
+.. |Build Status| image:: https://travis-ci.org/docker/docker-py.png
+ :target: https://travis-ci.org/docker/docker-py
+.. |Documentation Status| image:: https://readthedocs.org/projects/docker-py/badge/?version=latest
+ :target: https://readthedocs.org/projects/docker-py/?badge=latest
diff --git a/debian/.git-dpm b/debian/.git-dpm
index f926a07..872177f 100644
--- a/debian/.git-dpm
+++ b/debian/.git-dpm
@@ -1,7 +1,7 @@
# see git-dpm(1) from git-dpm package
-07e7d1cfb5d89ae7d71d9c4b18e2a64ad47658e5
-07e7d1cfb5d89ae7d71d9c4b18e2a64ad47658e5
-63875477fa2e0a529fcf52e36a3f9cb3db861000
+be343351f9237abe05da5f9f57e5409fdcf531f4
+be343351f9237abe05da5f9f57e5409fdcf531f4
+49556cb01423a89a6d2ce7a58b1f5cb64dde0ff3
49556cb01423a89a6d2ce7a58b1f5cb64dde0ff3
python-docker_1.9.0.orig.tar.gz
8ae12366415c9d017ce464eb721e01d3489bff28
diff --git a/debian/patches/0002-Lower-Docker-API-version-to-1.20-for-Docker-1.8.3-co.patch b/debian/patches/0002-Lower-Docker-API-version-to-1.20-for-Docker-1.8.3-co.patch
index fca3ee3..3d64ad5 100644
--- a/debian/patches/0002-Lower-Docker-API-version-to-1.20-for-Docker-1.8.3-co.patch
+++ b/debian/patches/0002-Lower-Docker-API-version-to-1.20-for-Docker-1.8.3-co.patch
@@ -1,4 +1,4 @@
-From 07e7d1cfb5d89ae7d71d9c4b18e2a64ad47658e5 Mon Sep 17 00:00:00 2001
+From be343351f9237abe05da5f9f57e5409fdcf531f4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Nov=C3=BD?= <novy@ondrej.org>
Date: Tue, 29 Mar 2016 21:48:58 +0200
Subject: Lower Docker API version to 1.20 for Docker 1.8.3 compatibility
@@ -8,11 +8,14 @@ Subject: Lower Docker API version to 1.20 for Docker 1.8.3 compatibility
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docker/constants.py b/docker/constants.py
-index 0627ba0..3647a3b 100644
+index 904d50e..3726824 100644
--- a/docker/constants.py
+++ b/docker/constants.py
-@@ -1,4 +1,4 @@
--DEFAULT_DOCKER_API_VERSION = '1.21'
+@@ -1,7 +1,7 @@
+ import sys
+ from .version import version
+
+-DEFAULT_DOCKER_API_VERSION = '1.22'
+DEFAULT_DOCKER_API_VERSION = '1.20'
DEFAULT_TIMEOUT_SECONDS = 60
STREAM_HEADER_SIZE_BYTES = 8
diff --git a/debian/patches/requirements.patch b/debian/patches/requirements.patch
index 86cc240..824d621 100644
--- a/debian/patches/requirements.patch
+++ b/debian/patches/requirements.patch
@@ -1,4 +1,4 @@
-From 91cb802b9ddffb1351959b1c3797adc525595ea5 Mon Sep 17 00:00:00 2001
+From 5de320ded0f8b25f62e19b971341506d31db371c Mon Sep 17 00:00:00 2001
From: Tianon Gravi <tianon@debian.org>
Date: Sun, 8 Nov 2015 10:36:49 -0800
Subject: Unpin dependencies so newer versions satisfy them appropriately
@@ -7,37 +7,41 @@ Forwarded: https://github.com/dotcloud/docker-py/issues/101 (upstream has no int
Patch-Name: requirements.patch
---
- docker_py.egg-info/requires.txt | 4 ++--
- requirements.txt | 4 ++--
+ docker_py.egg-info/requires.txt | 2 +-
+ requirements.txt | 6 +++---
setup.py | 4 ++--
test-requirements.txt | 10 +++++-----
4 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/docker_py.egg-info/requires.txt b/docker_py.egg-info/requires.txt
-index 43cfa6a..aa5b97e 100644
+index 111527c..c3febfb 100644
--- a/docker_py.egg-info/requires.txt
+++ b/docker_py.egg-info/requires.txt
-@@ -1,3 +1,3 @@
+@@ -1,4 +1,4 @@
-requests >= 2.5.2
+requests >= 2.5.3
six >= 1.4.0
--websocket-client >= 0.32.0
-+websocket-client >= 0.18.0
+ websocket-client >= 0.32.0
+
diff --git a/requirements.txt b/requirements.txt
-index 72c255d..cd79880 100644
+index a79b7bf..b06a541 100644
--- a/requirements.txt
+++ b/requirements.txt
-@@ -1,3 +1,3 @@
+@@ -1,5 +1,5 @@
-requests==2.5.3
+requests>=2.5.3
six>=1.4.0
-websocket-client==0.32.0
-+websocket-client>=0.18.0
++websocket-client>=0.32.0
+ backports.ssl_match_hostname>=3.5 ; python_version < '3.5'
+-ipaddress==1.0.16 ; python_version < '3.3'
+\ No newline at end of file
++ipaddress==1.0.16 ; python_version < '3.3'
diff --git a/setup.py b/setup.py
-index 6d86163..f2af6b9 100644
+index ac58b1f..f7c0443 100644
--- a/setup.py
+++ b/setup.py
-@@ -7,9 +7,9 @@ ROOT_DIR = os.path.dirname(__file__)
+@@ -9,9 +9,9 @@ ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
requirements = [
@@ -48,20 +52,19 @@ index 6d86163..f2af6b9 100644
+ 'websocket-client >= 0.18.0',
]
- exec(open('docker/version.py').read())
+ if sys.platform == 'win32':
diff --git a/test-requirements.txt b/test-requirements.txt
-index be49988..45b2b38 100644
+index 460db10..9f9eb50 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,5 +1,5 @@
-mock==1.0.1
--pytest==2.7.2
+-pytest==2.9.1
-coverage==3.7.1
-pytest-cov==2.1.0
-flake8==2.4.1
-\ No newline at end of file
+mock>=1.0.1
-+pytest>=2.7.2
++pytest>=2.9.1
+coverage>=3.7.1
+pytest-cov>=2.1.0
+flake8>=2.4.1
diff --git a/docker/__init__.py b/docker/__init__.py
index 3844c81..84d0734 100644
--- a/docker/__init__.py
+++ b/docker/__init__.py
@@ -17,4 +17,4 @@ from .version import version, version_info
__version__ = version
__title__ = 'docker-py'
-from .client import Client, AutoVersionClient # flake8: noqa
+from .client import Client, AutoVersionClient, from_env # flake8: noqa
diff --git a/docker/api/build.py b/docker/api/build.py
index 6bfaba1..971a50e 100644
--- a/docker/api/build.py
+++ b/docker/api/build.py
@@ -17,11 +17,15 @@ class BuildApiMixin(object):
nocache=False, rm=False, stream=False, timeout=None,
custom_context=False, encoding=None, pull=False,
forcerm=False, dockerfile=None, container_limits=None,
- decode=False, buildargs=None):
+ decode=False, buildargs=None, gzip=False):
remote = context = headers = None
container_limits = container_limits or {}
if path is None and fileobj is None:
raise TypeError("Either path or fileobj needs to be provided.")
+ if gzip and encoding is not None:
+ raise errors.DockerException(
+ 'Can not use custom encoding if gzip is enabled'
+ )
for key in container_limits.keys():
if key not in constants.CONTAINER_LIMITS_KEYS:
@@ -46,7 +50,10 @@ class BuildApiMixin(object):
if os.path.exists(dockerignore):
with open(dockerignore, 'r') as f:
exclude = list(filter(bool, f.read().splitlines()))
- context = utils.tar(path, exclude=exclude, dockerfile=dockerfile)
+ context = utils.tar(
+ path, exclude=exclude, dockerfile=dockerfile, gzip=gzip
+ )
+ encoding = 'gzip' if gzip else encoding
if utils.compare_version('1.8', self._version) >= 0:
stream = True
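
The hunk above adds a gzip flag to build() so the context tarball is compressed before upload, and rejects combining it with a custom encoding. A short sketch, assuming an existing client and a placeholder context path:

    import docker

    client = docker.Client(base_url='unix://var/run/docker.sock')
    # The context is tarred with mode 'w:gz' and the request encoding set to gzip.
    for chunk in client.build(path='/path/to/context', gzip=True, decode=True):
        print(chunk)
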
diff --git a/docker/api/container.py b/docker/api/container.py
index ceac173..9cc14db 100644
--- a/docker/api/container.py
+++ b/docker/api/container.py
@@ -40,13 +40,14 @@ class ContainerApiMixin(object):
@utils.check_resource
def commit(self, container, repository=None, tag=None, message=None,
- author=None, conf=None):
+ author=None, changes=None, conf=None):
params = {
'container': container,
'repo': repository,
'tag': tag,
'comment': message,
- 'author': author
+ 'author': author,
+ 'changes': changes
}
u = self._url("/commit")
return self._result(self._post_json(u, data=conf, params=params),
@@ -186,6 +187,8 @@ class ContainerApiMixin(object):
url = self._url("/containers/{0}/kill", container)
params = {}
if signal is not None:
+ if not isinstance(signal, six.string_types):
+ signal = int(signal)
params['signal'] = signal
res = self._post(url, params=params)
@@ -193,12 +196,14 @@ class ContainerApiMixin(object):
@utils.check_resource
def logs(self, container, stdout=True, stderr=True, stream=False,
- timestamps=False, tail='all', since=None):
+ timestamps=False, tail='all', since=None, follow=None):
if utils.compare_version('1.11', self._version) >= 0:
+ if follow is None:
+ follow = stream
params = {'stderr': stderr and 1 or 0,
'stdout': stdout and 1 or 0,
'timestamps': timestamps and 1 or 0,
- 'follow': stream and 1 or 0,
+ 'follow': follow and 1 or 0,
}
if utils.compare_version('1.13', self._version) >= 0:
if tail != 'all' and (not isinstance(tail, int) or tail < 0):
@@ -396,6 +401,39 @@ class ContainerApiMixin(object):
res = self._post(url)
self._raise_for_status(res)
+ @utils.minimum_version('1.22')
+ @utils.check_resource
+ def update_container(
+ self, container, blkio_weight=None, cpu_period=None, cpu_quota=None,
+ cpu_shares=None, cpuset_cpus=None, cpuset_mems=None, mem_limit=None,
+ mem_reservation=None, memswap_limit=None, kernel_memory=None
+ ):
+ url = self._url('/containers/{0}/update', container)
+ data = {}
+ if blkio_weight:
+ data['BlkioWeight'] = blkio_weight
+ if cpu_period:
+ data['CpuPeriod'] = cpu_period
+ if cpu_shares:
+ data['CpuShares'] = cpu_shares
+ if cpu_quota:
+ data['CpuQuota'] = cpu_quota
+ if cpuset_cpus:
+ data['CpusetCpus'] = cpuset_cpus
+ if cpuset_mems:
+ data['CpusetMems'] = cpuset_mems
+ if mem_limit:
+ data['Memory'] = utils.parse_bytes(mem_limit)
+ if mem_reservation:
+ data['MemoryReservation'] = utils.parse_bytes(mem_reservation)
+ if memswap_limit:
+ data['MemorySwap'] = utils.parse_bytes(memswap_limit)
+ if kernel_memory:
+ data['KernelMemory'] = utils.parse_bytes(kernel_memory)
+
+ res = self._post_json(url, data=data)
+ return self._result(res, True)
+
@utils.check_resource
def wait(self, container, timeout=None):
url = self._url("/containers/{0}/wait", container)
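
A sketch of the container-side additions above: update_container() (API >= 1.22), the decoupled follow flag on logs(), and kill() accepting either a signal name or an integer. The client setup, image, and limit values are illustrative:

    import docker

    client = docker.Client(base_url='unix://var/run/docker.sock', version='auto')
    container = client.create_container('busybox', 'top')
    client.start(container)

    # Adjust resource limits on a running container (requires API >= 1.22).
    client.update_container(container, mem_limit='300m', cpu_shares=512)

    # Read the existing log lines without following the stream.
    for line in client.logs(container, stream=True, follow=False):
        print(line)

    # Signal names and plain integers are both accepted now.
    client.kill(container, signal='SIGTERM')
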
diff --git a/docker/api/daemon.py b/docker/api/daemon.py
index a149e5e..9ebe73c 100644
--- a/docker/api/daemon.py
+++ b/docker/api/daemon.py
@@ -49,8 +49,6 @@ class DaemonApiMixin(object):
elif not self._auth_configs:
self._auth_configs = auth.load_config()
- registry = registry or auth.INDEX_URL
-
authcfg = auth.resolve_authconfig(self._auth_configs, registry)
# If we found an existing auth config for this registry and username
# combination, we can return it immediately unless reauth is requested.
@@ -67,7 +65,7 @@ class DaemonApiMixin(object):
response = self._post_json(self._url('/auth'), data=req_data)
if response.status_code == 200:
- self._auth_configs[registry] = req_data
+ self._auth_configs[registry or auth.INDEX_NAME] = req_data
return self._result(response, json=True)
def ping(self):
diff --git a/docker/api/image.py b/docker/api/image.py
index 8493b38..3e66347 100644
--- a/docker/api/image.py
+++ b/docker/api/image.py
@@ -148,7 +148,7 @@ class ImageApiMixin(object):
self._raise_for_status(res)
def pull(self, repository, tag=None, stream=False,
- insecure_registry=False, auth_config=None):
+ insecure_registry=False, auth_config=None, decode=False):
if insecure_registry:
warnings.warn(
INSECURE_REGISTRY_DEPRECATION_WARNING.format('pull()'),
@@ -200,12 +200,12 @@ class ImageApiMixin(object):
self._raise_for_status(response)
if stream:
- return self._stream_helper(response)
+ return self._stream_helper(response, decode=decode)
return self._result(response)
def push(self, repository, tag=None, stream=False,
- insecure_registry=False):
+ insecure_registry=False, decode=False):
if insecure_registry:
warnings.warn(
INSECURE_REGISTRY_DEPRECATION_WARNING.format('push()'),
@@ -241,7 +241,7 @@ class ImageApiMixin(object):
self._raise_for_status(response)
if stream:
- return self._stream_helper(response)
+ return self._stream_helper(response, decode=decode)
return self._result(response)
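
The decode flag added to pull() and push() above hands each streamed progress chunk to the caller as a parsed dict rather than raw JSON bytes. A brief sketch:

    import docker

    client = docker.Client(base_url='unix://var/run/docker.sock')
    for event in client.pull('busybox', tag='latest', stream=True, decode=True):
        # e.g. {'status': 'Downloading', 'progressDetail': {...}, 'id': '...'}
        print(event.get('status'))
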
diff --git a/docker/api/network.py b/docker/api/network.py
index d9a6128..a35f0a4 100644
--- a/docker/api/network.py
+++ b/docker/api/network.py
@@ -1,6 +1,8 @@
import json
-from ..utils import check_resource, minimum_version, normalize_links
+from ..errors import InvalidVersion
+from ..utils import check_resource, minimum_version
+from ..utils import version_lt
class NetworkApiMixin(object):
@@ -19,7 +21,8 @@ class NetworkApiMixin(object):
return self._result(res, json=True)
@minimum_version('1.21')
- def create_network(self, name, driver=None, options=None, ipam=None):
+ def create_network(self, name, driver=None, options=None, ipam=None,
+ check_duplicate=None, internal=False):
if options is not None and not isinstance(options, dict):
raise TypeError('options must be a dictionary')
@@ -28,7 +31,15 @@ class NetworkApiMixin(object):
'Driver': driver,
'Options': options,
'IPAM': ipam,
+ 'CheckDuplicate': check_duplicate
}
+
+ if internal:
+ if version_lt(self._version, '1.22'):
+ raise InvalidVersion('Internal networks are not '
+ 'supported in API version < 1.22')
+ data['Internal'] = True
+
url = self._url("/networks/create")
res = self._post_json(url, data=data)
return self._result(res, json=True)
@@ -48,14 +59,16 @@ class NetworkApiMixin(object):
@check_resource
@minimum_version('1.21')
def connect_container_to_network(self, container, net_id,
+ ipv4_address=None, ipv6_address=None,
aliases=None, links=None):
data = {
"Container": container,
- "EndpointConfig": {
- "Aliases": aliases,
- "Links": normalize_links(links) if links else None,
- },
+ "EndpointConfig": self.create_endpoint_config(
+ aliases=aliases, links=links, ipv4_address=ipv4_address,
+ ipv6_address=ipv6_address
+ ),
}
+
url = self._url("/networks/{0}/connect", net_id)
res = self._post_json(url, data=data)
self._raise_for_status(res)
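
A sketch of the network changes above: check_duplicate and internal on create_network() (internal requires API >= 1.22) and static addressing when connecting a container. The IPAM helpers mirror the integration test further down; the subnet and address are illustrative and must fall inside the network's pool:

    import docker
    from docker.utils import create_ipam_config, create_ipam_pool

    client = docker.Client(base_url='unix://var/run/docker.sock', version='auto')
    ipam = create_ipam_config(
        driver='default',
        pool_configs=[create_ipam_pool(subnet='172.28.0.0/16')],
    )
    net = client.create_network(
        'isolated', driver='bridge', check_duplicate=True,
        internal=True, ipam=ipam,
    )
    container = client.create_container('busybox', 'top')
    client.connect_container_to_network(
        container, net['Id'], ipv4_address='172.28.5.10', aliases=['web'],
    )
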
diff --git a/docker/auth/auth.py b/docker/auth/auth.py
index eedb794..d23e6f3 100644
--- a/docker/auth/auth.py
+++ b/docker/auth/auth.py
@@ -117,7 +117,7 @@ def parse_auth(entries, raise_on_error=False):
conf = {}
for registry, entry in six.iteritems(entries):
- if not (isinstance(entry, dict) and 'auth' in entry):
+ if not isinstance(entry, dict):
log.debug(
'Config entry for key {0} is not auth config'.format(registry)
)
@@ -130,6 +130,16 @@ def parse_auth(entries, raise_on_error=False):
'Invalid configuration for registry {0}'.format(registry)
)
return {}
+ if 'auth' not in entry:
+ # Starting with engine v1.11 (API 1.23), an empty dictionary is
+ # a valid value in the auths config.
+ # https://github.com/docker/compose/issues/3265
+ log.debug(
+ 'Auth data for {0} is absent. Client might be using a '
+ 'credentials store instead.'
+ )
+ return {}
+
username, password = decode_auth(entry['auth'])
log.debug(
'Found entry (registry={0}, username={1})'
@@ -189,6 +199,9 @@ def load_config(config_path=None):
if data.get('HttpHeaders'):
log.debug("Found 'HttpHeaders' section")
res.update({'HttpHeaders': data['HttpHeaders']})
+ if data.get('credsStore'):
+ log.debug("Found 'credsStore' section")
+ res.update({'credsStore': data['credsStore']})
if res:
return res
else:
diff --git a/docker/client.py b/docker/client.py
index 7d1f7c4..81e9de9 100644
--- a/docker/client.py
+++ b/docker/client.py
@@ -14,7 +14,6 @@
import json
import struct
-import sys
import requests
import requests.exceptions
@@ -26,10 +25,18 @@ from . import api
from . import constants
from . import errors
from .auth import auth
-from .unixconn import unixconn
from .ssladapter import ssladapter
-from .utils import utils, check_resource, update_headers
from .tls import TLSConfig
+from .transport import UnixAdapter
+from .utils import utils, check_resource, update_headers, kwargs_from_env
+try:
+ from .transport import NpipeAdapter
+except ImportError:
+ pass
+
+
+def from_env(**kwargs):
+ return Client.from_env(**kwargs)
class Client(
@@ -42,7 +49,8 @@ class Client(
api.VolumeApiMixin,
api.NetworkApiMixin):
def __init__(self, base_url=None, version=None,
- timeout=constants.DEFAULT_TIMEOUT_SECONDS, tls=False):
+ timeout=constants.DEFAULT_TIMEOUT_SECONDS, tls=False,
+ user_agent=constants.DEFAULT_USER_AGENT):
super(Client, self).__init__()
if tls and not base_url:
@@ -52,14 +60,30 @@ class Client(
self.base_url = base_url
self.timeout = timeout
+ self.headers['User-Agent'] = user_agent
self._auth_configs = auth.load_config()
- base_url = utils.parse_host(base_url, sys.platform, tls=bool(tls))
+ base_url = utils.parse_host(
+ base_url, constants.IS_WINDOWS_PLATFORM, tls=bool(tls)
+ )
if base_url.startswith('http+unix://'):
- self._custom_adapter = unixconn.UnixAdapter(base_url, timeout)
+ self._custom_adapter = UnixAdapter(base_url, timeout)
self.mount('http+docker://', self._custom_adapter)
self.base_url = 'http+docker://localunixsocket'
+ elif base_url.startswith('npipe://'):
+ if not constants.IS_WINDOWS_PLATFORM:
+ raise errors.DockerException(
+ 'The npipe:// protocol is only supported on Windows'
+ )
+ try:
+ self._custom_adapter = NpipeAdapter(base_url, timeout)
+ except NameError:
+ raise errors.DockerException(
+ 'Install pypiwin32 package to enable npipe:// support'
+ )
+ self.mount('http+docker://', self._custom_adapter)
+ self.base_url = 'http+docker://localnpipe'
else:
# Use SSLAdapter for the ability to specify SSL version
if isinstance(tls, TLSConfig):
@@ -84,6 +108,10 @@ class Client(
)
)
+ @classmethod
+ def from_env(cls, **kwargs):
+ return cls(**kwargs_from_env(**kwargs))
+
def _retrieve_server_version(self):
try:
return self.version(api_version=False)["ApiVersion"]
@@ -283,14 +311,29 @@ class Client(
""" Depending on the combination of python version and whether we're
connecting over http or https, we might need to access _sock, which
may or may not exist; or we may need to just settimeout on socket
- itself, which also may or may not have settimeout on it.
+ itself, which also may or may not have settimeout on it. To avoid
+ missing the correct one, we try both.
- To avoid missing the correct one, we try both.
+ We also do not want to set the timeout if it is already disabled, as
+ you run the risk of changing a socket that was non-blocking to
+ blocking, for example when using gevent.
"""
- if hasattr(socket, "settimeout"):
- socket.settimeout(None)
- if hasattr(socket, "_sock") and hasattr(socket._sock, "settimeout"):
- socket._sock.settimeout(None)
+ sockets = [socket, getattr(socket, '_sock', None)]
+
+ for s in sockets:
+ if not hasattr(s, 'settimeout'):
+ continue
+
+ timeout = -1
+
+ if hasattr(s, 'gettimeout'):
+ timeout = s.gettimeout()
+
+ # Don't change the timeout if it is already disabled.
+ if timeout is None or timeout == 0.0:
+ continue
+
+ s.settimeout(None)
def _get_result(self, container, stream, res):
cont = self.inspect_container(container)
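
The client changes above add a module-level from_env() helper, a configurable User-Agent, and npipe:// support. The environment-based constructor is the most visible piece; it reads DOCKER_HOST, DOCKER_TLS_VERIFY and DOCKER_CERT_PATH through kwargs_from_env():

    import docker

    # Build a client from the same environment variables the docker CLI uses.
    client = docker.from_env()
    print(client.version())
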
diff --git a/docker/constants.py b/docker/constants.py
index 3647a3b..3726824 100644
--- a/docker/constants.py
+++ b/docker/constants.py
@@ -1,3 +1,6 @@
+import sys
+from .version import version
+
DEFAULT_DOCKER_API_VERSION = '1.20'
DEFAULT_TIMEOUT_SECONDS = 60
STREAM_HEADER_SIZE_BYTES = 8
@@ -8,3 +11,7 @@ CONTAINER_LIMITS_KEYS = [
INSECURE_REGISTRY_DEPRECATION_WARNING = \
'The `insecure_registry` argument to {} ' \
'is deprecated and non-functional. Please remove it.'
+
+IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
+
+DEFAULT_USER_AGENT = "docker-py/{0}".format(version)
diff --git a/docker/ssladapter/ssladapter.py b/docker/ssladapter/ssladapter.py
index 5b43aa2..e17dfad 100644
--- a/docker/ssladapter/ssladapter.py
+++ b/docker/ssladapter/ssladapter.py
@@ -2,6 +2,8 @@
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
+import sys
+
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
@@ -10,8 +12,15 @@ try:
except ImportError:
import urllib3
+
PoolManager = urllib3.poolmanager.PoolManager
+# Monkey-patching match_hostname with a version that supports
+# IP-address checking. Not necessary for Python 3.5 and above
+if sys.version_info[0] < 3 or sys.version_info[1] < 5:
+ from backports.ssl_match_hostname import match_hostname
+ urllib3.connection.match_hostname = match_hostname
+
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
diff --git a/docker/tls.py b/docker/tls.py
index 83b0ff7..7abfa60 100644
--- a/docker/tls.py
+++ b/docker/tls.py
@@ -1,4 +1,5 @@
import os
+import ssl
from . import errors
from .ssladapter import ssladapter
@@ -19,10 +20,14 @@ class TLSConfig(object):
# here, but also disable any public/default CA pool verification by
# leaving tls_verify=False
- self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
+ # TLS v1.0 seems to be the safest default; SSLv23 fails in mysterious
+ # ways: https://github.com/docker/docker-py/issues/963
+
+ self.ssl_version = ssl_version or ssl.PROTOCOL_TLSv1
+
# "tls" and "tls_verify" must have both or neither cert/key files
# In either case, Alert the user when both are expected, but any are
# missing.
diff --git a/docker/transport/__init__.py b/docker/transport/__init__.py
new file mode 100644
index 0000000..d647483
--- /dev/null
+++ b/docker/transport/__init__.py
@@ -0,0 +1,6 @@
+# flake8: noqa
+from .unixconn import UnixAdapter
+try:
+ from .npipeconn import NpipeAdapter
+except ImportError:
+ pass \ No newline at end of file
diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py
new file mode 100644
index 0000000..736ddf6
--- /dev/null
+++ b/docker/transport/npipeconn.py
@@ -0,0 +1,80 @@
+import six
+import requests.adapters
+
+from .npipesocket import NpipeSocket
+
+if six.PY3:
+ import http.client as httplib
+else:
+ import httplib
+
+try:
+ import requests.packages.urllib3 as urllib3
+except ImportError:
+ import urllib3
+
+
+RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
+
+
+class NpipeHTTPConnection(httplib.HTTPConnection, object):
+ def __init__(self, npipe_path, timeout=60):
+ super(NpipeHTTPConnection, self).__init__(
+ 'localhost', timeout=timeout
+ )
+ self.npipe_path = npipe_path
+ self.timeout = timeout
+
+ def connect(self):
+ sock = NpipeSocket()
+ sock.settimeout(self.timeout)
+ sock.connect(self.npipe_path)
+ self.sock = sock
+
+
+class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
+ def __init__(self, npipe_path, timeout=60):
+ super(NpipeHTTPConnectionPool, self).__init__(
+ 'localhost', timeout=timeout
+ )
+ self.npipe_path = npipe_path
+ self.timeout = timeout
+
+ def _new_conn(self):
+ return NpipeHTTPConnection(
+ self.npipe_path, self.timeout
+ )
+
+
+class NpipeAdapter(requests.adapters.HTTPAdapter):
+ def __init__(self, base_url, timeout=60):
+ self.npipe_path = base_url.replace('npipe://', '')
+ self.timeout = timeout
+ self.pools = RecentlyUsedContainer(
+ 10, dispose_func=lambda p: p.close()
+ )
+ super(NpipeAdapter, self).__init__()
+
+ def get_connection(self, url, proxies=None):
+ with self.pools.lock:
+ pool = self.pools.get(url)
+ if pool:
+ return pool
+
+ pool = NpipeHTTPConnectionPool(
+ self.npipe_path, self.timeout
+ )
+ self.pools[url] = pool
+
+ return pool
+
+ def request_url(self, request, proxies):
+ # The select_proxy utility in requests errors out when the provided URL
+ # doesn't have a hostname, like is the case when using a UNIX socket.
+ # Since proxies are an irrelevant notion in the case of UNIX sockets
+ # anyway, we simply return the path URL directly.
+ # See also: https://github.com/docker/docker-py/issues/811
+ return request.path_url
+
+ def close(self):
+ self.pools.clear()
diff --git a/docker/transport/npipesocket.py b/docker/transport/npipesocket.py
new file mode 100644
index 0000000..35418ef
--- /dev/null
+++ b/docker/transport/npipesocket.py
@@ -0,0 +1,191 @@
+import functools
+import io
+
+import win32file
+import win32pipe
+
+cSECURITY_SQOS_PRESENT = 0x100000
+cSECURITY_ANONYMOUS = 0
+cPIPE_READMODE_MESSAGE = 2
+
+
+def check_closed(f):
+ @functools.wraps(f)
+ def wrapped(self, *args, **kwargs):
+ if self._closed:
+ raise RuntimeError(
+ 'Can not reuse socket after connection was closed.'
+ )
+ return f(self, *args, **kwargs)
+ return wrapped
+
+
+class NpipeSocket(object):
+ """ Partial implementation of the socket API over windows named pipes.
+ This implementation is only designed to be used as a client socket,
+ and server-specific methods (bind, listen, accept...) are not
+ implemented.
+ """
+ def __init__(self, handle=None):
+ self._timeout = win32pipe.NMPWAIT_USE_DEFAULT_WAIT
+ self._handle = handle
+ self._closed = False
+
+ def accept(self):
+ raise NotImplementedError()
+
+ def bind(self, address):
+ raise NotImplementedError()
+
+ def close(self):
+ self._handle.Close()
+ self._closed = True
+
+ @check_closed
+ def connect(self, address):
+ win32pipe.WaitNamedPipe(address, self._timeout)
+ handle = win32file.CreateFile(
+ address,
+ win32file.GENERIC_READ | win32file.GENERIC_WRITE,
+ 0,
+ None,
+ win32file.OPEN_EXISTING,
+ cSECURITY_ANONYMOUS | cSECURITY_SQOS_PRESENT,
+ 0
+ )
+ self.flags = win32pipe.GetNamedPipeInfo(handle)[0]
+
+ self._handle = handle
+ self._address = address
+
+ @check_closed
+ def connect_ex(self, address):
+ return self.connect(address)
+
+ @check_closed
+ def detach(self):
+ self._closed = True
+ return self._handle
+
+ @check_closed
+ def dup(self):
+ return NpipeSocket(self._handle)
+
+ @check_closed
+ def fileno(self):
+ return int(self._handle)
+
+ def getpeername(self):
+ return self._address
+
+ def getsockname(self):
+ return self._address
+
+ def getsockopt(self, level, optname, buflen=None):
+ raise NotImplementedError()
+
+ def ioctl(self, control, option):
+ raise NotImplementedError()
+
+ def listen(self, backlog):
+ raise NotImplementedError()
+
+ def makefile(self, mode=None, bufsize=None):
+ if mode.strip('b') != 'r':
+ raise NotImplementedError()
+ rawio = NpipeFileIOBase(self)
+ if bufsize is None:
+ bufsize = io.DEFAULT_BUFFER_SIZE
+ return io.BufferedReader(rawio, buffer_size=bufsize)
+
+ @check_closed
+ def recv(self, bufsize, flags=0):
+ err, data = win32file.ReadFile(self._handle, bufsize)
+ return data
+
+ @check_closed
+ def recvfrom(self, bufsize, flags=0):
+ data = self.recv(bufsize, flags)
+ return (data, self._address)
+
+ @check_closed
+ def recvfrom_into(self, buf, nbytes=0, flags=0):
+ return self.recv_into(buf, nbytes, flags), self._address
+
+ @check_closed
+ def recv_into(self, buf, nbytes=0):
+ readbuf = buf
+ if not isinstance(buf, memoryview):
+ readbuf = memoryview(buf)
+
+ err, data = win32file.ReadFile(
+ self._handle,
+ readbuf[:nbytes] if nbytes else readbuf
+ )
+ return len(data)
+
+ @check_closed
+ def send(self, string, flags=0):
+ err, nbytes = win32file.WriteFile(self._handle, string)
+ return nbytes
+
+ @check_closed
+ def sendall(self, string, flags=0):
+ return self.send(string, flags)
+
+ @check_closed
+ def sendto(self, string, address):
+ self.connect(address)
+ return self.send(string)
+
+ def setblocking(self, flag):
+ if flag:
+ return self.settimeout(None)
+ return self.settimeout(0)
+
+ def settimeout(self, value):
+ if value is None:
+ self._timeout = win32pipe.NMPWAIT_NOWAIT
+ elif not isinstance(value, (float, int)) or value < 0:
+ raise ValueError('Timeout value out of range')
+ elif value == 0:
+ self._timeout = win32pipe.NMPWAIT_USE_DEFAULT_WAIT
+ else:
+ self._timeout = value
+
+ def gettimeout(self):
+ return self._timeout
+
+ def setsockopt(self, level, optname, value):
+ raise NotImplementedError()
+
+ @check_closed
+ def shutdown(self, how):
+ return self.close()
+
+
+class NpipeFileIOBase(io.RawIOBase):
+ def __init__(self, npipe_socket):
+ self.sock = npipe_socket
+
+ def close(self):
+ super(NpipeFileIOBase, self).close()
+ self.sock = None
+
+ def fileno(self):
+ return self.sock.fileno()
+
+ def isatty(self):
+ return False
+
+ def readable(self):
+ return True
+
+ def readinto(self, buf):
+ return self.sock.recv_into(buf)
+
+ def seekable(self):
+ return False
+
+ def writable(self):
+ return False
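
On Windows, the named-pipe transport above is selected with an npipe:// base URL and needs the pypiwin32 package installed. A sketch; the pipe path shown is the engine's conventional default and is an assumption here, not something this diff defines:

    import docker

    # Windows only: talk to the daemon over its named pipe.
    client = docker.Client(base_url='npipe:////./pipe/docker_engine')
    print(client.info())
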
diff --git a/docker/unixconn/unixconn.py b/docker/transport/unixconn.py
index d7e249e..f4d83ef 100644
--- a/docker/unixconn/unixconn.py
+++ b/docker/transport/unixconn.py
@@ -30,7 +30,9 @@ RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class UnixHTTPConnection(httplib.HTTPConnection, object):
def __init__(self, base_url, unix_socket, timeout=60):
- httplib.HTTPConnection.__init__(self, 'localhost', timeout=timeout)
+ super(UnixHTTPConnection, self).__init__(
+ 'localhost', timeout=timeout
+ )
self.base_url = base_url
self.unix_socket = unix_socket
self.timeout = timeout
@@ -44,8 +46,8 @@ class UnixHTTPConnection(httplib.HTTPConnection, object):
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60):
- urllib3.connectionpool.HTTPConnectionPool.__init__(
- self, 'localhost', timeout=timeout
+ super(UnixHTTPConnectionPool, self).__init__(
+ 'localhost', timeout=timeout
)
self.base_url = base_url
self.socket_path = socket_path
diff --git a/docker/unixconn/__init__.py b/docker/unixconn/__init__.py
deleted file mode 100644
index 53711fc..0000000
--- a/docker/unixconn/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .unixconn import UnixAdapter # flake8: noqa
diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index 6fcf037..2ef8ef0 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -91,10 +91,10 @@ def decode_json_header(header):
return json.loads(data)
-def tar(path, exclude=None, dockerfile=None, fileobj=None):
+def tar(path, exclude=None, dockerfile=None, fileobj=None, gzip=False):
if not fileobj:
fileobj = tempfile.NamedTemporaryFile()
- t = tarfile.open(mode='w', fileobj=fileobj)
+ t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj)
root = os.path.abspath(path)
exclude = exclude or []
@@ -199,6 +199,9 @@ def get_paths(root, exclude_patterns, include_patterns, has_exceptions=False):
def match_path(path, pattern):
pattern = pattern.rstrip('/')
+ if pattern:
+ pattern = os.path.relpath(pattern)
+
pattern_components = pattern.split('/')
path_components = path.split('/')[:len(pattern_components)]
return fnmatch('/'.join(path_components), pattern)
@@ -337,6 +340,35 @@ def convert_volume_binds(binds):
return result
+def convert_tmpfs_mounts(tmpfs):
+ if isinstance(tmpfs, dict):
+ return tmpfs
+
+ if not isinstance(tmpfs, list):
+ raise ValueError(
+ 'Expected tmpfs value to be either a list or a dict, found: {}'
+ .format(type(tmpfs).__name__)
+ )
+
+ result = {}
+ for mount in tmpfs:
+ if isinstance(mount, six.string_types):
+ if ":" in mount:
+ name, options = mount.split(":", 1)
+ else:
+ name = mount
+ options = ""
+
+ else:
+ raise ValueError(
+ "Expected item in tmpfs list to be a string, found: {}"
+ .format(type(mount).__name__)
+ )
+
+ result[name] = options
+ return result
+
+
def parse_repository_tag(repo_name):
parts = repo_name.rsplit('@', 1)
if len(parts) == 2:
@@ -351,13 +383,13 @@ def parse_repository_tag(repo_name):
# fd:// protocol unsupported (for obvious reasons)
# Added support for http and https
# Protocol translation: tcp -> http, unix -> http+unix
-def parse_host(addr, platform=None, tls=False):
+def parse_host(addr, is_win32=False, tls=False):
proto = "http+unix"
host = DEFAULT_HTTP_HOST
port = None
path = ''
- if not addr and platform == 'win32':
+ if not addr and is_win32:
addr = '{0}:{1}'.format(DEFAULT_HTTP_HOST, 2375)
if not addr or addr.strip() == 'unix://':
@@ -371,15 +403,19 @@ def parse_host(addr, platform=None, tls=False):
if addr == 'tcp://':
raise errors.DockerException(
- "Invalid bind address format: {0}".format(addr))
+ "Invalid bind address format: {0}".format(addr)
+ )
elif addr.startswith('unix://'):
addr = addr[7:]
elif addr.startswith('tcp://'):
- proto = "http"
+ proto = 'http{0}'.format('s' if tls else '')
addr = addr[6:]
elif addr.startswith('https://'):
proto = "https"
addr = addr[8:]
+ elif addr.startswith('npipe://'):
+ proto = 'npipe'
+ addr = addr[8:]
elif addr.startswith('fd://'):
raise errors.DockerException("fd protocol is not implemented")
else:
@@ -415,7 +451,7 @@ def parse_host(addr, platform=None, tls=False):
else:
host = addr
- if proto == "http+unix":
+ if proto == "http+unix" or proto == 'npipe':
return "{0}://{1}".format(proto, host)
return "{0}://{1}:{2}{3}".format(proto, host, port, path)
@@ -449,27 +485,29 @@ def parse_devices(devices):
return device_list
-def kwargs_from_env(ssl_version=None, assert_hostname=None):
- host = os.environ.get('DOCKER_HOST')
+def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None):
+ if not environment:
+ environment = os.environ
+ host = environment.get('DOCKER_HOST')
# empty string for cert path is the same as unset.
- cert_path = os.environ.get('DOCKER_CERT_PATH') or None
+ cert_path = environment.get('DOCKER_CERT_PATH') or None
# empty string for tls verify counts as "false".
# Any value or 'unset' counts as true.
- tls_verify = os.environ.get('DOCKER_TLS_VERIFY')
+ tls_verify = environment.get('DOCKER_TLS_VERIFY')
if tls_verify == '':
tls_verify = False
- enable_tls = True
else:
tls_verify = tls_verify is not None
- enable_tls = cert_path or tls_verify
+ enable_tls = cert_path or tls_verify
params = {}
if host:
- params['base_url'] = (host.replace('tcp://', 'https://')
- if enable_tls else host)
+ params['base_url'] = (
+ host.replace('tcp://', 'https://') if enable_tls else host
+ )
if not enable_tls:
return params
@@ -511,48 +549,44 @@ def datetime_to_timestamp(dt):
return delta.seconds + delta.days * 24 * 3600
-def longint(n):
- if six.PY3:
- return int(n)
- return long(n)
-
-
def parse_bytes(s):
+ if isinstance(s, six.integer_types + (float,)):
+ return s
if len(s) == 0:
- s = 0
- else:
- if s[-2:-1].isalpha() and s[-1].isalpha():
- if s[-1] == "b" or s[-1] == "B":
- s = s[:-1]
- units = BYTE_UNITS
- suffix = s[-1].lower()
-
- # Check if the variable is a string representation of an int
- # without a units part. Assuming that the units are bytes.
- if suffix.isdigit():
- digits_part = s
- suffix = 'b'
- else:
- digits_part = s[:-1]
+ return 0
- if suffix in units.keys() or suffix.isdigit():
- try:
- digits = longint(digits_part)
- except ValueError:
- raise errors.DockerException(
- 'Failed converting the string value for memory ({0}) to'
- ' an integer.'.format(digits_part)
- )
+ if s[-2:-1].isalpha() and s[-1].isalpha():
+ if s[-1] == "b" or s[-1] == "B":
+ s = s[:-1]
+ units = BYTE_UNITS
+ suffix = s[-1].lower()
+
+ # Check if the variable is a string representation of an int
+ # without a units part. Assuming that the units are bytes.
+ if suffix.isdigit():
+ digits_part = s
+ suffix = 'b'
+ else:
+ digits_part = s[:-1]
- # Reconvert to long for the final result
- s = longint(digits * units[suffix])
- else:
+ if suffix in units.keys() or suffix.isdigit():
+ try:
+ digits = int(digits_part)
+ except ValueError:
raise errors.DockerException(
- 'The specified value for memory ({0}) should specify the'
- ' units. The postfix should be one of the `b` `k` `m` `g`'
- ' characters'.format(s)
+ 'Failed converting the string value for memory ({0}) to'
+ ' an integer.'.format(digits_part)
)
+ # Reconvert to long for the final result
+ s = int(digits * units[suffix])
+ else:
+ raise errors.DockerException(
+ 'The specified value for memory ({0}) should specify the'
+ ' units. The postfix should be one of the `b` `k` `m` `g`'
+ ' characters'.format(s)
+ )
+
return s
@@ -581,8 +615,12 @@ def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
security_opt=None, ulimits=None, log_config=None,
mem_limit=None, memswap_limit=None, mem_swappiness=None,
cgroup_parent=None, group_add=None, cpu_quota=None,
- cpu_period=None, oom_kill_disable=False, shm_size=None,
- version=None):
+ cpu_period=None, blkio_weight=None,
+ blkio_weight_device=None, device_read_bps=None,
+ device_write_bps=None, device_read_iops=None,
+ device_write_iops=None, oom_kill_disable=False,
+ shm_size=None, version=None, tmpfs=None,
+ oom_score_adj=None):
host_config = {}
@@ -594,16 +632,10 @@ def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
version = constants.DEFAULT_DOCKER_API_VERSION
if mem_limit is not None:
- if isinstance(mem_limit, six.string_types):
- mem_limit = parse_bytes(mem_limit)
-
- host_config['Memory'] = mem_limit
+ host_config['Memory'] = parse_bytes(mem_limit)
if memswap_limit is not None:
- if isinstance(memswap_limit, six.string_types):
- memswap_limit = parse_bytes(memswap_limit)
-
- host_config['MemorySwap'] = memswap_limit
+ host_config['MemorySwap'] = parse_bytes(memswap_limit)
if mem_swappiness is not None:
if version_lt(version, '1.20'):
@@ -638,6 +670,15 @@ def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
host_config['OomKillDisable'] = oom_kill_disable
+ if oom_score_adj:
+ if version_lt(version, '1.22'):
+ raise host_config_version_error('oom_score_adj', '1.22')
+ if not isinstance(oom_score_adj, int):
+ raise host_config_type_error(
+ 'oom_score_adj', oom_score_adj, 'int'
+ )
+ host_config['OomScoreAdj'] = oom_score_adj
+
if publish_all_ports:
host_config['PublishAllPorts'] = publish_all_ports
@@ -755,6 +796,63 @@ def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
host_config['CpuPeriod'] = cpu_period
+ if blkio_weight:
+ if not isinstance(blkio_weight, int):
+ raise host_config_type_error('blkio_weight', blkio_weight, 'int')
+ if version_lt(version, '1.22'):
+ raise host_config_version_error('blkio_weight', '1.22')
+ host_config["BlkioWeight"] = blkio_weight
+
+ if blkio_weight_device:
+ if not isinstance(blkio_weight_device, list):
+ raise host_config_type_error(
+ 'blkio_weight_device', blkio_weight_device, 'list'
+ )
+ if version_lt(version, '1.22'):
+ raise host_config_version_error('blkio_weight_device', '1.22')
+ host_config["BlkioWeightDevice"] = blkio_weight_device
+
+ if device_read_bps:
+ if not isinstance(device_read_bps, list):
+ raise host_config_type_error(
+ 'device_read_bps', device_read_bps, 'list'
+ )
+ if version_lt(version, '1.22'):
+ raise host_config_version_error('device_read_bps', '1.22')
+ host_config["BlkioDeviceReadBps"] = device_read_bps
+
+ if device_write_bps:
+ if not isinstance(device_write_bps, list):
+ raise host_config_type_error(
+ 'device_write_bps', device_write_bps, 'list'
+ )
+ if version_lt(version, '1.22'):
+ raise host_config_version_error('device_write_bps', '1.22')
+ host_config["BlkioDeviceWriteBps"] = device_write_bps
+
+ if device_read_iops:
+ if not isinstance(device_read_iops, list):
+ raise host_config_type_error(
+ 'device_read_iops', device_read_iops, 'list'
+ )
+ if version_lt(version, '1.22'):
+ raise host_config_version_error('device_read_iops', '1.22')
+ host_config["BlkioDeviceReadIOps"] = device_read_iops
+
+ if device_write_iops:
+ if not isinstance(device_write_iops, list):
+ raise host_config_type_error(
+ 'device_write_iops', device_write_iops, 'list'
+ )
+ if version_lt(version, '1.22'):
+ raise host_config_version_error('device_write_iops', '1.22')
+ host_config["BlkioDeviceWriteIOps"] = device_write_iops
+
+ if tmpfs:
+ if version_lt(version, '1.22'):
+ raise host_config_version_error('tmpfs', '1.22')
+ host_config["Tmpfs"] = convert_tmpfs_mounts(tmpfs)
+
return host_config
@@ -774,19 +872,30 @@ def create_networking_config(endpoints_config=None):
return networking_config
-def create_endpoint_config(version, aliases=None, links=None):
+def create_endpoint_config(version, aliases=None, links=None,
+ ipv4_address=None, ipv6_address=None):
+ if version_lt(version, '1.22'):
+ raise errors.InvalidVersion(
+ 'Endpoint config is not supported for API version < 1.22'
+ )
endpoint_config = {}
if aliases:
- if version_lt(version, '1.22'):
- raise host_config_version_error('endpoint_config.aliases', '1.22')
endpoint_config["Aliases"] = aliases
if links:
- if version_lt(version, '1.22'):
- raise host_config_version_error('endpoint_config.links', '1.22')
endpoint_config["Links"] = normalize_links(links)
+ ipam_config = {}
+ if ipv4_address:
+ ipam_config['IPv4Address'] = ipv4_address
+
+ if ipv6_address:
+ ipam_config['IPv6Address'] = ipv6_address
+
+ if ipam_config:
+ endpoint_config['IPAMConfig'] = ipam_config
+
return endpoint_config
@@ -803,7 +912,7 @@ def parse_env_file(env_file):
if line[0] == '#':
continue
- parse_line = line.strip().split('=')
+ parse_line = line.strip().split('=', 1)
if len(parse_line) == 2:
k, v = parse_line
environment[k] = v
@@ -821,6 +930,14 @@ def split_command(command):
return shlex.split(command)
+def format_environment(environment):
+ def format_env(key, value):
+ if value is None:
+ return key
+ return '{key}={value}'.format(key=key, value=value)
+ return [format_env(*var) for var in six.iteritems(environment)]
+
+
def create_container_config(
version, image, command, hostname=None, user=None, detach=False,
stdin_open=False, tty=False, mem_limit=None, ports=None, environment=None,
@@ -836,10 +953,7 @@ def create_container_config(
entrypoint = split_command(entrypoint)
if isinstance(environment, dict):
- environment = [
- six.text_type('{0}={1}').format(k, v)
- for k, v in six.iteritems(environment)
- ]
+ environment = format_environment(environment)
if labels is not None and compare_version('1.18', version) < 0:
raise errors.InvalidVersion(
@@ -873,9 +987,9 @@ def create_container_config(
if isinstance(labels, list):
labels = dict((lbl, six.text_type('')) for lbl in labels)
- if isinstance(mem_limit, six.string_types):
+ if mem_limit is not None:
mem_limit = parse_bytes(mem_limit)
- if isinstance(memswap_limit, six.string_types):
+ if memswap_limit is not None:
memswap_limit = parse_bytes(memswap_limit)
if isinstance(ports, list):
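
A sketch tying the utils changes above together: tmpfs mounts (API >= 1.22) given as a list or dict, parse_bytes() accepting plain numbers as well as suffixed strings, and dict environments where a None value emits just the variable name. The paths and sizes are illustrative:

    import docker

    client = docker.Client(base_url='unix://var/run/docker.sock', version='auto')
    host_config = client.create_host_config(
        tmpfs=['/cache:size=3M'],   # normalized by convert_tmpfs_mounts()
        mem_limit='256m',           # parse_bytes() handles the 'm' suffix
    )
    container = client.create_container(
        'busybox', 'true',
        environment={'FOO': None, 'BAR': 'baz'},  # -> ['FOO', 'BAR=baz']
        host_config=host_config,
    )
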
diff --git a/docker/version.py b/docker/version.py
index f98f42a..95405c7 100644
--- a/docker/version.py
+++ b/docker/version.py
@@ -1,2 +1,2 @@
-version = "1.7.2"
+version = "1.9.0"
version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
diff --git a/docker_py.egg-info/PKG-INFO b/docker_py.egg-info/PKG-INFO
index fd5c3e7..79c1598 100644
--- a/docker_py.egg-info/PKG-INFO
+++ b/docker_py.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: docker-py
-Version: 1.7.2
+Version: 1.9.0
Summary: Python client for Docker.
Home-page: https://github.com/docker/docker-py/
Author: UNKNOWN
@@ -13,9 +13,12 @@ Classifier: Environment :: Other Environment
Classifier: Intended Audience :: Developers
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
Classifier: Topic :: Utilities
Classifier: License :: OSI Approved :: Apache Software License
diff --git a/docker_py.egg-info/SOURCES.txt b/docker_py.egg-info/SOURCES.txt
index f8897b1..69cc77b 100644
--- a/docker_py.egg-info/SOURCES.txt
+++ b/docker_py.egg-info/SOURCES.txt
@@ -1,6 +1,7 @@
LICENSE
MANIFEST.in
README.md
+README.rst
requirements.txt
setup.cfg
setup.py
@@ -23,8 +24,10 @@ docker/auth/__init__.py
docker/auth/auth.py
docker/ssladapter/__init__.py
docker/ssladapter/ssladapter.py
-docker/unixconn/__init__.py
-docker/unixconn/unixconn.py
+docker/transport/__init__.py
+docker/transport/npipeconn.py
+docker/transport/npipesocket.py
+docker/transport/unixconn.py
docker/utils/__init__.py
docker/utils/decorators.py
docker/utils/types.py
@@ -35,7 +38,6 @@ docker_py.egg-info/PKG-INFO
docker_py.egg-info/SOURCES.txt
docker_py.egg-info/dependency_links.txt
docker_py.egg-info/not-zip-safe
-docker_py.egg-info/pbr.json
docker_py.egg-info/requires.txt
docker_py.egg-info/top_level.txt
tests/__init__.py
@@ -55,11 +57,16 @@ tests/unit/__init__.py
tests/unit/api_test.py
tests/unit/auth_test.py
tests/unit/build_test.py
+tests/unit/client_test.py
tests/unit/container_test.py
tests/unit/exec_test.py
tests/unit/fake_api.py
tests/unit/fake_stat.py
tests/unit/image_test.py
tests/unit/network_test.py
+tests/unit/ssladapter_test.py
tests/unit/utils_test.py
-tests/unit/volume_test.py \ No newline at end of file
+tests/unit/volume_test.py
+tests/unit/testdata/certs/ca.pem
+tests/unit/testdata/certs/cert.pem
+tests/unit/testdata/certs/key.pem \ No newline at end of file
diff --git a/docker_py.egg-info/pbr.json b/docker_py.egg-info/pbr.json
deleted file mode 100644
index d0a4bff..0000000
--- a/docker_py.egg-info/pbr.json
+++ /dev/null
@@ -1 +0,0 @@
-{"is_release": false, "git_version": "36f3480"} \ No newline at end of file
diff --git a/docker_py.egg-info/requires.txt b/docker_py.egg-info/requires.txt
index aa5b97e..c3febfb 100644
--- a/docker_py.egg-info/requires.txt
+++ b/docker_py.egg-info/requires.txt
@@ -1,3 +1,9 @@
requests >= 2.5.3
six >= 1.4.0
-websocket-client >= 0.18.0
+websocket-client >= 0.32.0
+
+[:python_version < "3.3"]
+ipaddress >= 1.0.16
+
+[:python_version < "3.5"]
+backports.ssl_match_hostname >= 3.5
diff --git a/requirements.txt b/requirements.txt
index cd79880..b06a541 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,5 @@
requests>=2.5.3
six>=1.4.0
-websocket-client>=0.18.0
+websocket-client>=0.32.0
+backports.ssl_match_hostname>=3.5 ; python_version < '3.5'
+ipaddress==1.0.16 ; python_version < '3.3'
diff --git a/setup.py b/setup.py
index f2af6b9..f7c0443 100644
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,10 @@
#!/usr/bin/env python
import os
import sys
+
from setuptools import setup
+
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
@@ -12,6 +14,15 @@ requirements = [
'websocket-client >= 0.18.0',
]
+if sys.platform == 'win32':
+ requirements.append('pypiwin32 >= 219')
+
+extras_require = {
+ ':python_version < "3.5"': 'backports.ssl_match_hostname >= 3.5',
+ ':python_version < "3.3"': 'ipaddress >= 1.0.16',
+}
+
+version = None
exec(open('docker/version.py').read())
with open('./test-requirements.txt') as test_reqs_txt:
@@ -24,11 +35,12 @@ setup(
description="Python client for Docker.",
url='https://github.com/docker/docker-py/',
packages=[
- 'docker', 'docker.api', 'docker.auth', 'docker.unixconn',
+ 'docker', 'docker.api', 'docker.auth', 'docker.transport',
'docker.utils', 'docker.utils.ports', 'docker.ssladapter'
],
install_requires=requirements,
tests_require=test_requirements,
+ extras_require=extras_require,
zip_safe=False,
test_suite='tests',
classifiers=[
@@ -37,10 +49,13 @@ setup(
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License',
],
diff --git a/test-requirements.txt b/test-requirements.txt
index 45b2b38..9f9eb50 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,5 +1,5 @@
mock>=1.0.1
-pytest>=2.7.2
+pytest>=2.9.1
coverage>=3.7.1
pytest-cov>=2.1.0
flake8>=2.4.1
diff --git a/tests/integration/api_test.py b/tests/integration/api_test.py
index e120c84..67ed068 100644
--- a/tests/integration/api_test.py
+++ b/tests/integration/api_test.py
@@ -49,7 +49,7 @@ class LinkTest(helpers.BaseTestCase):
container2 = self.client.create_container(
helpers.BUSYBOX, 'cat', host_config=self.client.create_host_config(
- links={link_path: link_alias}, network_mode='none'
+ links={link_path: link_alias}
)
)
container2_id = container2['Id']
diff --git a/tests/integration/build_test.py b/tests/integration/build_test.py
index 26164ae..cc8a862 100644
--- a/tests/integration/build_test.py
+++ b/tests/integration/build_test.py
@@ -6,6 +6,8 @@ import tempfile
import six
+from docker import errors
+
from .. import helpers
from ..base import requires_api_version
@@ -138,3 +140,29 @@ class BuildTest(helpers.BaseTestCase):
control_chars[0], control_chars[1], snippet
)
self.assertTrue(any([line == expected for line in lines]))
+
+ def test_build_gzip_encoding(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write("\n".join([
+ 'FROM busybox',
+ 'MAINTAINER docker-py',
+ 'ADD . /test',
+ ]))
+
+ stream = self.client.build(
+ path=base_dir, stream=True, decode=True, nocache=True,
+ gzip=True
+ )
+
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+
+ assert 'Successfully built' in lines[-1]['stream']
+
+ def test_build_gzip_custom_encoding(self):
+ with self.assertRaises(errors.DockerException):
+ self.client.build(path='.', gzip=True, encoding='text/html')
diff --git a/tests/integration/container_test.py b/tests/integration/container_test.py
index 1714599..56b648a 100644
--- a/tests/integration/container_test.py
+++ b/tests/integration/container_test.py
@@ -280,15 +280,14 @@ class CreateContainerTest(helpers.BaseTestCase):
config={}
)
- container = self.client.create_container(
- BUSYBOX, ['true'],
- host_config=self.client.create_host_config(log_config=log_config)
- )
-
expected_msg = "logger: no log driver named 'asdf-nope' is registered"
-
with pytest.raises(docker.errors.APIError) as excinfo:
# raises an internal server error 500
+ container = self.client.create_container(
+ BUSYBOX, ['true'], host_config=self.client.create_host_config(
+ log_config=log_config
+ )
+ )
self.client.start(container)
assert expected_msg in str(excinfo.value)
@@ -370,6 +369,35 @@ class CreateContainerTest(helpers.BaseTestCase):
self.assertRaises(ValueError,
self.client.create_host_config, pid_mode='40')
+ def test_create_with_environment_variable_no_value(self):
+ container = self.client.create_container(
+ BUSYBOX,
+ ['echo'],
+ environment={'Foo': None, 'Other': 'one', 'Blank': ''},
+ )
+ self.tmp_containers.append(container['Id'])
+ config = self.client.inspect_container(container['Id'])
+ assert (
+ sorted(config['Config']['Env']) ==
+ sorted(['Foo', 'Other=one', 'Blank='])
+ )
+
+ @requires_api_version('1.22')
+ def test_create_with_tmpfs(self):
+ tmpfs = {
+ '/tmp1': 'size=3M'
+ }
+
+ container = self.client.create_container(
+ BUSYBOX,
+ ['echo'],
+ host_config=self.client.create_host_config(
+ tmpfs=tmpfs))
+
+ self.tmp_containers.append(container['Id'])
+ config = self.client.inspect_container(container)
+ assert config['HostConfig']['Tmpfs'] == tmpfs
+
class VolumeBindTest(helpers.BaseTestCase):
def setUp(self):
@@ -666,7 +694,7 @@ Line2'''
logs = self.client.logs(id, tail=1)
self.assertEqual(logs, 'Line2\n'.encode(encoding='ascii'))
- def test_logs_streaming(self):
+ def test_logs_streaming_and_follow(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
BUSYBOX, 'echo {0}'.format(snippet)
@@ -675,7 +703,7 @@ Line2'''
self.tmp_containers.append(id)
self.client.start(id)
logs = six.binary_type()
- for chunk in self.client.logs(id, stream=True):
+ for chunk in self.client.logs(id, stream=True, follow=True):
logs += chunk
exitcode = self.client.wait(id)
@@ -812,6 +840,36 @@ class KillTest(helpers.BaseTestCase):
self.assertIn('Running', state)
self.assertEqual(state['Running'], False, state)
+ def test_kill_with_signal_name(self):
+ id = self.client.create_container(BUSYBOX, ['sleep', '60'])
+ self.client.start(id)
+ self.tmp_containers.append(id)
+ self.client.kill(id, signal='SIGKILL')
+ exitcode = self.client.wait(id)
+ self.assertNotEqual(exitcode, 0)
+ container_info = self.client.inspect_container(id)
+ self.assertIn('State', container_info)
+ state = container_info['State']
+ self.assertIn('ExitCode', state)
+ self.assertNotEqual(state['ExitCode'], 0)
+ self.assertIn('Running', state)
+ self.assertEqual(state['Running'], False, state)
+
+ def test_kill_with_signal_integer(self):
+ id = self.client.create_container(BUSYBOX, ['sleep', '60'])
+ self.client.start(id)
+ self.tmp_containers.append(id)
+ self.client.kill(id, signal=9)
+ exitcode = self.client.wait(id)
+ self.assertNotEqual(exitcode, 0)
+ container_info = self.client.inspect_container(id)
+ self.assertIn('State', container_info)
+ state = container_info['State']
+ self.assertIn('ExitCode', state)
+ self.assertNotEqual(state['ExitCode'], 0)
+ self.assertIn('Running', state)
+ self.assertEqual(state['Running'], False, state)
+
class PortTest(helpers.BaseTestCase):
def test_port(self):
@@ -1031,3 +1089,21 @@ class GetContainerStatsTest(helpers.BaseTestCase):
for key in ['read', 'network', 'precpu_stats', 'cpu_stats',
'memory_stats', 'blkio_stats']:
self.assertIn(key, chunk)
+
+
+class ContainerUpdateTest(helpers.BaseTestCase):
+ @requires_api_version('1.22')
+ def test_update_container(self):
+ old_mem_limit = 400 * 1024 * 1024
+ new_mem_limit = 300 * 1024 * 1024
+ container = self.client.create_container(
+ BUSYBOX, 'top', host_config=self.client.create_host_config(
+ mem_limit=old_mem_limit
+ ), cpu_shares=102
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ self.client.update_container(container, mem_limit=new_mem_limit)
+ inspect_data = self.client.inspect_container(container)
+ self.assertEqual(inspect_data['HostConfig']['Memory'], new_mem_limit)
+ self.assertEqual(inspect_data['HostConfig']['CpuShares'], 102)
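
The integration tests added above exercise two client-side features from this release: tmpfs mounts declared through create_host_config(tmpfs=...) (API 1.22+) and the new update_container() call for adjusting resource limits on a running container. A minimal usage sketch, assuming docker-py 1.9.0 and a reachable Docker daemon; the image, tmpfs size and memory limit are illustrative:

import docker

client = docker.Client(version='auto')

# tmpfs mount declared on the host config (requires API >= 1.22)
host_config = client.create_host_config(tmpfs={'/tmp1': 'size=3M'})
container = client.create_container('busybox', 'top', host_config=host_config)
client.start(container)

# adjust the memory limit of the running container in place
client.update_container(container, mem_limit=300 * 1024 * 1024)
print(client.inspect_container(container)['HostConfig']['Memory'])
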
diff --git a/tests/integration/image_test.py b/tests/integration/image_test.py
index 825f632..9f38366 100644
--- a/tests/integration/image_test.py
+++ b/tests/integration/image_test.py
@@ -90,6 +90,20 @@ class CommitTest(helpers.BaseTestCase):
self.assertIn('Parent', img)
self.assertEqual(img['Parent'], busybox_id)
+ def test_commit_with_changes(self):
+ cid = self.client.create_container(BUSYBOX, ['touch', '/test'])
+ self.tmp_containers.append(cid)
+ self.client.start(cid)
+ img_id = self.client.commit(
+ cid, changes=['EXPOSE 8000', 'CMD ["bash"]']
+ )
+ self.tmp_imgs.append(img_id)
+ img = self.client.inspect_image(img_id)
+ assert 'Container' in img
+ assert img['Container'].startswith(cid['Id'])
+ assert '8000/tcp' in img['Config']['ExposedPorts']
+ assert img['Config']['Cmd'] == ['bash']
+
class RemoveImageTest(helpers.BaseTestCase):
def test_remove(self):
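
The commit test above relies on the changes= parameter added to Client.commit(), which applies Dockerfile-style instructions to the image being committed. A short sketch under the same assumptions (daemon reachable, values illustrative):

import docker

client = docker.Client(version='auto')
cid = client.create_container('busybox', ['touch', '/test'])
client.start(cid)

# apply Dockerfile instructions while committing the container
img_id = client.commit(cid, changes=['EXPOSE 8000', 'CMD ["bash"]'])
print(client.inspect_image(img_id)['Config']['ExposedPorts'])
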
diff --git a/tests/integration/network_test.py b/tests/integration/network_test.py
index a379bbf..f719fea 100644
--- a/tests/integration/network_test.py
+++ b/tests/integration/network_test.py
@@ -138,9 +138,11 @@ class TestNetworks(helpers.BaseTestCase):
self.client.connect_container_to_network(
container, net_id, aliases=['foo', 'bar'])
container_data = self.client.inspect_container(container)
- self.assertEqual(
- container_data['NetworkSettings']['Networks'][net_name]['Aliases'],
- ['foo', 'bar'])
+ aliases = (
+ container_data['NetworkSettings']['Networks'][net_name]['Aliases']
+ )
+ assert 'foo' in aliases
+ assert 'bar' in aliases
@requires_api_version('1.21')
def test_connect_on_container_create(self):
@@ -183,9 +185,69 @@ class TestNetworks(helpers.BaseTestCase):
self.client.start(container)
container_data = self.client.inspect_container(container)
+ aliases = (
+ container_data['NetworkSettings']['Networks'][net_name]['Aliases']
+ )
+ assert 'foo' in aliases
+ assert 'bar' in aliases
+
+ @requires_api_version('1.22')
+ def test_create_with_ipv4_address(self):
+ net_name, net_id = self.create_network(
+ ipam=create_ipam_config(
+ driver='default',
+ pool_configs=[create_ipam_pool(subnet="132.124.0.0/16")],
+ ),
+ )
+ container = self.client.create_container(
+ image='busybox', command='top',
+ host_config=self.client.create_host_config(network_mode=net_name),
+ networking_config=self.client.create_networking_config({
+ net_name: self.client.create_endpoint_config(
+ ipv4_address='132.124.0.23'
+ )
+ })
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+
+ container_data = self.client.inspect_container(container)
self.assertEqual(
- container_data['NetworkSettings']['Networks'][net_name]['Aliases'],
- ['foo', 'bar'])
+ container_data[
+ 'NetworkSettings']['Networks'][net_name]['IPAMConfig'][
+ 'IPv4Address'
+ ],
+ '132.124.0.23'
+ )
+
+ @requires_api_version('1.22')
+ def test_create_with_ipv6_address(self):
+ net_name, net_id = self.create_network(
+ ipam=create_ipam_config(
+ driver='default',
+ pool_configs=[create_ipam_pool(subnet="2001:389::1/64")],
+ ),
+ )
+ container = self.client.create_container(
+ image='busybox', command='top',
+ host_config=self.client.create_host_config(network_mode=net_name),
+ networking_config=self.client.create_networking_config({
+ net_name: self.client.create_endpoint_config(
+ ipv6_address='2001:389::f00d'
+ )
+ })
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+
+ container_data = self.client.inspect_container(container)
+ self.assertEqual(
+ container_data[
+ 'NetworkSettings']['Networks'][net_name]['IPAMConfig'][
+ 'IPv6Address'
+ ],
+ '2001:389::f00d'
+ )
@requires_api_version('1.22')
def test_create_with_links(self):
@@ -212,6 +274,13 @@ class TestNetworks(helpers.BaseTestCase):
self.execute(container, ['nslookup', 'bar'])
+ @requires_api_version('1.21')
+ def test_create_check_duplicate(self):
+ net_name, net_id = self.create_network()
+ with self.assertRaises(docker.errors.APIError):
+ self.client.create_network(net_name, check_duplicate=True)
+ self.client.create_network(net_name, check_duplicate=False)
+
@requires_api_version('1.22')
def test_connect_with_links(self):
net_name, net_id = self.create_network()
@@ -235,3 +304,65 @@ class TestNetworks(helpers.BaseTestCase):
)
self.execute(container, ['nslookup', 'bar'])
+
+ @requires_api_version('1.22')
+ def test_connect_with_ipv4_address(self):
+ net_name, net_id = self.create_network(
+ ipam=create_ipam_config(
+ driver='default',
+ pool_configs=[
+ create_ipam_pool(
+ subnet="172.28.0.0/16", iprange="172.28.5.0/24",
+ gateway="172.28.5.254"
+ )
+ ]
+ )
+ )
+
+ container = self.create_and_start(
+ host_config=self.client.create_host_config(network_mode=net_name))
+
+ self.client.disconnect_container_from_network(container, net_name)
+ self.client.connect_container_to_network(
+ container, net_name, ipv4_address='172.28.5.24'
+ )
+
+ container_data = self.client.inspect_container(container)
+ net_data = container_data['NetworkSettings']['Networks'][net_name]
+ self.assertEqual(
+ net_data['IPAMConfig']['IPv4Address'], '172.28.5.24'
+ )
+
+ @requires_api_version('1.22')
+ def test_connect_with_ipv6_address(self):
+ net_name, net_id = self.create_network(
+ ipam=create_ipam_config(
+ driver='default',
+ pool_configs=[
+ create_ipam_pool(
+ subnet="2001:389::1/64", iprange="2001:389::0/96",
+ gateway="2001:389::ffff"
+ )
+ ]
+ )
+ )
+
+ container = self.create_and_start(
+ host_config=self.client.create_host_config(network_mode=net_name))
+
+ self.client.disconnect_container_from_network(container, net_name)
+ self.client.connect_container_to_network(
+ container, net_name, ipv6_address='2001:389::f00d'
+ )
+
+ container_data = self.client.inspect_container(container)
+ net_data = container_data['NetworkSettings']['Networks'][net_name]
+ self.assertEqual(
+ net_data['IPAMConfig']['IPv6Address'], '2001:389::f00d'
+ )
+
+ @requires_api_version('1.23')
+ def test_create_internal_networks(self):
+ _, net_id = self.create_network(internal=True)
+ net = self.client.inspect_network(net_id)
+ assert net['Internal'] is True
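
The network tests above cover the static-address support introduced with API 1.22: an IPAM pool is declared when the network is created, and a fixed address is requested either at container creation time through an endpoint config or afterwards via connect_container_to_network(). A condensed sketch; the network name, subnet and address are illustrative:

import docker
from docker.utils import create_ipam_config, create_ipam_pool

client = docker.Client(version='auto')

ipam = create_ipam_config(
    driver='default',
    pool_configs=[create_ipam_pool(subnet='172.28.0.0/16')],
)
client.create_network('static-net', driver='bridge', ipam=ipam)

# request a fixed IPv4 address inside the declared pool
container = client.create_container(
    'busybox', 'top',
    host_config=client.create_host_config(network_mode='static-net'),
    networking_config=client.create_networking_config({
        'static-net': client.create_endpoint_config(ipv4_address='172.28.0.23')
    })
)
client.start(container)
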
diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
index 23fd191..bfe196c 100644
--- a/tests/unit/api_test.py
+++ b/tests/unit/api_test.py
@@ -415,3 +415,33 @@ class StreamTest(base.Cleanup, base.BaseTestCase):
self.assertEqual(list(stream), [
str(i).encode() for i in range(50)])
+
+
+class UserAgentTest(base.BaseTestCase):
+ def setUp(self):
+ self.patcher = mock.patch.object(
+ docker.Client,
+ 'send',
+ return_value=fake_resp("GET", "%s/version" % fake_api.prefix)
+ )
+ self.mock_send = self.patcher.start()
+
+ def tearDown(self):
+ self.patcher.stop()
+
+ def test_default_user_agent(self):
+ client = docker.Client()
+ client.version()
+
+ self.assertEqual(self.mock_send.call_count, 1)
+ headers = self.mock_send.call_args[0][0].headers
+ expected = 'docker-py/%s' % docker.__version__
+ self.assertEqual(headers['User-Agent'], expected)
+
+ def test_custom_user_agent(self):
+ client = docker.Client(user_agent='foo/bar')
+ client.version()
+
+ self.assertEqual(self.mock_send.call_count, 1)
+ headers = self.mock_send.call_args[0][0].headers
+ self.assertEqual(headers['User-Agent'], 'foo/bar')
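
UserAgentTest checks the new user_agent keyword on the client constructor; by default every request is sent as docker-py/<library version>. A trivial sketch, assuming a reachable daemon:

import docker

# default header: User-Agent: docker-py/<library version>
client = docker.Client()

# override the header for every request made by this client
client = docker.Client(user_agent='my-tool/1.0')
client.version()  # sent with User-Agent: my-tool/1.0
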
diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py
index 921aae0..4ea4047 100644
--- a/tests/unit/auth_test.py
+++ b/tests/unit/auth_test.py
@@ -459,6 +459,5 @@ class LoadConfigTest(base.Cleanup, base.BaseTestCase):
with open(dockercfg_path, 'w') as f:
json.dump(config, f)
- self.assertRaises(
- errors.InvalidConfigFile, auth.load_config, dockercfg_path
- )
+ cfg = auth.load_config(dockercfg_path)
+ assert cfg == {}
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
new file mode 100644
index 0000000..b21f1d6
--- /dev/null
+++ b/tests/unit/client_test.py
@@ -0,0 +1,70 @@
+import os
+from docker.client import Client
+from .. import base
+
+TEST_CERT_DIR = os.path.join(
+ os.path.dirname(__file__),
+ 'testdata/certs',
+)
+
+
+class ClientTest(base.BaseTestCase):
+ def setUp(self):
+ self.os_environ = os.environ.copy()
+
+ def tearDown(self):
+ os.environ = self.os_environ
+
+ def test_from_env(self):
+ """Test that environment variables are passed through to
+ utils.kwargs_from_env(). KwargsFromEnvTest tests that environment
+ variables are parsed correctly."""
+ os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
+ DOCKER_CERT_PATH=TEST_CERT_DIR,
+ DOCKER_TLS_VERIFY='1')
+ client = Client.from_env()
+ self.assertEqual(client.base_url, "https://192.168.59.103:2376")
+
+
+class DisableSocketTest(base.BaseTestCase):
+ class DummySocket(object):
+ def __init__(self, timeout=60):
+ self.timeout = timeout
+
+ def settimeout(self, timeout):
+ self.timeout = timeout
+
+ def gettimeout(self):
+ return self.timeout
+
+ def setUp(self):
+ self.client = Client()
+
+ def test_disable_socket_timeout(self):
+ """Test that the timeout is disabled on a generic socket object."""
+ socket = self.DummySocket()
+
+ self.client._disable_socket_timeout(socket)
+
+ self.assertEqual(socket.timeout, None)
+
+ def test_disable_socket_timeout2(self):
+ """Test that the timeouts are disabled on a generic socket object
+ and it's _sock object if present."""
+ socket = self.DummySocket()
+ socket._sock = self.DummySocket()
+
+ self.client._disable_socket_timeout(socket)
+
+ self.assertEqual(socket.timeout, None)
+ self.assertEqual(socket._sock.timeout, None)
+
+ def test_disable_socket_timout_non_blocking(self):
+ """Test that a non-blocking socket does not get set to blocking."""
+ socket = self.DummySocket()
+ socket._sock = self.DummySocket(0.0)
+
+ self.client._disable_socket_timeout(socket)
+
+ self.assertEqual(socket.timeout, None)
+ self.assertEqual(socket._sock.timeout, 0.0)
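
client_test.py covers the new Client.from_env() constructor, which builds a client from the standard DOCKER_HOST, DOCKER_TLS_VERIFY and DOCKER_CERT_PATH variables (the same ones docker-machine exports). A sketch, assuming those variables are present in the environment:

import docker

# picks up DOCKER_HOST, DOCKER_TLS_VERIFY and DOCKER_CERT_PATH
client = docker.Client.from_env()
print(client.base_url)
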
diff --git a/tests/unit/container_test.py b/tests/unit/container_test.py
index c2b2573..2a72c17 100644
--- a/tests/unit/container_test.py
+++ b/tests/unit/container_test.py
@@ -1016,6 +1016,64 @@ class CreateContainerTest(DockerClientTest):
}
}}'''))
+ @requires_api_version('1.22')
+ def test_create_container_with_tmpfs_list(self):
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ tmpfs=[
+ "/tmp",
+ "/mnt:size=3G,uid=100"
+ ]
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix +
+ 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Tmpfs'] = {
+ "/tmp": "",
+ "/mnt": "size=3G,uid=100"
+ }
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
+ @requires_api_version('1.22')
+ def test_create_container_with_tmpfs_dict(self):
+
+ self.client.create_container(
+ 'busybox', 'true', host_config=self.client.create_host_config(
+ tmpfs={
+ "/tmp": "",
+ "/mnt": "size=3G,uid=100"
+ }
+ )
+ )
+
+ args = fake_request.call_args
+ self.assertEqual(args[0][1], url_prefix +
+ 'containers/create')
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = self.client.create_host_config()
+ expected_payload['HostConfig']['Tmpfs'] = {
+ "/tmp": "",
+ "/mnt": "size=3G,uid=100"
+ }
+ self.assertEqual(json.loads(args[1]['data']), expected_payload)
+ self.assertEqual(args[1]['headers'],
+ {'Content-Type': 'application/json'})
+ self.assertEqual(
+ args[1]['timeout'],
+ DEFAULT_TIMEOUT_SECONDS
+ )
+
class ContainerTest(DockerClientTest):
def test_list_containers(self):
@@ -1121,6 +1179,36 @@ class ContainerTest(DockerClientTest):
def test_log_streaming(self):
with mock.patch('docker.Client.inspect_container',
fake_inspect_container):
+ self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True,
+ follow=False)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/logs',
+ params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1,
+ 'tail': 'all'},
+ timeout=DEFAULT_TIMEOUT_SECONDS,
+ stream=True
+ )
+
+ def test_log_following(self):
+ with mock.patch('docker.Client.inspect_container',
+ fake_inspect_container):
+ self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
+ follow=True)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/logs',
+ params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
+ 'tail': 'all'},
+ timeout=DEFAULT_TIMEOUT_SECONDS,
+ stream=False
+ )
+
+ def test_log_following_backwards(self):
+ with mock.patch('docker.Client.inspect_container',
+ fake_inspect_container):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True)
fake_request.assert_called_with(
@@ -1132,12 +1220,27 @@ class ContainerTest(DockerClientTest):
stream=True
)
+ def test_log_streaming_and_following(self):
+ with mock.patch('docker.Client.inspect_container',
+ fake_inspect_container):
+ self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=True,
+ follow=True)
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'containers/3cc2351ab11b/logs',
+ params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1,
+ 'tail': 'all'},
+ timeout=DEFAULT_TIMEOUT_SECONDS,
+ stream=True
+ )
+
def test_log_tail(self):
with mock.patch('docker.Client.inspect_container',
fake_inspect_container):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
- tail=10)
+ follow=False, tail=10)
fake_request.assert_called_with(
'GET',
@@ -1153,7 +1256,7 @@ class ContainerTest(DockerClientTest):
with mock.patch('docker.Client.inspect_container',
fake_inspect_container):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
- since=ts)
+ follow=False, since=ts)
fake_request.assert_called_with(
'GET',
@@ -1170,7 +1273,7 @@ class ContainerTest(DockerClientTest):
with mock.patch('docker.Client.inspect_container',
fake_inspect_container):
self.client.logs(fake_api.FAKE_CONTAINER_ID, stream=False,
- since=time)
+ follow=False, since=time)
fake_request.assert_called_with(
'GET',
@@ -1188,7 +1291,7 @@ class ContainerTest(DockerClientTest):
with mock.patch('docker.Client._stream_raw_result',
m):
self.client.logs(fake_api.FAKE_CONTAINER_ID,
- stream=True)
+ follow=True, stream=True)
self.assertTrue(m.called)
fake_request.assert_called_with(
@@ -1407,3 +1510,21 @@ class ContainerTest(DockerClientTest):
params={'ps_args': 'waux'},
timeout=DEFAULT_TIMEOUT_SECONDS
)
+
+ @requires_api_version('1.22')
+ def test_container_update(self):
+ self.client.update_container(
+ fake_api.FAKE_CONTAINER_ID, mem_limit='2k', cpu_shares=124,
+ blkio_weight=345
+ )
+ args = fake_request.call_args
+ self.assertEqual(
+ args[0][1], url_prefix + 'containers/3cc2351ab11b/update'
+ )
+ self.assertEqual(
+ json.loads(args[1]['data']),
+ {'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345}
+ )
+ self.assertEqual(
+ args[1]['headers']['Content-Type'], 'application/json'
+ )
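
The logging tests above reflect the split between stream= and the new follow= flag on Client.logs(): stream=True returns a generator, while follow= controls whether the daemon keeps the connection open for new output (stream=True without an explicit follow still implies follow=1 for backwards compatibility, as test_log_following_backwards shows). A sketch, where the container ID is hypothetical:

import docker

client = docker.Client(version='auto')
cid = 'my-container'  # hypothetical container name or ID

# one-shot snapshot of the last lines, returned as bytes
snapshot = client.logs(cid, stream=False, follow=False, tail=10)

# generator that keeps yielding chunks as the container writes more output
for chunk in client.logs(cid, stream=True, follow=True):
    print(chunk)
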
diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py
index 8852da0..9952595 100644
--- a/tests/unit/fake_api.py
+++ b/tests/unit/fake_api.py
@@ -441,6 +441,11 @@ def get_fake_volume():
def fake_remove_volume():
return 204, None
+
+def post_fake_update_container():
+ return 200, {'Warnings': []}
+
+
# Maps real api url to fake response callback
prefix = 'http+docker://localunixsocket'
fake_responses = {
@@ -478,6 +483,8 @@ fake_responses = {
get_fake_diff,
'{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix):
get_fake_export,
+ '{1}/{0}/containers/3cc2351ab11b/update'.format(CURRENT_VERSION, prefix):
+ post_fake_update_container,
'{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix):
post_fake_exec_create,
'{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix):
diff --git a/tests/unit/image_test.py b/tests/unit/image_test.py
index a46e48e..8fd894c 100644
--- a/tests/unit/image_test.py
+++ b/tests/unit/image_test.py
@@ -101,7 +101,8 @@ class ImageTest(DockerClientTest):
'comment': None,
'tag': None,
'container': '3cc2351ab11b',
- 'author': None
+ 'author': None,
+ 'changes': None
},
timeout=DEFAULT_TIMEOUT_SECONDS
)
diff --git a/tests/unit/ssladapter_test.py b/tests/unit/ssladapter_test.py
new file mode 100644
index 0000000..2ad1cad
--- /dev/null
+++ b/tests/unit/ssladapter_test.py
@@ -0,0 +1,79 @@
+from docker.ssladapter import ssladapter
+
+try:
+ from backports.ssl_match_hostname import (
+ match_hostname, CertificateError
+ )
+except ImportError:
+ from ssl import (
+ match_hostname, CertificateError
+ )
+
+try:
+ from ssl import OP_NO_SSLv3, OP_NO_SSLv2, OP_NO_TLSv1
+except ImportError:
+ OP_NO_SSLv2 = 0x1000000
+ OP_NO_SSLv3 = 0x2000000
+ OP_NO_TLSv1 = 0x4000000
+
+from .. import base
+
+
+class SSLAdapterTest(base.BaseTestCase):
+ def test_only_uses_tls(self):
+ ssl_context = ssladapter.urllib3.util.ssl_.create_urllib3_context()
+
+ assert ssl_context.options & OP_NO_SSLv3
+ assert ssl_context.options & OP_NO_SSLv2
+ assert not ssl_context.options & OP_NO_TLSv1
+
+
+class MatchHostnameTest(base.BaseTestCase):
+ cert = {
+ 'issuer': (
+ (('countryName', u'US'),),
+ (('stateOrProvinceName', u'California'),),
+ (('localityName', u'San Francisco'),),
+ (('organizationName', u'Docker Inc'),),
+ (('organizationalUnitName', u'Docker-Python'),),
+ (('commonName', u'localhost'),),
+ (('emailAddress', u'info@docker.com'),)
+ ),
+ 'notAfter': 'Mar 25 23:08:23 2030 GMT',
+ 'notBefore': u'Mar 25 23:08:23 2016 GMT',
+ 'serialNumber': u'BD5F894C839C548F',
+ 'subject': (
+ (('countryName', u'US'),),
+ (('stateOrProvinceName', u'California'),),
+ (('localityName', u'San Francisco'),),
+ (('organizationName', u'Docker Inc'),),
+ (('organizationalUnitName', u'Docker-Python'),),
+ (('commonName', u'localhost'),),
+ (('emailAddress', u'info@docker.com'),)
+ ),
+ 'subjectAltName': (
+ ('DNS', u'localhost'),
+ ('DNS', u'*.gensokyo.jp'),
+ ('IP Address', u'127.0.0.1'),
+ ),
+ 'version': 3
+ }
+
+ def test_match_ip_address_success(self):
+ assert match_hostname(self.cert, '127.0.0.1') is None
+
+ def test_match_localhost_success(self):
+ assert match_hostname(self.cert, 'localhost') is None
+
+ def test_match_dns_success(self):
+ assert match_hostname(self.cert, 'touhou.gensokyo.jp') is None
+
+ def test_match_ip_address_failure(self):
+ self.assertRaises(
+ CertificateError, match_hostname, self.cert, '192.168.0.25'
+ )
+
+ def test_match_dns_failure(self):
+ self.assertRaises(
+ CertificateError, match_hostname, self.cert, 'foobar.co.uk'
+ )
diff --git a/tests/unit/testdata/certs/ca.pem b/tests/unit/testdata/certs/ca.pem
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/testdata/certs/ca.pem
diff --git a/tests/unit/testdata/certs/cert.pem b/tests/unit/testdata/certs/cert.pem
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/testdata/certs/cert.pem
diff --git a/tests/unit/testdata/certs/key.pem b/tests/unit/testdata/certs/key.pem
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/testdata/certs/key.pem
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 87796d1..128778f 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -12,17 +12,9 @@ import tempfile
import pytest
import six
-try:
- from ssl import OP_NO_SSLv3, OP_NO_SSLv2, OP_NO_TLSv1
-except ImportError:
- OP_NO_SSLv2 = 0x1000000
- OP_NO_SSLv3 = 0x2000000
- OP_NO_TLSv1 = 0x4000000
-
from docker.client import Client
from docker.constants import DEFAULT_DOCKER_API_VERSION
from docker.errors import DockerException, InvalidVersion
-from docker.ssladapter import ssladapter
from docker.utils import (
parse_repository_tag, parse_host, convert_filters, kwargs_from_env,
create_host_config, Ulimit, LogConfig, parse_bytes, parse_env_file,
@@ -72,6 +64,25 @@ class HostConfigTest(base.BaseTestCase):
config = create_host_config(version='1.20', cpu_period=1999)
self.assertEqual(config.get('CpuPeriod'), 1999)
+ def test_create_host_config_with_blkio_constraints(self):
+ blkio_rate = [{"Path": "/dev/sda", "Rate": 1000}]
+ config = create_host_config(version='1.22',
+ blkio_weight=1999,
+ blkio_weight_device=blkio_rate,
+ device_read_bps=blkio_rate,
+ device_write_bps=blkio_rate,
+ device_read_iops=blkio_rate,
+ device_write_iops=blkio_rate)
+
+ self.assertEqual(config.get('BlkioWeight'), 1999)
+ self.assertTrue(config.get('BlkioWeightDevice') is blkio_rate)
+ self.assertTrue(config.get('BlkioDeviceReadBps') is blkio_rate)
+ self.assertTrue(config.get('BlkioDeviceWriteBps') is blkio_rate)
+ self.assertTrue(config.get('BlkioDeviceReadIOps') is blkio_rate)
+ self.assertTrue(config.get('BlkioDeviceWriteIOps') is blkio_rate)
+ self.assertEqual(blkio_rate[0]['Path'], "/dev/sda")
+ self.assertEqual(blkio_rate[0]['Rate'], 1000)
+
def test_create_host_config_with_shm_size(self):
config = create_host_config(version='1.22', shm_size=67108864)
self.assertEqual(config.get('ShmSize'), 67108864)
@@ -87,6 +98,16 @@ class HostConfigTest(base.BaseTestCase):
InvalidVersion, lambda: create_host_config(version='1.18.3',
oom_kill_disable=True))
+ def test_create_host_config_with_oom_score_adj(self):
+ config = create_host_config(version='1.22', oom_score_adj=100)
+ self.assertEqual(config.get('OomScoreAdj'), 100)
+ self.assertRaises(
+ InvalidVersion, lambda: create_host_config(version='1.21',
+ oom_score_adj=100))
+ self.assertRaises(
+ TypeError, lambda: create_host_config(version='1.22',
+ oom_score_adj='100'))
+
def test_create_endpoint_config_with_aliases(self):
config = create_endpoint_config(version='1.22', aliases=['foo', 'bar'])
assert config == {'Aliases': ['foo', 'bar']}
@@ -228,19 +249,7 @@ class KwargsFromEnvTest(base.BaseTestCase):
DOCKER_TLS_VERIFY='')
os.environ.pop('DOCKER_CERT_PATH', None)
kwargs = kwargs_from_env(assert_hostname=True)
- self.assertEqual('https://192.168.59.103:2376', kwargs['base_url'])
- self.assertTrue('ca.pem' in kwargs['tls'].ca_cert)
- self.assertTrue('cert.pem' in kwargs['tls'].cert[0])
- self.assertTrue('key.pem' in kwargs['tls'].cert[1])
- self.assertEqual(True, kwargs['tls'].assert_hostname)
- self.assertEqual(False, kwargs['tls'].verify)
- try:
- client = Client(**kwargs)
- self.assertEqual(kwargs['base_url'], client.base_url)
- self.assertEqual(kwargs['tls'].cert, client.cert)
- self.assertFalse(kwargs['tls'].verify)
- except TypeError as e:
- self.fail(e)
+ self.assertEqual('tcp://192.168.59.103:2376', kwargs['base_url'])
def test_kwargs_from_env_no_cert_path(self):
try:
@@ -261,6 +270,20 @@ class KwargsFromEnvTest(base.BaseTestCase):
if temp_dir:
shutil.rmtree(temp_dir)
+ def test_kwargs_from_env_alternate_env(self):
+ # Values in os.environ are entirely ignored if an alternate is
+ # provided
+ os.environ.update(
+ DOCKER_HOST='tcp://192.168.59.103:2376',
+ DOCKER_CERT_PATH=TEST_CERT_DIR,
+ DOCKER_TLS_VERIFY=''
+ )
+ kwargs = kwargs_from_env(environment={
+ 'DOCKER_HOST': 'http://docker.gensokyo.jp:2581',
+ })
+ assert 'http://docker.gensokyo.jp:2581' == kwargs['base_url']
+ assert 'tls' not in kwargs
+
class ConverVolumeBindsTest(base.BaseTestCase):
def test_convert_volume_binds_empty(self):
@@ -295,56 +318,30 @@ class ConverVolumeBindsTest(base.BaseTestCase):
self.assertEqual(convert_volume_binds(data), ['/mnt/vol1:/data:rw'])
def test_convert_volume_binds_unicode_bytes_input(self):
- if six.PY2:
- expected = [unicode('/mnt/지연:/unicode/박:rw', 'utf-8')]
-
- data = {
- '/mnt/지연': {
- 'bind': '/unicode/박',
- 'mode': 'rw'
- }
- }
- self.assertEqual(
- convert_volume_binds(data), expected
- )
- else:
- expected = ['/mnt/지연:/unicode/박:rw']
+ expected = [u'/mnt/지연:/unicode/박:rw']
- data = {
- bytes('/mnt/지연', 'utf-8'): {
- 'bind': bytes('/unicode/박', 'utf-8'),
- 'mode': 'rw'
- }
+ data = {
+ u'/mnt/지연'.encode('utf-8'): {
+ 'bind': u'/unicode/박'.encode('utf-8'),
+ 'mode': 'rw'
}
- self.assertEqual(
- convert_volume_binds(data), expected
- )
+ }
+ self.assertEqual(
+ convert_volume_binds(data), expected
+ )
def test_convert_volume_binds_unicode_unicode_input(self):
- if six.PY2:
- expected = [unicode('/mnt/지연:/unicode/박:rw', 'utf-8')]
-
- data = {
- unicode('/mnt/지연', 'utf-8'): {
- 'bind': unicode('/unicode/박', 'utf-8'),
- 'mode': 'rw'
- }
- }
- self.assertEqual(
- convert_volume_binds(data), expected
- )
- else:
- expected = ['/mnt/지연:/unicode/박:rw']
+ expected = [u'/mnt/지연:/unicode/박:rw']
- data = {
- '/mnt/지연': {
- 'bind': '/unicode/박',
- 'mode': 'rw'
- }
+ data = {
+ u'/mnt/지연': {
+ 'bind': u'/unicode/박',
+ 'mode': 'rw'
}
- self.assertEqual(
- convert_volume_binds(data), expected
- )
+ }
+ self.assertEqual(
+ convert_volume_binds(data), expected
+ )
class ParseEnvFileTest(base.BaseTestCase):
@@ -367,6 +364,14 @@ class ParseEnvFileTest(base.BaseTestCase):
{'USER': 'jdoe', 'PASS': 'secret'})
os.unlink(env_file)
+ def test_parse_env_file_with_equals_character(self):
+ env_file = self.generate_tempfile(
+ file_content='USER=jdoe\nPASS=sec==ret')
+ get_parse_env_file = parse_env_file(env_file)
+ self.assertEqual(get_parse_env_file,
+ {'USER': 'jdoe', 'PASS': 'sec==ret'})
+ os.unlink(env_file)
+
def test_parse_env_file_commented_line(self):
env_file = self.generate_tempfile(
file_content='USER=jdoe\n#PASS=secret')
@@ -402,6 +407,7 @@ class ParseHostTest(base.BaseTestCase):
'somehost.net:80/service/swarm': (
'http://somehost.net:80/service/swarm'
),
+ 'npipe:////./pipe/docker_engine': 'npipe:////./pipe/docker_engine',
}
for host in invalid_hosts:
@@ -416,15 +422,18 @@ class ParseHostTest(base.BaseTestCase):
tcp_port = 'http://127.0.0.1:2375'
for val in [None, '']:
- for platform in ['darwin', 'linux2', None]:
- assert parse_host(val, platform) == unix_socket
-
- assert parse_host(val, 'win32') == tcp_port
+ assert parse_host(val, is_win32=False) == unix_socket
+ assert parse_host(val, is_win32=True) == tcp_port
def test_parse_host_tls(self):
host_value = 'myhost.docker.net:3348'
expected_result = 'https://myhost.docker.net:3348'
- self.assertEqual(parse_host(host_value, None, True), expected_result)
+ assert parse_host(host_value, tls=True) == expected_result
+
+ def test_parse_host_tls_tcp_proto(self):
+ host_value = 'tcp://myhost.docker.net:3348'
+ expected_result = 'https://myhost.docker.net:3348'
+ assert parse_host(host_value, tls=True) == expected_result
class ParseRepositoryTagTest(base.BaseTestCase):
@@ -595,13 +604,7 @@ class UtilsTest(base.BaseTestCase):
class SplitCommandTest(base.BaseTestCase):
def test_split_command_with_unicode(self):
- if six.PY2:
- self.assertEqual(
- split_command(unicode('echo μμ', 'utf-8')),
- ['echo', 'μμ']
- )
- else:
- self.assertEqual(split_command('echo μμ'), ['echo', 'μμ'])
+ self.assertEqual(split_command(u'echo μμ'), ['echo', 'μμ'])
@pytest.mark.skipif(six.PY3, reason="shlex doesn't support bytes in py3")
def test_split_command_with_bytes(self):
@@ -785,6 +788,9 @@ class ExcludePathsTest(base.BaseTestCase):
def test_single_filename(self):
assert self.exclude(['a.py']) == self.all_paths - set(['a.py'])
+ def test_single_filename_leading_dot_slash(self):
+ assert self.exclude(['./a.py']) == self.all_paths - set(['a.py'])
+
# As odd as it sounds, a filename pattern with a trailing slash on the
# end *will* result in that file being excluded.
def test_single_filename_trailing_slash(self):
@@ -814,6 +820,11 @@ class ExcludePathsTest(base.BaseTestCase):
def test_single_subdir_single_filename(self):
assert self.exclude(['foo/a.py']) == self.all_paths - set(['foo/a.py'])
+ def test_single_subdir_with_path_traversal(self):
+ assert self.exclude(['foo/whoops/../a.py']) == self.all_paths - set([
+ 'foo/a.py',
+ ])
+
def test_single_subdir_wildcard_filename(self):
assert self.exclude(['foo/*.py']) == self.all_paths - set([
'foo/a.py', 'foo/b.py',
@@ -945,12 +956,3 @@ class TarTest(base.Cleanup, base.BaseTestCase):
self.assertEqual(
sorted(tar_data.getnames()), ['bar', 'bar/foo', 'foo']
)
-
-
-class SSLAdapterTest(base.BaseTestCase):
- def test_only_uses_tls(self):
- ssl_context = ssladapter.urllib3.util.ssl_.create_urllib3_context()
-
- assert ssl_context.options & OP_NO_SSLv3
- assert ssl_context.options & OP_NO_SSLv2
- assert not ssl_context.options & OP_NO_TLSv1
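
The utils changes replace the platform argument of parse_host() with an is_win32 flag and allow kwargs_from_env() to read from an explicit environment mapping instead of os.environ. A sketch with illustrative values:

from docker.utils import parse_host, kwargs_from_env

# the default depends on the platform flag: unix socket vs. local TCP port
print(parse_host(None, is_win32=False))  # http+unix socket default
print(parse_host(None, is_win32=True))   # http://127.0.0.1:2375
print(parse_host('tcp://myhost.docker.net:3348', tls=True))  # https://...

# read connection settings from an explicit mapping, ignoring os.environ
kwargs = kwargs_from_env(environment={
    'DOCKER_HOST': 'tcp://192.168.59.103:2375',
})
print(kwargs['base_url'])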