Diffstat (limited to 'docker/api')
-rw-r--r--   docker/api/build.py        69
-rw-r--r--   docker/api/client.py      167
-rw-r--r--   docker/api/config.py       17
-rw-r--r--   docker/api/container.py   192
-rw-r--r--   docker/api/daemon.py       30
-rw-r--r--   docker/api/exec_api.py     20
-rw-r--r--   docker/api/image.py       105
-rw-r--r--   docker/api/network.py      16
-rw-r--r--   docker/api/plugin.py        6
-rw-r--r--   docker/api/secret.py        9
-rw-r--r--   docker/api/service.py      66
-rw-r--r--   docker/api/swarm.py       107
-rw-r--r--   docker/api/volume.py        8
13 files changed, 526 insertions, 286 deletions
diff --git a/docker/api/build.py b/docker/api/build.py
index 419255f..aac43c4 100644
--- a/docker/api/build.py
+++ b/docker/api/build.py
@@ -12,14 +12,15 @@ from .. import utils
log = logging.getLogger(__name__)
-class BuildApiMixin(object):
+class BuildApiMixin:
def build(self, path=None, tag=None, quiet=False, fileobj=None,
nocache=False, rm=False, timeout=None,
custom_context=False, encoding=None, pull=False,
forcerm=False, dockerfile=None, container_limits=None,
decode=False, buildargs=None, gzip=False, shmsize=None,
labels=None, cache_from=None, target=None, network_mode=None,
- squash=None, extra_hosts=None, platform=None, isolation=None):
+ squash=None, extra_hosts=None, platform=None, isolation=None,
+ use_config_proxy=True):
"""
Similar to the ``docker build`` command. Either ``path`` or ``fileobj``
needs to be set. ``path`` can be a local path (to a directory
@@ -103,6 +104,10 @@ class BuildApiMixin(object):
platform (str): Platform in the format ``os[/arch[/variant]]``
isolation (str): Isolation technology used during build.
Default: `None`.
+ use_config_proxy (bool): If ``True``, and if the docker client
+ configuration file (``~/.docker/config.json`` by default)
+ contains a proxy configuration, the corresponding environment
+ variables will be set in the container being built.
Returns:
A generator for the build output.
@@ -116,6 +121,7 @@ class BuildApiMixin(object):
remote = context = None
headers = {}
container_limits = container_limits or {}
+ buildargs = buildargs or {}
if path is None and fileobj is None:
raise TypeError("Either path or fileobj needs to be provided.")
if gzip and encoding is not None:
@@ -126,7 +132,7 @@ class BuildApiMixin(object):
for key in container_limits.keys():
if key not in constants.CONTAINER_LIMITS_KEYS:
raise errors.DockerException(
- 'Invalid container_limits key {0}'.format(key)
+ f'Invalid container_limits key {key}'
)
if custom_context:
@@ -144,7 +150,7 @@ class BuildApiMixin(object):
dockerignore = os.path.join(path, '.dockerignore')
exclude = None
if os.path.exists(dockerignore):
- with open(dockerignore, 'r') as f:
+ with open(dockerignore) as f:
exclude = list(filter(
lambda x: x != '' and x[0] != '#',
[l.strip() for l in f.read().splitlines()]
@@ -168,6 +174,10 @@ class BuildApiMixin(object):
}
params.update(container_limits)
+ if use_config_proxy:
+ proxy_args = self._proxy_configs.get_environment()
+ for k, v in proxy_args.items():
+ buildargs.setdefault(k, v)
if buildargs:
params.update({'buildargs': json.dumps(buildargs)})
@@ -286,40 +296,32 @@ class BuildApiMixin(object):
# If we don't have any auth data so far, try reloading the config
# file one more time in case anything showed up in there.
- if not self._auth_configs:
+ if not self._auth_configs or self._auth_configs.is_empty:
log.debug("No auth config in memory - loading from filesystem")
- self._auth_configs = auth.load_config()
+ self._auth_configs = auth.load_config(
+ credstore_env=self.credstore_env
+ )
# Send the full auth configuration (if any exists), since the build
# could use any (or all) of the registries.
if self._auth_configs:
- auth_data = {}
- if self._auth_configs.get('credsStore'):
- # Using a credentials store, we need to retrieve the
- # credentials for each registry listed in the config.json file
- # Matches CLI behavior: https://github.com/docker/docker/blob/
- # 67b85f9d26f1b0b2b240f2d794748fac0f45243c/cliconfig/
- # credentials/native_store.go#L68-L83
- for registry in self._auth_configs.get('auths', {}).keys():
- auth_data[registry] = auth.resolve_authconfig(
- self._auth_configs, registry,
- credstore_env=self.credstore_env,
- )
- else:
- auth_data = self._auth_configs.get('auths', {}).copy()
- # See https://github.com/docker/docker-py/issues/1683
- if auth.INDEX_NAME in auth_data:
- auth_data[auth.INDEX_URL] = auth_data[auth.INDEX_NAME]
+ auth_data = self._auth_configs.get_all_credentials()
+
+ # See https://github.com/docker/docker-py/issues/1683
+ if (auth.INDEX_URL not in auth_data and
+ auth.INDEX_NAME in auth_data):
+ auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
log.debug(
- 'Sending auth config ({0})'.format(
+ 'Sending auth config ({})'.format(
', '.join(repr(k) for k in auth_data.keys())
)
)
- headers['X-Registry-Config'] = auth.encode_header(
- auth_data
- )
+ if auth_data:
+ headers['X-Registry-Config'] = auth.encode_header(
+ auth_data
+ )
else:
log.debug('No auth config found')
@@ -331,13 +333,20 @@ def process_dockerfile(dockerfile, path):
abs_dockerfile = dockerfile
if not os.path.isabs(dockerfile):
abs_dockerfile = os.path.join(path, dockerfile)
-
+ if constants.IS_WINDOWS_PLATFORM and path.startswith(
+ constants.WINDOWS_LONGPATH_PREFIX):
+ abs_dockerfile = '{}{}'.format(
+ constants.WINDOWS_LONGPATH_PREFIX,
+ os.path.normpath(
+ abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):]
+ )
+ )
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
os.path.relpath(abs_dockerfile, path).startswith('..')):
# Dockerfile not in context - read data to insert into tar later
- with open(abs_dockerfile, 'r') as df:
+ with open(abs_dockerfile) as df:
return (
- '.dockerfile.{0:x}'.format(random.getrandbits(160)),
+ f'.dockerfile.{random.getrandbits(160):x}',
df.read()
)
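
A minimal usage sketch of the new ``use_config_proxy`` build flag, assuming a
local Docker socket and a placeholder build context and tag:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    # Proxy variables (HTTP_PROXY, NO_PROXY, ...) found in ~/.docker/config.json
    # are injected as build args unless the caller already set them in buildargs.
    for chunk in client.build(
        path='./app',                  # hypothetical build context
        tag='example/app:latest',      # hypothetical tag
        use_config_proxy=True,
        decode=True,                   # yield dicts instead of raw JSON lines
    ):
        if 'stream' in chunk:
            print(chunk['stream'], end='')
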
diff --git a/docker/api/client.py b/docker/api/client.py
index 91da1c8..2667922 100644
--- a/docker/api/client.py
+++ b/docker/api/client.py
@@ -1,12 +1,25 @@
import json
import struct
+import urllib
from functools import partial
import requests
import requests.exceptions
-import six
import websocket
+from .. import auth
+from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH,
+ DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS,
+ DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM,
+ MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES)
+from ..errors import (DockerException, InvalidVersion, TLSParameterError,
+ create_api_error_from_http_exception)
+from ..tls import TLSConfig
+from ..transport import SSLHTTPAdapter, UnixHTTPAdapter
+from ..utils import check_resource, config, update_headers, utils
+from ..utils.json_stream import json_stream
+from ..utils.proxy import ProxyConfig
+from ..utils.socket import consume_socket_output, demux_adaptor, frames_iter
from .build import BuildApiMixin
from .config import ConfigApiMixin
from .container import ContainerApiMixin
@@ -19,23 +32,14 @@ from .secret import SecretApiMixin
from .service import ServiceApiMixin
from .swarm import SwarmApiMixin
from .volume import VolumeApiMixin
-from .. import auth
-from ..constants import (
- DEFAULT_TIMEOUT_SECONDS, DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM,
- DEFAULT_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES, DEFAULT_NUM_POOLS,
- MINIMUM_DOCKER_API_VERSION
-)
-from ..errors import (
- DockerException, InvalidVersion, TLSParameterError,
- create_api_error_from_http_exception
-)
-from ..tls import TLSConfig
-from ..transport import SSLAdapter, UnixAdapter
-from ..utils import utils, check_resource, update_headers, config
-from ..utils.socket import frames_iter, socket_raw_iter
-from ..utils.json_stream import json_stream
+
+try:
+ from ..transport import NpipeHTTPAdapter
+except ImportError:
+ pass
+
try:
- from ..transport import NpipeAdapter
+ from ..transport import SSHHTTPAdapter
except ImportError:
pass
@@ -76,7 +80,7 @@ class APIClient(
base_url (str): URL to the Docker server. For example,
``unix:///var/run/docker.sock`` or ``tcp://127.0.0.1:1234``.
version (str): The version of the API to use. Set to ``auto`` to
- automatically detect the server's version. Default: ``1.30``
+ automatically detect the server's version. Default: ``1.35``
timeout (int): Default timeout for API calls, in seconds.
tls (bool or :py:class:`~docker.tls.TLSConfig`): Enable TLS. Pass
``True`` to enable it with default options, or pass a
@@ -85,6 +89,11 @@ class APIClient(
user_agent (str): Set a custom user agent for requests to the server.
credstore_env (dict): Override environment variables when calling the
credential store process.
+ use_ssh_client (bool): If set to `True`, an ssh connection is made
+ via shelling out to the ssh client. Ensure the ssh client is
+ installed and configured on the host.
+ max_pool_size (int): The maximum number of connections
+ to save in the pool.
"""
__attrs__ = requests.Session.__attrs__ + ['_auth_configs',
@@ -95,9 +104,10 @@ class APIClient(
def __init__(self, base_url=None, version=None,
timeout=DEFAULT_TIMEOUT_SECONDS, tls=False,
- user_agent=DEFAULT_USER_AGENT, num_pools=DEFAULT_NUM_POOLS,
- credstore_env=None):
- super(APIClient, self).__init__()
+ user_agent=DEFAULT_USER_AGENT, num_pools=None,
+ credstore_env=None, use_ssh_client=False,
+ max_pool_size=DEFAULT_MAX_POOL_SIZE):
+ super().__init__()
if tls and not base_url:
raise TLSParameterError(
@@ -109,17 +119,31 @@ class APIClient(
self.headers['User-Agent'] = user_agent
self._general_configs = config.load_general_config()
+
+ proxy_config = self._general_configs.get('proxies', {})
+ try:
+ proxies = proxy_config[base_url]
+ except KeyError:
+ proxies = proxy_config.get('default', {})
+
+ self._proxy_configs = ProxyConfig.from_dict(proxies)
+
self._auth_configs = auth.load_config(
- config_dict=self._general_configs
+ config_dict=self._general_configs, credstore_env=credstore_env,
)
self.credstore_env = credstore_env
base_url = utils.parse_host(
base_url, IS_WINDOWS_PLATFORM, tls=bool(tls)
)
+ # SSH has a different default for num_pools to all other adapters
+ num_pools = num_pools or DEFAULT_NUM_POOLS_SSH if \
+ base_url.startswith('ssh://') else DEFAULT_NUM_POOLS
+
if base_url.startswith('http+unix://'):
- self._custom_adapter = UnixAdapter(
- base_url, timeout, pool_connections=num_pools
+ self._custom_adapter = UnixHTTPAdapter(
+ base_url, timeout, pool_connections=num_pools,
+ max_pool_size=max_pool_size
)
self.mount('http+docker://', self._custom_adapter)
self._unmount('http://', 'https://')
@@ -132,8 +156,9 @@ class APIClient(
'The npipe:// protocol is only supported on Windows'
)
try:
- self._custom_adapter = NpipeAdapter(
- base_url, timeout, pool_connections=num_pools
+ self._custom_adapter = NpipeHTTPAdapter(
+ base_url, timeout, pool_connections=num_pools,
+ max_pool_size=max_pool_size
)
except NameError:
raise DockerException(
@@ -141,26 +166,40 @@ class APIClient(
)
self.mount('http+docker://', self._custom_adapter)
self.base_url = 'http+docker://localnpipe'
+ elif base_url.startswith('ssh://'):
+ try:
+ self._custom_adapter = SSHHTTPAdapter(
+ base_url, timeout, pool_connections=num_pools,
+ max_pool_size=max_pool_size, shell_out=use_ssh_client
+ )
+ except NameError:
+ raise DockerException(
+ 'Install paramiko package to enable ssh:// support'
+ )
+ self.mount('http+docker://ssh', self._custom_adapter)
+ self._unmount('http://', 'https://')
+ self.base_url = 'http+docker://ssh'
else:
# Use SSLAdapter for the ability to specify SSL version
if isinstance(tls, TLSConfig):
tls.configure_client(self)
elif tls:
- self._custom_adapter = SSLAdapter(pool_connections=num_pools)
+ self._custom_adapter = SSLHTTPAdapter(
+ pool_connections=num_pools)
self.mount('https://', self._custom_adapter)
self.base_url = base_url
# version detection needs to be after unix adapter mounting
- if version is None:
- self._version = DEFAULT_DOCKER_API_VERSION
- elif isinstance(version, six.string_types):
- if version.lower() == 'auto':
- self._version = self._retrieve_server_version()
- else:
- self._version = version
+ if version is None or (isinstance(
+ version,
+ str
+ ) and version.lower() == 'auto'):
+ self._version = self._retrieve_server_version()
else:
+ self._version = version
+ if not isinstance(self._version, str):
raise DockerException(
- 'Version parameter must be a string or None. Found {0}'.format(
+ 'Version parameter must be a string or None. Found {}'.format(
type(version).__name__
)
)
@@ -180,7 +219,7 @@ class APIClient(
)
except Exception as e:
raise DockerException(
- 'Error while fetching server API version: {0}'.format(e)
+ f'Error while fetching server API version: {e}'
)
def _set_request_timeout(self, kwargs):
@@ -207,21 +246,21 @@ class APIClient(
def _url(self, pathfmt, *args, **kwargs):
for arg in args:
- if not isinstance(arg, six.string_types):
+ if not isinstance(arg, str):
raise ValueError(
- 'Expected a string but found {0} ({1}) '
+ 'Expected a string but found {} ({}) '
'instead'.format(arg, type(arg))
)
- quote_f = partial(six.moves.urllib.parse.quote, safe="/:")
+ quote_f = partial(urllib.parse.quote, safe="/:")
args = map(quote_f, args)
if kwargs.get('versioned_api', True):
- return '{0}/v{1}{2}'.format(
+ return '{}/v{}{}'.format(
self.base_url, self._version, pathfmt.format(*args)
)
else:
- return '{0}{1}'.format(self.base_url, pathfmt.format(*args))
+ return f'{self.base_url}{pathfmt.format(*args)}'
def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred."""
@@ -245,7 +284,7 @@ class APIClient(
# so we do this disgusting thing here.
data2 = {}
if data is not None and isinstance(data, dict):
- for k, v in six.iteritems(data):
+ for k, v in iter(data.items()):
if v is not None:
data2[k] = v
elif data is not None:
@@ -279,12 +318,12 @@ class APIClient(
self._raise_for_status(response)
if self.base_url == "http+docker://localnpipe":
sock = response.raw._fp.fp.raw.sock
- elif six.PY3:
+ elif self.base_url.startswith('http+docker://ssh'):
+ sock = response.raw._fp.fp.channel
+ else:
sock = response.raw._fp.fp.raw
if self.base_url.startswith("https://"):
sock = sock._sock
- else:
- sock = response.raw._fp.fp._sock
try:
# Keep a reference to the response to stop it being garbage
# collected. If the response is garbage collected, it will
@@ -302,8 +341,7 @@ class APIClient(
if response.raw._fp.chunked:
if decode:
- for chunk in json_stream(self._stream_helper(response, False)):
- yield chunk
+ yield from json_stream(self._stream_helper(response, False))
else:
reader = response.raw
while not reader.closed:
@@ -359,22 +397,31 @@ class APIClient(
def _stream_raw_result(self, response, chunk_size=1, decode=True):
''' Stream result for TTY-enabled container and raw binary data'''
self._raise_for_status(response)
- for out in response.iter_content(chunk_size, decode):
- yield out
- def _read_from_socket(self, response, stream, tty=False):
+ # Disable timeout on the underlying socket to prevent
+ # Read timed out(s) for long running processes
+ socket = self._get_raw_response_socket(response)
+ self._disable_socket_timeout(socket)
+
+ yield from response.iter_content(chunk_size, decode)
+
+ def _read_from_socket(self, response, stream, tty=True, demux=False):
socket = self._get_raw_response_socket(response)
- gen = None
- if tty is False:
- gen = frames_iter(socket)
+ gen = frames_iter(socket, tty)
+
+ if demux:
+ # The generator will output tuples (stdout, stderr)
+ gen = (demux_adaptor(*frame) for frame in gen)
else:
- gen = socket_raw_iter(socket)
+ # The generator will output strings
+ gen = (data for (_, data) in gen)
if stream:
return gen
else:
- return six.binary_type().join(gen)
+ # Wait for all the frames, concatenate them, and return the result
+ return consume_socket_output(gen, demux=demux)
def _disable_socket_timeout(self, socket):
""" Depending on the combination of python version and whether we're
@@ -420,7 +467,7 @@ class APIClient(
self._result(res, binary=True)
self._raise_for_status(res)
- sep = six.binary_type()
+ sep = b''
if stream:
return self._multiplexed_response_stream_helper(res)
else:
@@ -434,7 +481,7 @@ class APIClient(
def get_adapter(self, url):
try:
- return super(APIClient, self).get_adapter(url)
+ return super().get_adapter(url)
except requests.exceptions.InvalidSchema as e:
if self._custom_adapter:
return self._custom_adapter
@@ -452,9 +499,11 @@ class APIClient(
Args:
dockercfg_path (str): Use a custom path for the Docker config file
(default ``$HOME/.docker/config.json`` if present,
- otherwise``$HOME/.dockercfg``)
+ otherwise ``$HOME/.dockercfg``)
Returns:
None
"""
- self._auth_configs = auth.load_config(dockercfg_path)
+ self._auth_configs = auth.load_config(
+ dockercfg_path, credstore_env=self.credstore_env
+ )
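
A short sketch of constructing a client over the new ``ssh://`` transport,
assuming ``paramiko`` (or, with ``use_ssh_client=True``, a local ``ssh``
binary) is available; the host name is a placeholder:

    import docker

    # Over SSH the client falls back to a separate default for num_pools
    # (DEFAULT_NUM_POOLS_SSH); use_ssh_client=True shells out to `ssh`
    # instead of using paramiko.
    client = docker.APIClient(
        base_url='ssh://user@docker-host.example.com',  # placeholder host
        version='auto',        # probe the server for its API version
        use_ssh_client=True,
        max_pool_size=10,
    )
    print(client.version()['ApiVersion'])
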
diff --git a/docker/api/config.py b/docker/api/config.py
index 767bef2..88c367e 100644
--- a/docker/api/config.py
+++ b/docker/api/config.py
@@ -1,13 +1,11 @@
import base64
-import six
-
from .. import utils
-class ConfigApiMixin(object):
+class ConfigApiMixin:
@utils.minimum_version('1.30')
- def create_config(self, name, data, labels=None):
+ def create_config(self, name, data, labels=None, templating=None):
"""
Create a config
@@ -15,6 +13,9 @@ class ConfigApiMixin(object):
name (string): Name of the config
data (bytes): Config data to be stored
labels (dict): A mapping of labels to assign to the config
+ templating (dict): dictionary containing the name of the
+ templating driver to be used expressed as
+ { name: <templating_driver_name>}
Returns (dict): ID of the newly created config
"""
@@ -22,12 +23,12 @@ class ConfigApiMixin(object):
data = data.encode('utf-8')
data = base64.b64encode(data)
- if six.PY3:
- data = data.decode('ascii')
+ data = data.decode('ascii')
body = {
'Data': data,
'Name': name,
- 'Labels': labels
+ 'Labels': labels,
+ 'Templating': templating
}
url = self._url('/configs/create')
@@ -42,7 +43,7 @@ class ConfigApiMixin(object):
Retrieve config metadata
Args:
- id (string): Full ID of the config to remove
+ id (string): Full ID of the config to inspect
Returns (dict): A dictionary of metadata
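
A sketch of the new ``templating`` parameter, assuming the daemon is running
in swarm mode; the config name and the ``golang`` driver name are illustrative:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    # The data is base64-encoded client-side; `templating` names the driver
    # that expands placeholders when the config is mounted into a task.
    created = client.create_config(
        name='app-settings',                        # hypothetical config name
        data=b'listen_port = {{ env "PORT" }}\n',
        labels={'app': 'example'},
        templating={'name': 'golang'},              # assumed driver name
    )
    print(created['ID'])
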
diff --git a/docker/api/container.py b/docker/api/container.py
index d4f75f5..83fcd4f 100644
--- a/docker/api/container.py
+++ b/docker/api/container.py
@@ -1,19 +1,19 @@
-import six
from datetime import datetime
from .. import errors
from .. import utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE
-from ..types import (
- CancellableStream, ContainerConfig, EndpointConfig, HostConfig,
- NetworkingConfig
-)
+from ..types import CancellableStream
+from ..types import ContainerConfig
+from ..types import EndpointConfig
+from ..types import HostConfig
+from ..types import NetworkingConfig
-class ContainerApiMixin(object):
+class ContainerApiMixin:
@utils.check_resource('container')
def attach(self, container, stdout=True, stderr=True,
- stream=False, logs=False):
+ stream=False, logs=False, demux=False):
"""
Attach to a container.
@@ -28,11 +28,15 @@ class ContainerApiMixin(object):
stream (bool): Return container output progressively as an iterator
of strings, rather than a single string.
logs (bool): Include the container's previous output.
+ demux (bool): Keep stdout and stderr separate.
Returns:
- By default, the container's output as a single string.
+ By default, the container's output as a single string (two if
+ ``demux=True``: one for stdout and one for stderr).
- If ``stream=True``, an iterator of output strings.
+ If ``stream=True``, an iterator of output strings. If
+ ``demux=True``, two iterators are returned: one for stdout and one
+ for stderr.
Raises:
:py:class:`docker.errors.APIError`
@@ -54,8 +58,7 @@ class ContainerApiMixin(object):
response = self._post(u, headers=headers, params=params, stream=True)
output = self._read_from_socket(
- response, stream, self._check_is_tty(container)
- )
+ response, stream, self._check_is_tty(container), demux=demux)
if stream:
return CancellableStream(output, response)
@@ -169,7 +172,8 @@ class ContainerApiMixin(object):
- `exited` (int): Only containers with specified exit code
- `status` (str): One of ``restarting``, ``running``,
``paused``, ``exited``
- - `label` (str): format either ``"key"`` or ``"key=value"``
+ - `label` (str|list): format either ``"key"``, ``"key=value"``
+ or a list of such.
- `id` (str): The id of the container.
- `name` (str): The name of the container.
- `ancestor` (str): Filter by container ancestor. Format of
@@ -218,7 +222,8 @@ class ContainerApiMixin(object):
working_dir=None, domainname=None, host_config=None,
mac_address=None, labels=None, stop_signal=None,
networking_config=None, healthcheck=None,
- stop_timeout=None, runtime=None):
+ stop_timeout=None, runtime=None,
+ use_config_proxy=True):
"""
Creates a container. Parameters are similar to those for the ``docker
run`` command except it doesn't support the attach options (``-a``).
@@ -237,9 +242,9 @@ class ContainerApiMixin(object):
.. code-block:: python
- container_id = cli.create_container(
+ container_id = client.api.create_container(
'busybox', 'ls', ports=[1111, 2222],
- host_config=cli.create_host_config(port_bindings={
+ host_config=client.api.create_host_config(port_bindings={
1111: 4567,
2222: None
})
@@ -251,22 +256,22 @@ class ContainerApiMixin(object):
.. code-block:: python
- cli.create_host_config(port_bindings={1111: ('127.0.0.1', 4567)})
+ client.api.create_host_config(port_bindings={1111: ('127.0.0.1', 4567)})
Or without host port assignment:
.. code-block:: python
- cli.create_host_config(port_bindings={1111: ('127.0.0.1',)})
+ client.api.create_host_config(port_bindings={1111: ('127.0.0.1',)})
If you wish to use UDP instead of TCP (default), you need to declare
ports as such in both the config and host config:
.. code-block:: python
- container_id = cli.create_container(
+ container_id = client.api.create_container(
'busybox', 'ls', ports=[(1111, 'udp'), 2222],
- host_config=cli.create_host_config(port_bindings={
+ host_config=client.api.create_host_config(port_bindings={
'1111/udp': 4567, 2222: None
})
)
@@ -276,7 +281,7 @@ class ContainerApiMixin(object):
.. code-block:: python
- cli.create_host_config(port_bindings={
+ client.api.create_host_config(port_bindings={
1111: [1234, 4567]
})
@@ -284,7 +289,7 @@ class ContainerApiMixin(object):
.. code-block:: python
- cli.create_host_config(port_bindings={
+ client.api.create_host_config(port_bindings={
1111: [
('192.168.0.100', 1234),
('192.168.0.101', 1234)
@@ -300,9 +305,9 @@ class ContainerApiMixin(object):
.. code-block:: python
- container_id = cli.create_container(
+ container_id = client.api.create_container(
'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'],
- host_config=cli.create_host_config(binds={
+ host_config=client.api.create_host_config(binds={
'/home/user1/': {
'bind': '/mnt/vol2',
'mode': 'rw',
@@ -319,9 +324,9 @@ class ContainerApiMixin(object):
.. code-block:: python
- container_id = cli.create_container(
+ container_id = client.api.create_container(
'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'],
- host_config=cli.create_host_config(binds=[
+ host_config=client.api.create_host_config(binds=[
'/home/user1/:/mnt/vol2',
'/var/www:/mnt/vol1:ro',
])
@@ -339,15 +344,15 @@ class ContainerApiMixin(object):
.. code-block:: python
- networking_config = docker_client.create_networking_config({
- 'network1': docker_client.create_endpoint_config(
+ networking_config = client.api.create_networking_config({
+ 'network1': client.api.create_endpoint_config(
ipv4_address='172.28.0.124',
aliases=['foo', 'bar'],
links=['container2']
)
})
- ctnr = docker_client.create_container(
+ ctnr = client.api.create_container(
img, command, networking_config=networking_config
)
@@ -387,6 +392,10 @@ class ContainerApiMixin(object):
runtime (str): Runtime to use with this container.
healthcheck (dict): Specify a test to perform to check that the
container is healthy.
+ use_config_proxy (bool): If ``True``, and if the docker client
+ configuration file (``~/.docker/config.json`` by default)
+ contains a proxy configuration, the corresponding environment
+ variables will be set in the container being created.
Returns:
A dictionary with an image 'Id' key and a 'Warnings' key.
@@ -397,9 +406,17 @@ class ContainerApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- if isinstance(volumes, six.string_types):
+ if isinstance(volumes, str):
volumes = [volumes, ]
+ if isinstance(environment, dict):
+ environment = utils.utils.format_environment(environment)
+
+ if use_config_proxy:
+ environment = self._proxy_configs.inject_proxy_environment(
+ environment
+ ) or None
+
config = self.create_container_config(
image, command, hostname, user, detach, stdin_open, tty,
ports, environment, volumes,
@@ -461,34 +478,33 @@ class ContainerApiMixin(object):
For example, ``/dev/sda:/dev/xvda:rwm`` allows the container
to have read-write access to the host's ``/dev/sda`` via a
node named ``/dev/xvda`` inside the container.
+ device_requests (:py:class:`list`): Expose host resources such as
+ GPUs to the container, as a list of
+ :py:class:`docker.types.DeviceRequest` instances.
dns (:py:class:`list`): Set custom DNS servers.
dns_opt (:py:class:`list`): Additional options to be added to the
container's ``resolv.conf`` file
dns_search (:py:class:`list`): DNS search domains.
- extra_hosts (dict): Addtional hostnames to resolve inside the
+ extra_hosts (dict): Additional hostnames to resolve inside the
container, as a mapping of hostname to IP address.
group_add (:py:class:`list`): List of additional group names and/or
IDs that the container process will run as.
init (bool): Run an init inside the container that forwards
signals and reaps processes
- init_path (str): Path to the docker-init binary
ipc_mode (str): Set the IPC mode for the container.
- isolation (str): Isolation technology to use. Default: `None`.
- links (dict or list of tuples): Either a dictionary mapping name
- to alias or as a list of ``(name, alias)`` tuples.
- log_config (dict): Logging configuration, as a dictionary with
- keys:
-
- - ``type`` The logging driver name.
- - ``config`` A dictionary of configuration for the logging
- driver.
-
+ isolation (str): Isolation technology to use. Default: ``None``.
+ links (dict): Mapping of links using the
+ ``{'container': 'alias'}`` format. The alias is optional.
+ Containers declared in this dict will be linked to the new
+ container using the provided alias. Default: ``None``.
+ log_config (LogConfig): Logging configuration
lxc_conf (dict): LXC config.
mem_limit (float or str): Memory limit. Accepts float values
(which represent the memory limit of the created container in
bytes) or a string with a units identification char
(``100000b``, ``1000k``, ``128m``, ``1g``). If a string is
specified without a units character, bytes are assumed as an
+ mem_reservation (float or str): Memory soft limit.
mem_swappiness (int): Tune a container's memory swappiness
behavior. Accepts number between 0 and 100.
memswap_limit (str or int): Maximum amount of memory + swap a
@@ -500,11 +516,13 @@ class ContainerApiMixin(object):
network_mode (str): One of:
- ``bridge`` Create a new network stack for the container on
- on the bridge network.
+ the bridge network.
- ``none`` No networking for this container.
- ``container:<name|id>`` Reuse another container's network
stack.
- ``host`` Use the host network stack.
+ This mode is incompatible with ``port_bindings``.
+
oom_kill_disable (bool): Whether to disable OOM killer.
oom_score_adj (int): An integer value containing the score given
to the container in order to tune OOM killer preferences.
@@ -513,7 +531,8 @@ class ContainerApiMixin(object):
pids_limit (int): Tune a container's pids limit. Set ``-1`` for
unlimited.
port_bindings (dict): See :py:meth:`create_container`
- for more information.
+ for more information.
+                Incompatible with ``host`` in ``network_mode``.
privileged (bool): Give extended privileges to this container.
publish_all_ports (bool): Publish all ports to the host.
read_only (bool): Mount the container's root filesystem as read
@@ -543,10 +562,12 @@ class ContainerApiMixin(object):
}
ulimits (:py:class:`list`): Ulimits to set inside the container,
- as a list of dicts.
+ as a list of :py:class:`docker.types.Ulimit` instances.
userns_mode (str): Sets the user namespace mode for the container
when user namespace remapping option is enabled. Supported
values are: ``host``
+ uts_mode (str): Sets the UTS namespace mode for the container.
+ Supported values are: ``host``
volumes_from (:py:class:`list`): List of container names or IDs to
get volumes from.
runtime (str): Runtime to use with this container.
@@ -558,7 +579,7 @@ class ContainerApiMixin(object):
Example:
- >>> cli.create_host_config(privileged=True, cap_drop=['MKNOD'],
+ >>> client.api.create_host_config(privileged=True, cap_drop=['MKNOD'],
volumes_from=['nostalgic_newton'])
{'CapDrop': ['MKNOD'], 'LxcConf': None, 'Privileged': True,
'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False}
@@ -589,11 +610,11 @@ class ContainerApiMixin(object):
Example:
- >>> docker_client.create_network('network1')
- >>> networking_config = docker_client.create_networking_config({
- 'network1': docker_client.create_endpoint_config()
+ >>> client.api.create_network('network1')
+ >>> networking_config = client.api.create_networking_config({
+ 'network1': client.api.create_endpoint_config()
})
- >>> container = docker_client.create_container(
+ >>> container = client.api.create_container(
img, command, networking_config=networking_config
)
@@ -609,24 +630,27 @@ class ContainerApiMixin(object):
aliases (:py:class:`list`): A list of aliases for this endpoint.
Names in that list can be used within the network to reach the
container. Defaults to ``None``.
- links (:py:class:`list`): A list of links for this endpoint.
- Containers declared in this list will be linked to this
- container. Defaults to ``None``.
+ links (dict): Mapping of links for this endpoint using the
+ ``{'container': 'alias'}`` format. The alias is optional.
+ Containers declared in this dict will be linked to this
+ container using the provided alias. Defaults to ``None``.
ipv4_address (str): The IP address of this container on the
network, using the IPv4 protocol. Defaults to ``None``.
ipv6_address (str): The IP address of this container on the
network, using the IPv6 protocol. Defaults to ``None``.
link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6)
addresses.
+ driver_opt (dict): A dictionary of options to provide to the
+ network driver. Defaults to ``None``.
Returns:
(dict) An endpoint config.
Example:
- >>> endpoint_config = client.create_endpoint_config(
+ >>> endpoint_config = client.api.create_endpoint_config(
aliases=['web', 'app'],
- links=['app_db'],
+ links={'app_db': 'db', 'another': None},
ipv4_address='132.65.0.123'
)
@@ -676,7 +700,8 @@ class ContainerApiMixin(object):
return self._stream_raw_result(res, chunk_size, False)
@utils.check_resource('container')
- def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
+ def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE,
+ encode_stream=False):
"""
Retrieve a file or folder from a container in the form of a tar
archive.
@@ -687,6 +712,8 @@ class ContainerApiMixin(object):
chunk_size (int): The number of bytes returned by each iteration
of the generator. If ``None``, data will be streamed as it is
received. Default: 2 MB
+ encode_stream (bool): Determines if data should be encoded
+ (gzip-compressed) during transmission. Default: False
Returns:
(tuple): First element is a raw tar data stream. Second element is
@@ -695,12 +722,29 @@ class ContainerApiMixin(object):
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
+
+ Example:
+
+ >>> c = docker.APIClient()
+ >>> f = open('./sh_bin.tar', 'wb')
+ >>> bits, stat = c.api.get_archive(container, '/bin/sh')
+ >>> print(stat)
+ {'name': 'sh', 'size': 1075464, 'mode': 493,
+ 'mtime': '2018-10-01T15:37:48-07:00', 'linkTarget': ''}
+ >>> for chunk in bits:
+ ... f.write(chunk)
+ >>> f.close()
"""
params = {
'path': path
}
+ headers = {
+ "Accept-Encoding": "gzip, deflate"
+ } if encode_stream else {
+ "Accept-Encoding": "identity"
+ }
url = self._url('/containers/{0}/archive', container)
- res = self._get(url, params=params, stream=True)
+ res = self._get(url, params=params, stream=True, headers=headers)
self._raise_for_status(res)
encoded_stat = res.headers.get('x-docker-container-path-stat')
return (
@@ -744,7 +788,7 @@ class ContainerApiMixin(object):
url = self._url("/containers/{0}/kill", container)
params = {}
if signal is not None:
- if not isinstance(signal, six.string_types):
+ if not isinstance(signal, str):
signal = int(signal)
params['signal'] = signal
res = self._post(url, params=params)
@@ -763,16 +807,16 @@ class ContainerApiMixin(object):
Args:
container (str): The container to get logs from
- stdout (bool): Get ``STDOUT``
- stderr (bool): Get ``STDERR``
- stream (bool): Stream the response
- timestamps (bool): Show timestamps
+ stdout (bool): Get ``STDOUT``. Default ``True``
+ stderr (bool): Get ``STDERR``. Default ``True``
+ stream (bool): Stream the response. Default ``False``
+ timestamps (bool): Show timestamps. Default ``False``
tail (str or int): Output specified number of lines at the end of
logs. Either an integer of number of lines or the string
``all``. Default ``all``
since (datetime or int): Show logs since a given datetime or
integer epoch (in seconds)
- follow (bool): Follow log output
+ follow (bool): Follow log output. Default ``False``
until (datetime or int): Show logs that occurred before the given
datetime or integer epoch (in seconds)
@@ -870,7 +914,7 @@ class ContainerApiMixin(object):
.. code-block:: python
- >>> cli.port('7174d6347063', 80)
+ >>> client.api.port('7174d6347063', 80)
[{'HostIp': '0.0.0.0', 'HostPort': '80'}]
"""
res = self._get(self._url("/containers/{0}/json", container))
@@ -888,9 +932,10 @@ class ContainerApiMixin(object):
if '/' in private_port:
return port_settings.get(private_port)
- h_ports = port_settings.get(private_port + '/tcp')
- if h_ports is None:
- h_ports = port_settings.get(private_port + '/udp')
+ for protocol in ['tcp', 'udp', 'sctp']:
+ h_ports = port_settings.get(private_port + '/' + protocol)
+ if h_ports:
+ break
return h_ports
@@ -1048,10 +1093,10 @@ class ContainerApiMixin(object):
Example:
- >>> container = cli.create_container(
+ >>> container = client.api.create_container(
... image='busybox:latest',
... command='/bin/sleep 30')
- >>> cli.start(container=container.get('Id'))
+ >>> client.api.start(container=container.get('Id'))
"""
if args or kwargs:
raise errors.DeprecatedMethod(
@@ -1072,7 +1117,8 @@ class ContainerApiMixin(object):
Args:
container (str): The container to stream statistics from
decode (bool): If set to true, stream will be decoded into dicts
- on the fly. False by default.
+ on the fly. Only applicable if ``stream`` is True.
+ False by default.
stream (bool): If set to false, only the current stats will be
returned instead of a stream. True by default.
@@ -1086,6 +1132,10 @@ class ContainerApiMixin(object):
return self._stream_helper(self._get(url, stream=True),
decode=decode)
else:
+ if decode:
+ raise errors.InvalidArgument(
+ "decode is only available in conjunction with stream=True"
+ )
return self._result(self._get(url, params={'stream': False}),
json=True)
@@ -1170,8 +1220,8 @@ class ContainerApiMixin(object):
cpu_shares (int): CPU shares (relative weight)
cpuset_cpus (str): CPUs in which to allow execution
cpuset_mems (str): MEMs in which to allow execution
- mem_limit (int or str): Memory limit
- mem_reservation (int or str): Memory soft limit
+ mem_limit (float or str): Memory limit
+ mem_reservation (float or str): Memory soft limit
memswap_limit (int or str): Total memory (memory + swap), -1 to
disable swap
kernel_memory (int or str): Kernel memory limit
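
A sketch of the new ``demux`` behaviour on ``attach``, assuming a running
container whose ID is a placeholder:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    container_id = 'my-container'   # placeholder, must be running
    # With demux=True and stream=False a (stdout, stderr) tuple of bytes is
    # returned instead of a single interleaved string; either element may be
    # None if that stream produced no output.
    stdout, stderr = client.attach(
        container_id, stdout=True, stderr=True, logs=True, demux=True
    )
    if stdout:
        print(stdout.decode(errors='replace'))
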
diff --git a/docker/api/daemon.py b/docker/api/daemon.py
index 76a94cf..a857213 100644
--- a/docker/api/daemon.py
+++ b/docker/api/daemon.py
@@ -4,7 +4,7 @@ from datetime import datetime
from .. import auth, types, utils
-class DaemonApiMixin(object):
+class DaemonApiMixin:
@utils.minimum_version('1.25')
def df(self):
"""
@@ -42,8 +42,8 @@ class DaemonApiMixin(object):
Example:
- >>> for event in client.events()
- ... print event
+ >>> for event in client.events(decode=True)
+ ... print(event)
{u'from': u'image/with:tag',
u'id': u'container-id',
u'status': u'start',
@@ -54,7 +54,7 @@ class DaemonApiMixin(object):
>>> events = client.events()
>>> for event in events:
- ... print event
+ ... print(event)
>>> # and cancel from another thread
>>> events.close()
"""
@@ -109,7 +109,7 @@ class DaemonApiMixin(object):
the Docker server.
dockercfg_path (str): Use a custom path for the Docker config file
(default ``$HOME/.docker/config.json`` if present,
- otherwise``$HOME/.dockercfg``)
+ otherwise ``$HOME/.dockercfg``)
Returns:
(dict): The response from the login request
@@ -124,13 +124,15 @@ class DaemonApiMixin(object):
# If dockercfg_path is passed check to see if the config file exists,
# if so load that config.
if dockercfg_path and os.path.exists(dockercfg_path):
- self._auth_configs = auth.load_config(dockercfg_path)
- elif not self._auth_configs:
- self._auth_configs = auth.load_config()
-
- authcfg = auth.resolve_authconfig(
- self._auth_configs, registry, credstore_env=self.credstore_env,
- )
+ self._auth_configs = auth.load_config(
+ dockercfg_path, credstore_env=self.credstore_env
+ )
+ elif not self._auth_configs or self._auth_configs.is_empty:
+ self._auth_configs = auth.load_config(
+ credstore_env=self.credstore_env
+ )
+
+ authcfg = self._auth_configs.resolve_authconfig(registry)
# If we found an existing auth config for this registry and username
# combination, we can return it immediately unless reauth is requested.
if authcfg and authcfg.get('username', None) == username \
@@ -146,9 +148,7 @@ class DaemonApiMixin(object):
response = self._post_json(self._url('/auth'), data=req_data)
if response.status_code == 200:
- if 'auths' not in self._auth_configs:
- self._auth_configs['auths'] = {}
- self._auth_configs['auths'][registry or auth.INDEX_NAME] = req_data
+ self._auth_configs.add_auth(registry or auth.INDEX_NAME, req_data)
return self._result(response, json=True)
def ping(self):
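
A sketch of ``login`` after the auth refactor, with placeholder credentials
and registry; credentials are resolved through the shared auth config object
(including any configured credential store) before the ``/auth`` call:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    result = client.login(
        username='someuser',                 # placeholder credentials
        password='s3cr3t',
        registry='registry.example.com',     # placeholder registry
    )
    print(result.get('Status'))
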
diff --git a/docker/api/exec_api.py b/docker/api/exec_api.py
index 986d87f..496308a 100644
--- a/docker/api/exec_api.py
+++ b/docker/api/exec_api.py
@@ -1,10 +1,8 @@
-import six
-
from .. import errors
from .. import utils
-class ExecApiMixin(object):
+class ExecApiMixin:
@utils.check_resource('container')
def exec_create(self, container, cmd, stdout=True, stderr=True,
stdin=False, tty=False, privileged=False, user='',
@@ -45,7 +43,7 @@ class ExecApiMixin(object):
'Setting environment for exec is not supported in API < 1.25'
)
- if isinstance(cmd, six.string_types):
+ if isinstance(cmd, str):
cmd = utils.split_command(cmd)
if isinstance(environment, dict):
@@ -118,7 +116,7 @@ class ExecApiMixin(object):
@utils.check_resource('exec_id')
def exec_start(self, exec_id, detach=False, tty=False, stream=False,
- socket=False):
+ socket=False, demux=False):
"""
Start a previously set up exec instance.
@@ -130,11 +128,15 @@ class ExecApiMixin(object):
stream (bool): Stream response data. Default: False
socket (bool): Return the connection socket to allow custom
read/write operations.
+ demux (bool): Return stdout and stderr separately
Returns:
- (generator or str): If ``stream=True``, a generator yielding
- response chunks. If ``socket=True``, a socket object for the
- connection. A string containing response data otherwise.
+
+ (generator or str or tuple): If ``stream=True``, a generator
+ yielding response chunks. If ``socket=True``, a socket object for
+ the connection. A string containing response data otherwise. If
+ ``demux=True``, a tuple with two elements of type byte: stdout and
+ stderr.
Raises:
:py:class:`docker.errors.APIError`
@@ -162,4 +164,4 @@ class ExecApiMixin(object):
return self._result(res)
if socket:
return self._get_raw_response_socket(res)
- return self._read_from_socket(res, stream, tty)
+ return self._read_from_socket(res, stream, tty=tty, demux=demux)
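
A sketch of a demultiplexed exec, assuming a running container with a
placeholder ID:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    container_id = 'my-container'   # placeholder, must be running
    exec_id = client.exec_create(
        container_id, 'sh -c "echo to-stdout; echo to-stderr >&2"'
    )
    # demux=True returns a (stdout, stderr) tuple of bytes; either element
    # may be None if that stream produced no output.
    stdout, stderr = client.exec_start(exec_id, demux=True)
    print(stdout, stderr)
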
diff --git a/docker/api/image.py b/docker/api/image.py
index 5f05d88..772d889 100644
--- a/docker/api/image.py
+++ b/docker/api/image.py
@@ -1,15 +1,13 @@
import logging
import os
-import six
-
from .. import auth, errors, utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE
log = logging.getLogger(__name__)
-class ImageApiMixin(object):
+class ImageApiMixin:
@utils.check_resource('image')
def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
@@ -31,8 +29,8 @@ class ImageApiMixin(object):
Example:
- >>> image = cli.get_image("busybox:latest")
- >>> f = open('/tmp/busybox-latest.tar', 'w')
+ >>> image = client.api.get_image("busybox:latest")
+ >>> f = open('/tmp/busybox-latest.tar', 'wb')
>>> for chunk in image:
>>> f.write(chunk)
>>> f.close()
@@ -70,7 +68,8 @@ class ImageApiMixin(object):
filters (dict): Filters to be processed on the image list.
Available filters:
- ``dangling`` (bool)
- - ``label`` (str): format either ``key`` or ``key=value``
+ - `label` (str|list): format either ``"key"``, ``"key=value"``
+ or a list of such.
Returns:
(dict or list): A list if ``quiet=True``, otherwise a dict.
@@ -80,10 +79,18 @@ class ImageApiMixin(object):
If the server returns an error.
"""
params = {
- 'filter': name,
'only_ids': 1 if quiet else 0,
'all': 1 if all else 0,
}
+ if name:
+ if utils.version_lt(self._version, '1.25'):
+ # only use "filter" on API 1.24 and under, as it is deprecated
+ params['filter'] = name
+ else:
+ if filters:
+ filters['reference'] = name
+ else:
+ filters = {'reference': name}
if filters:
params['filters'] = utils.convert_filters(filters)
res = self._result(self._get(self._url("/images/json"), params=params),
@@ -121,7 +128,7 @@ class ImageApiMixin(object):
params = _import_image_params(
repository, tag, image,
- src=(src if isinstance(src, six.string_types) else None),
+ src=(src if isinstance(src, str) else None),
changes=changes
)
headers = {'Content-Type': 'application/tar'}
@@ -130,7 +137,7 @@ class ImageApiMixin(object):
return self._result(
self._post(u, data=None, params=params)
)
- elif isinstance(src, six.string_types): # from file path
+ elif isinstance(src, str): # from file path
with open(src, 'rb') as f:
return self._result(
self._post(
@@ -247,12 +254,15 @@ class ImageApiMixin(object):
@utils.minimum_version('1.30')
@utils.check_resource('image')
- def inspect_distribution(self, image):
+ def inspect_distribution(self, image, auth_config=None):
"""
Get image digest and platform information by contacting the registry.
Args:
image (str): The image name to inspect
+ auth_config (dict): Override the credentials that are found in the
+ config for this request. ``auth_config`` should contain the
+ ``username`` and ``password`` keys to be valid.
Returns:
(dict): A dict containing distribution data
@@ -261,9 +271,21 @@ class ImageApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
+ registry, _ = auth.resolve_repository_name(image)
+
+ headers = {}
+ if auth_config is None:
+ header = auth.get_config_header(self, registry)
+ if header:
+ headers['X-Registry-Auth'] = header
+ else:
+ log.debug('Sending supplied auth config')
+ headers['X-Registry-Auth'] = auth.encode_header(auth_config)
+
+ url = self._url("/distribution/{0}/json", image)
return self._result(
- self._get(self._url("/distribution/{0}/json", image)), True
+ self._get(url, headers=headers), True
)
def load_image(self, data, quiet=None):
@@ -327,21 +349,24 @@ class ImageApiMixin(object):
return self._result(self._post(url, params=params), True)
def pull(self, repository, tag=None, stream=False, auth_config=None,
- decode=False, platform=None):
+ decode=False, platform=None, all_tags=False):
"""
Pulls an image. Similar to the ``docker pull`` command.
Args:
repository (str): The repository to pull
- tag (str): The tag to pull
- stream (bool): Stream the output as a generator
- auth_config (dict): Override the credentials that
- :py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
- this request. ``auth_config`` should contain the ``username``
- and ``password`` keys to be valid.
+ tag (str): The tag to pull. If ``tag`` is ``None`` or empty, it
+ is set to ``latest``.
+ stream (bool): Stream the output as a generator. Make sure to
+ consume the generator, otherwise pull might get cancelled.
+ auth_config (dict): Override the credentials that are found in the
+ config for this request. ``auth_config`` should contain the
+ ``username`` and ``password`` keys to be valid.
decode (bool): Decode the JSON data from the server into dicts.
Only applies with ``stream=True``
platform (str): Platform in the format ``os[/arch[/variant]]``
+ all_tags (bool): Pull all image tags, the ``tag`` parameter is
+ ignored.
Returns:
(generator or str): The output
@@ -352,8 +377,8 @@ class ImageApiMixin(object):
Example:
- >>> for line in cli.pull('busybox', stream=True):
- ... print(json.dumps(json.loads(line), indent=4))
+ >>> for line in client.api.pull('busybox', stream=True, decode=True):
+ ... print(json.dumps(line, indent=4))
{
"status": "Pulling image (latest) from busybox",
"progressDetail": {},
@@ -366,8 +391,12 @@ class ImageApiMixin(object):
}
"""
- if not tag:
- repository, tag = utils.parse_repository_tag(repository)
+ repository, image_tag = utils.parse_repository_tag(repository)
+ tag = tag or image_tag or 'latest'
+
+ if all_tags:
+ tag = None
+
registry, repo_name = auth.resolve_repository_name(repository)
params = {
@@ -413,10 +442,9 @@ class ImageApiMixin(object):
repository (str): The repository to push to
tag (str): An optional tag to push
stream (bool): Stream the output as a blocking generator
- auth_config (dict): Override the credentials that
- :py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
- this request. ``auth_config`` should contain the ``username``
- and ``password`` keys to be valid.
+ auth_config (dict): Override the credentials that are found in the
+ config for this request. ``auth_config`` should contain the
+ ``username`` and ``password`` keys to be valid.
decode (bool): Decode the JSON data from the server into dicts.
Only applies with ``stream=True``
@@ -428,12 +456,12 @@ class ImageApiMixin(object):
If the server returns an error.
Example:
- >>> for line in cli.push('yourname/app', stream=True):
- ... print line
- {"status":"Pushing repository yourname/app (1 tags)"}
- {"status":"Pushing","progressDetail":{},"id":"511136ea3c5a"}
- {"status":"Image already pushed, skipping","progressDetail":{},
- "id":"511136ea3c5a"}
+ >>> for line in client.api.push('yourname/app', stream=True, decode=True):
+ ... print(line)
+ {'status': 'Pushing repository yourname/app (1 tags)'}
+ {'status': 'Pushing','progressDetail': {}, 'id': '511136ea3c5a'}
+ {'status': 'Image already pushed, skipping', 'progressDetail':{},
+ 'id': '511136ea3c5a'}
...
"""
@@ -479,13 +507,14 @@ class ImageApiMixin(object):
res = self._delete(self._url("/images/{0}", image), params=params)
return self._result(res, True)
- def search(self, term):
+ def search(self, term, limit=None):
"""
Search for images on Docker Hub. Similar to the ``docker search``
command.
Args:
term (str): A term to search for.
+ limit (int): The maximum number of results to return.
Returns:
(list of dicts): The response of the search.
@@ -494,8 +523,12 @@ class ImageApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
+ params = {'term': term}
+ if limit is not None:
+ params['limit'] = limit
+
return self._result(
- self._get(self._url("/images/search"), params={'term': term}),
+ self._get(self._url("/images/search"), params=params),
True
)
@@ -519,7 +552,7 @@ class ImageApiMixin(object):
Example:
- >>> client.tag('ubuntu', 'localhost:5000/ubuntu', 'latest',
+ >>> client.api.tag('ubuntu', 'localhost:5000/ubuntu', 'latest',
force=True)
"""
params = {
@@ -536,7 +569,7 @@ class ImageApiMixin(object):
def is_file(src):
try:
return (
- isinstance(src, six.string_types) and
+ isinstance(src, str) and
os.path.isfile(src)
)
except TypeError: # a data string will make isfile() raise a TypeError
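
A sketch of the adjusted ``pull`` defaults and the new ``search`` limit,
assuming network access to Docker Hub:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    # `tag` now defaults to 'latest'; pass all_tags=True to pull every tag.
    for line in client.pull('busybox', stream=True, decode=True):
        print(line.get('status', ''))
    # Cap the number of search results returned by the registry.
    results = client.search('busybox', limit=5)
    print([r['name'] for r in results])
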
diff --git a/docker/api/network.py b/docker/api/network.py
index 57ed8d3..e95c5fc 100644
--- a/docker/api/network.py
+++ b/docker/api/network.py
@@ -4,10 +4,10 @@ from ..utils import version_lt
from .. import utils
-class NetworkApiMixin(object):
+class NetworkApiMixin:
def networks(self, names=None, ids=None, filters=None):
"""
- List networks. Similar to the ``docker networks ls`` command.
+ List networks. Similar to the ``docker network ls`` command.
Args:
names (:py:class:`list`): List of names to filter by
@@ -15,7 +15,8 @@ class NetworkApiMixin(object):
filters (dict): Filters to be processed on the network list.
Available filters:
- ``driver=[<driver-name>]`` Matches a network's driver.
- - ``label=[<key>]`` or ``label=[<key>=<value>]``.
+ - ``label=[<key>]``, ``label=[<key>=<value>]`` or a list of
+ such.
- ``type=["custom"|"builtin"]`` Filters networks by type.
Returns:
@@ -74,7 +75,7 @@ class NetworkApiMixin(object):
Example:
A network using the bridge driver:
- >>> client.create_network("network1", driver="bridge")
+ >>> client.api.create_network("network1", driver="bridge")
You can also create more advanced networks with custom IPAM
configurations. For example, setting the subnet to
@@ -89,7 +90,7 @@ class NetworkApiMixin(object):
>>> ipam_config = docker.types.IPAMConfig(
pool_configs=[ipam_pool]
)
- >>> docker_client.create_network("network1", driver="bridge",
+ >>> client.api.create_network("network1", driver="bridge",
ipam=ipam_config)
"""
if options is not None and not isinstance(options, dict):
@@ -215,7 +216,7 @@ class NetworkApiMixin(object):
def connect_container_to_network(self, container, net_id,
ipv4_address=None, ipv6_address=None,
aliases=None, links=None,
- link_local_ips=None):
+ link_local_ips=None, driver_opt=None):
"""
Connect a container to a network.
@@ -239,7 +240,8 @@ class NetworkApiMixin(object):
"Container": container,
"EndpointConfig": self.create_endpoint_config(
aliases=aliases, links=links, ipv4_address=ipv4_address,
- ipv6_address=ipv6_address, link_local_ips=link_local_ips
+ ipv6_address=ipv6_address, link_local_ips=link_local_ips,
+ driver_opt=driver_opt
),
}
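
A sketch of passing ``driver_opt`` through to the endpoint config; the
network, container ID, and option key are placeholders:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    net_id = client.create_network('example-net', driver='bridge')['Id']
    container_id = 'my-container'   # placeholder
    client.connect_container_to_network(
        container_id, net_id,
        aliases=['web'],
        driver_opt={'com.example.opt': 'value'},   # hypothetical driver option
    )
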
diff --git a/docker/api/plugin.py b/docker/api/plugin.py
index f6c0b13..57110f1 100644
--- a/docker/api/plugin.py
+++ b/docker/api/plugin.py
@@ -1,9 +1,7 @@
-import six
-
from .. import auth, utils
-class PluginApiMixin(object):
+class PluginApiMixin:
@utils.minimum_version('1.25')
@utils.check_resource('name')
def configure_plugin(self, name, options):
@@ -21,7 +19,7 @@ class PluginApiMixin(object):
url = self._url('/plugins/{0}/set', name)
data = options
if isinstance(data, dict):
- data = ['{0}={1}'.format(k, v) for k, v in six.iteritems(data)]
+ data = [f'{k}={v}' for k, v in data.items()]
res = self._post_json(url, data=data)
self._raise_for_status(res)
return True
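
A sketch of ``configure_plugin`` with a dict, which is flattened to
``['key=value', ...]`` before being posted; the plugin name and option are
placeholders:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    client.configure_plugin('vieux/sshfs:latest', {'DEBUG': '1'})
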
diff --git a/docker/api/secret.py b/docker/api/secret.py
index fa4c2ab..cd440b9 100644
--- a/docker/api/secret.py
+++ b/docker/api/secret.py
@@ -1,12 +1,10 @@
import base64
-import six
-
from .. import errors
from .. import utils
-class SecretApiMixin(object):
+class SecretApiMixin:
@utils.minimum_version('1.25')
def create_secret(self, name, data, labels=None, driver=None):
"""
@@ -25,8 +23,7 @@ class SecretApiMixin(object):
data = data.encode('utf-8')
data = base64.b64encode(data)
- if six.PY3:
- data = data.decode('ascii')
+ data = data.decode('ascii')
body = {
'Data': data,
'Name': name,
@@ -53,7 +50,7 @@ class SecretApiMixin(object):
Retrieve secret metadata
Args:
- id (string): Full ID of the secret to remove
+ id (string): Full ID of the secret to inspect
Returns (dict): A dictionary of metadata
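
A sketch of ``create_secret``, assuming the daemon is in swarm mode; the
secret name and payload are placeholders:

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    # Bytes are base64-encoded and then decoded to ASCII client-side (the
    # step that previously depended on six.PY3).
    secret = client.create_secret(
        name='db-password',            # hypothetical secret name
        data=b'hunter2',
        labels={'app': 'example'},
    )
    print(secret['ID'])
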
diff --git a/docker/api/service.py b/docker/api/service.py
index 03b0ca6..371f541 100644
--- a/docker/api/service.py
+++ b/docker/api/service.py
@@ -2,7 +2,8 @@ from .. import auth, errors, utils
from ..types import ServiceMode
-def _check_api_features(version, task_template, update_config, endpoint_spec):
+def _check_api_features(version, task_template, update_config, endpoint_spec,
+ rollback_config):
def raise_version_error(param, min_version):
raise errors.InvalidVersion(
@@ -18,10 +19,24 @@ def _check_api_features(version, task_template, update_config, endpoint_spec):
if 'Monitor' in update_config:
raise_version_error('UpdateConfig.monitor', '1.25')
+ if utils.version_lt(version, '1.28'):
+ if update_config.get('FailureAction') == 'rollback':
+ raise_version_error(
+ 'UpdateConfig.failure_action rollback', '1.28'
+ )
+
if utils.version_lt(version, '1.29'):
if 'Order' in update_config:
raise_version_error('UpdateConfig.order', '1.29')
+ if rollback_config is not None:
+ if utils.version_lt(version, '1.28'):
+ raise_version_error('rollback_config', '1.28')
+
+ if utils.version_lt(version, '1.29'):
+ if 'Order' in update_config:
+ raise_version_error('RollbackConfig.order', '1.29')
+
if endpoint_spec is not None:
if utils.version_lt(version, '1.32') and 'Ports' in endpoint_spec:
if any(p.get('PublishMode') for p in endpoint_spec['Ports']):
@@ -30,7 +45,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec):
if task_template is not None:
if 'ForceUpdate' in task_template and utils.version_lt(
version, '1.25'):
- raise_version_error('force_update', '1.25')
+ raise_version_error('force_update', '1.25')
if task_template.get('Placement'):
if utils.version_lt(version, '1.30'):
@@ -73,6 +88,10 @@ def _check_api_features(version, task_template, update_config, endpoint_spec):
if container_spec.get('Isolation') is not None:
raise_version_error('ContainerSpec.isolation', '1.35')
+ if utils.version_lt(version, '1.38'):
+ if container_spec.get('Init') is not None:
+ raise_version_error('ContainerSpec.init', '1.38')
+
if task_template.get('Resources'):
if utils.version_lt(version, '1.32'):
if task_template['Resources'].get('GenericResources'):
@@ -94,12 +113,12 @@ def _merge_task_template(current, override):
return merged
-class ServiceApiMixin(object):
+class ServiceApiMixin:
@utils.minimum_version('1.24')
def create_service(
self, task_template, name=None, labels=None, mode=None,
update_config=None, networks=None, endpoint_config=None,
- endpoint_spec=None
+ endpoint_spec=None, rollback_config=None
):
"""
Create a service.
@@ -114,8 +133,11 @@ class ServiceApiMixin(object):
or global). Defaults to replicated.
update_config (UpdateConfig): Specification for the update strategy
of the service. Default: ``None``
- networks (:py:class:`list`): List of network names or IDs to attach
- the service to. Default: ``None``.
+ rollback_config (RollbackConfig): Specification for the rollback
+ strategy of the service. Default: ``None``
+ networks (:py:class:`list`): List of network names or IDs or
+ :py:class:`~docker.types.NetworkAttachmentConfig` to attach the
+ service to. Default: ``None``.
endpoint_spec (EndpointSpec): Properties that can be configured to
access and load balance a service. Default: ``None``.
@@ -129,7 +151,8 @@ class ServiceApiMixin(object):
"""
_check_api_features(
- self._version, task_template, update_config, endpoint_spec
+ self._version, task_template, update_config, endpoint_spec,
+ rollback_config
)
url = self._url('/services/create')
@@ -160,6 +183,9 @@ class ServiceApiMixin(object):
if update_config is not None:
data['UpdateConfig'] = update_config
+ if rollback_config is not None:
+ data['RollbackConfig'] = rollback_config
+
return self._result(
self._post_json(url, data=data, headers=headers), True
)
@@ -176,7 +202,8 @@ class ServiceApiMixin(object):
into the service inspect output.
Returns:
- ``True`` if successful.
+ (dict): A dictionary of the server-side representation of the
+ service, including all relevant properties.
Raises:
:py:class:`docker.errors.APIError`
@@ -336,7 +363,8 @@ class ServiceApiMixin(object):
def update_service(self, service, version, task_template=None, name=None,
labels=None, mode=None, update_config=None,
networks=None, endpoint_config=None,
- endpoint_spec=None, fetch_current_spec=False):
+ endpoint_spec=None, fetch_current_spec=False,
+ rollback_config=None):
"""
Update a service.
@@ -354,15 +382,18 @@ class ServiceApiMixin(object):
or global). Defaults to replicated.
update_config (UpdateConfig): Specification for the update strategy
of the service. Default: ``None``.
- networks (:py:class:`list`): List of network names or IDs to attach
- the service to. Default: ``None``.
+ rollback_config (RollbackConfig): Specification for the rollback
+ strategy of the service. Default: ``None``
+ networks (:py:class:`list`): List of network names or IDs or
+ :py:class:`~docker.types.NetworkAttachmentConfig` to attach the
+ service to. Default: ``None``.
endpoint_spec (EndpointSpec): Properties that can be configured to
access and load balance a service. Default: ``None``.
fetch_current_spec (boolean): Use the undefined settings from the
current specification of the service. Default: ``False``
Returns:
- ``True`` if successful.
+ A dictionary containing a ``Warnings`` key.
Raises:
:py:class:`docker.errors.APIError`
@@ -370,7 +401,8 @@ class ServiceApiMixin(object):
"""
_check_api_features(
- self._version, task_template, update_config, endpoint_spec
+ self._version, task_template, update_config, endpoint_spec,
+ rollback_config
)
if fetch_current_spec:
@@ -416,6 +448,11 @@ class ServiceApiMixin(object):
else:
data['UpdateConfig'] = current.get('UpdateConfig')
+ if rollback_config is not None:
+ data['RollbackConfig'] = rollback_config
+ else:
+ data['RollbackConfig'] = current.get('RollbackConfig')
+
if networks is not None:
converted_networks = utils.convert_service_networks(networks)
if utils.version_lt(self._version, '1.25'):
@@ -440,5 +477,4 @@ class ServiceApiMixin(object):
resp = self._post_json(
url, data=data, params={'version': version}, headers=headers
)
- self._raise_for_status(resp)
- return True
+ return self._result(resp, json=True)
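A hedged sketch of an update call using the new parameter and return value; the service name is a placeholder and the ``Warnings`` value shown is illustrative:

    >>> from docker import types
    >>> current = client.api.inspect_service('web')
    >>> client.api.update_service(
    ...     'web',
    ...     version=current['Version']['Index'],
    ...     rollback_config=types.RollbackConfig(order='stop-first'),
    ...     fetch_current_spec=True,
    ... )
    {'Warnings': None}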
diff --git a/docker/api/swarm.py b/docker/api/swarm.py
index 04595da..db40fdd 100644
--- a/docker/api/swarm.py
+++ b/docker/api/swarm.py
@@ -1,5 +1,6 @@
import logging
-from six.moves import http_client
+import http.client as http_client
+from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE
from .. import errors
from .. import types
from .. import utils
@@ -7,7 +8,7 @@ from .. import utils
log = logging.getLogger(__name__)
-class SwarmApiMixin(object):
+class SwarmApiMixin:
def create_swarm_spec(self, *args, **kwargs):
"""
@@ -57,10 +58,10 @@ class SwarmApiMixin(object):
Example:
- >>> spec = client.create_swarm_spec(
+ >>> spec = client.api.create_swarm_spec(
snapshot_interval=5000, log_entries_for_slow_followers=1200
)
- >>> client.init_swarm(
+ >>> client.api.init_swarm(
advertise_addr='eth0', listen_addr='0.0.0.0:5000',
force_new_cluster=False, swarm_spec=spec
)
@@ -82,7 +83,9 @@ class SwarmApiMixin(object):
@utils.minimum_version('1.24')
def init_swarm(self, advertise_addr=None, listen_addr='0.0.0.0:2377',
- force_new_cluster=False, swarm_spec=None):
+ force_new_cluster=False, swarm_spec=None,
+ default_addr_pool=None, subnet_size=None,
+ data_path_addr=None):
"""
Initialize a new Swarm using the current connected engine as the first
node.
@@ -107,9 +110,17 @@ class SwarmApiMixin(object):
swarm_spec (dict): Configuration settings of the new Swarm. Use
``APIClient.create_swarm_spec`` to generate a valid
configuration. Default: None
+ default_addr_pool (list of strings): Default Address Pool specifies
+ default subnet pools for global scope networks. Each pool
+ should be specified as a CIDR block, like '10.0.0.0/8'.
+ Default: None
+ subnet_size (int): SubnetSize specifies the subnet size of the
+ networks created from the default subnet pool. Default: None
+ data_path_addr (string): Address or interface to use for data path
+ traffic. For example, 192.168.1.1, or an interface, like eth0.
Returns:
- ``True`` if successful.
+ (str): The ID of the created node.
Raises:
:py:class:`docker.errors.APIError`
@@ -119,15 +130,44 @@ class SwarmApiMixin(object):
url = self._url('/swarm/init')
if swarm_spec is not None and not isinstance(swarm_spec, dict):
raise TypeError('swarm_spec must be a dictionary')
+
+ if default_addr_pool is not None:
+ if utils.version_lt(self._version, '1.39'):
+ raise errors.InvalidVersion(
+ 'Address pool is only available for API version >= 1.39'
+ )
+ # subnet_size becomes 0 if not set with default_addr_pool
+ if subnet_size is None:
+ subnet_size = DEFAULT_SWARM_SUBNET_SIZE
+
+ if subnet_size is not None:
+ if utils.version_lt(self._version, '1.39'):
+ raise errors.InvalidVersion(
+ 'Subnet size is only available for API version >= 1.39'
+ )
+ # subnet_size is ignored if set without default_addr_pool
+ if default_addr_pool is None:
+ default_addr_pool = DEFAULT_SWARM_ADDR_POOL
+
data = {
'AdvertiseAddr': advertise_addr,
'ListenAddr': listen_addr,
+ 'DefaultAddrPool': default_addr_pool,
+ 'SubnetSize': subnet_size,
'ForceNewCluster': force_new_cluster,
'Spec': swarm_spec,
}
+
+ if data_path_addr is not None:
+ if utils.version_lt(self._version, '1.30'):
+ raise errors.InvalidVersion(
+ 'Data path address is only available for '
+ 'API version >= 1.30'
+ )
+ data['DataPathAddr'] = data_path_addr
+
response = self._post_json(url, data=data)
- self._raise_for_status(response)
- return True
+ return self._result(response, json=True)
@utils.minimum_version('1.24')
def inspect_swarm(self):
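A minimal sketch of the new init options; the address pool, subnet size, and data path address are placeholder values (the pool options require API >= 1.39, the data path address API >= 1.30), and the returned node ID is illustrative:

    >>> client.api.init_swarm(
    ...     advertise_addr='eth0',
    ...     default_addr_pool=['10.20.0.0/16'],
    ...     subnet_size=24,
    ...     data_path_addr='192.168.1.1',
    ... )
    '2kd2rs1c0iltjjfbzjju3dgsc'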
@@ -165,7 +205,7 @@ class SwarmApiMixin(object):
@utils.minimum_version('1.24')
def join_swarm(self, remote_addrs, join_token, listen_addr='0.0.0.0:2377',
- advertise_addr=None):
+ advertise_addr=None, data_path_addr=None):
"""
Make this Engine join a swarm that has already been created.
@@ -176,7 +216,7 @@ class SwarmApiMixin(object):
listen_addr (string): Listen address used for inter-manager
communication if the node gets promoted to manager, as well as
determining the networking interface used for the VXLAN Tunnel
- Endpoint (VTEP). Default: ``None``
+ Endpoint (VTEP). Default: ``'0.0.0.0:2377'``
advertise_addr (string): Externally reachable address advertised
to other nodes. This can either be an address/port combination
in the form ``192.168.1.1:4567``, or an interface followed by a
@@ -184,6 +224,8 @@ class SwarmApiMixin(object):
the port number from the listen address is used. If
AdvertiseAddr is not specified, it will be automatically
detected when possible. Default: ``None``
+ data_path_addr (string): Address or interface to use for data path
+ traffic. For example, 192.168.1.1, or an interface, like eth0.
Returns:
``True`` if the request went through.
@@ -193,11 +235,20 @@ class SwarmApiMixin(object):
If the server returns an error.
"""
data = {
- "RemoteAddrs": remote_addrs,
- "ListenAddr": listen_addr,
- "JoinToken": join_token,
- "AdvertiseAddr": advertise_addr,
+ 'RemoteAddrs': remote_addrs,
+ 'ListenAddr': listen_addr,
+ 'JoinToken': join_token,
+ 'AdvertiseAddr': advertise_addr,
}
+
+ if data_path_addr is not None:
+ if utils.version_lt(self._version, '1.30'):
+ raise errors.InvalidVersion(
+ 'Data path address is only available for '
+ 'API version >= 1.30'
+ )
+ data['DataPathAddr'] = data_path_addr
+
url = self._url('/swarm/join')
response = self._post_json(url, data=data)
self._raise_for_status(response)
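For illustration (the manager address, join token, and interface name are placeholders), joining with a dedicated data path interface might look like:

    >>> client.api.join_swarm(
    ...     remote_addrs=['192.168.1.10:2377'],
    ...     join_token='SWMTKN-1-abcdef',
    ...     data_path_addr='eth1',
    ... )
    True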
@@ -303,8 +354,8 @@ class SwarmApiMixin(object):
Example:
- >>> key = client.get_unlock_key()
- >>> client.unlock_node(key)
+ >>> key = client.api.get_unlock_key()
+ >>> client.api.unlock_swarm(key)
"""
if isinstance(key, dict):
@@ -345,7 +396,7 @@ class SwarmApiMixin(object):
'Role': 'manager',
'Labels': {'foo': 'bar'}
}
- >>> client.update_node(node_id='24ifsmvkjbyhk', version=8,
+ >>> client.api.update_node(node_id='24ifsmvkjbyhk', version=8,
node_spec=node_spec)
"""
@@ -355,8 +406,10 @@ class SwarmApiMixin(object):
return True
@utils.minimum_version('1.24')
- def update_swarm(self, version, swarm_spec=None, rotate_worker_token=False,
- rotate_manager_token=False):
+ def update_swarm(self, version, swarm_spec=None,
+ rotate_worker_token=False,
+ rotate_manager_token=False,
+ rotate_manager_unlock_key=False):
"""
Update the Swarm's configuration
@@ -370,6 +423,8 @@ class SwarmApiMixin(object):
``False``.
rotate_manager_token (bool): Rotate the manager join token.
Default: ``False``.
+ rotate_manager_unlock_key (bool): Rotate the manager unlock key.
+ Default: ``False``.
Returns:
``True`` if the request went through.
@@ -378,12 +433,20 @@ class SwarmApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
-
url = self._url('/swarm/update')
- response = self._post_json(url, data=swarm_spec, params={
+ params = {
'rotateWorkerToken': rotate_worker_token,
'rotateManagerToken': rotate_manager_token,
'version': version
- })
+ }
+ if rotate_manager_unlock_key:
+ if utils.version_lt(self._version, '1.25'):
+ raise errors.InvalidVersion(
+ 'Rotate manager unlock key '
+ 'is only available for API version >= 1.25'
+ )
+ params['rotateManagerUnlockKey'] = rotate_manager_unlock_key
+
+ response = self._post_json(url, data=swarm_spec, params=params)
self._raise_for_status(response)
return True
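A hedged sketch of rotating the unlock key through the new flag (requires API >= 1.25; the version index and current spec are read from a live inspect call):

    >>> swarm = client.api.inspect_swarm()
    >>> client.api.update_swarm(
    ...     version=swarm['Version']['Index'],
    ...     swarm_spec=swarm['Spec'],
    ...     rotate_manager_unlock_key=True,
    ... )
    True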
diff --git a/docker/api/volume.py b/docker/api/volume.py
index 900a608..86b0018 100644
--- a/docker/api/volume.py
+++ b/docker/api/volume.py
@@ -2,7 +2,7 @@ from .. import errors
from .. import utils
-class VolumeApiMixin(object):
+class VolumeApiMixin:
def volumes(self, filters=None):
"""
List volumes currently registered by the docker daemon. Similar to the
@@ -21,7 +21,7 @@ class VolumeApiMixin(object):
Example:
- >>> cli.volumes()
+ >>> client.api.volumes()
{u'Volumes': [{u'Driver': u'local',
u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data',
u'Name': u'foobar'},
@@ -56,7 +56,7 @@ class VolumeApiMixin(object):
Example:
- >>> volume = cli.create_volume(name='foobar', driver='local',
+ >>> volume = client.api.create_volume(name='foobar', driver='local',
driver_opts={'foo': 'bar', 'baz': 'false'},
labels={"key": "value"})
>>> print(volume)
@@ -104,7 +104,7 @@ class VolumeApiMixin(object):
Example:
- >>> cli.inspect_volume('foobar')
+ >>> client.api.inspect_volume('foobar')
{u'Driver': u'local',
u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data',
u'Name': u'foobar'}