Diffstat (limited to 'tests')
-rw-r--r--  tests/integration/api_build_test.py  1
-rw-r--r--  tests/integration/api_container_test.py  24
-rw-r--r--  tests/integration/api_image_test.py  4
-rw-r--r--  tests/integration/api_network_test.py  21
-rw-r--r--  tests/integration/api_service_test.py  13
-rw-r--r--  tests/integration/context_api_test.py  59
-rw-r--r--  tests/integration/models_images_test.py  8
-rw-r--r--  tests/ssh/__init__.py  0
-rw-r--r--  tests/ssh/api_build_test.py  595
-rw-r--r--  tests/ssh/base.py  130
-rw-r--r--  tests/unit/api_container_test.py  64
-rw-r--r--  tests/unit/api_image_test.py  23
-rw-r--r--  tests/unit/api_network_test.py  4
-rw-r--r--  tests/unit/api_test.py  51
-rw-r--r--  tests/unit/client_test.py  186
-rw-r--r--  tests/unit/context_test.py  49
-rw-r--r--  tests/unit/errors_test.py  20
-rw-r--r--  tests/unit/fake_api.py  3
-rw-r--r--  tests/unit/fake_api_client.py  8
-rw-r--r--  tests/unit/models_containers_test.py  4
-rw-r--r--  tests/unit/models_images_test.py  24
-rw-r--r--  tests/unit/models_services_test.py  2
-rw-r--r--  tests/unit/sshadapter_test.py  32
-rw-r--r--  tests/unit/utils_build_test.py  2
-rw-r--r--  tests/unit/utils_test.py  30
25 files changed, 1260 insertions, 97 deletions
diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py
index 5712812..b830a10 100644
--- a/tests/integration/api_build_test.py
+++ b/tests/integration/api_build_test.py
@@ -339,7 +339,6 @@ class BuildTest(BaseAPIIntegrationTest):
assert self.client.inspect_image(img_name)
ctnr = self.run_container(img_name, 'cat /hosts-file')
- self.tmp_containers.append(ctnr)
logs = self.client.logs(ctnr)
if six.PY3:
logs = logs.decode('utf-8')
diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py
index 1ba3eaa..65e611b 100644
--- a/tests/integration/api_container_test.py
+++ b/tests/integration/api_container_test.py
@@ -273,11 +273,14 @@ class CreateContainerTest(BaseAPIIntegrationTest):
def test_invalid_log_driver_raises_exception(self):
log_config = docker.types.LogConfig(
- type='asdf-nope',
+ type='asdf',
config={}
)
- expected_msg = "logger: no log driver named 'asdf-nope' is registered"
+ expected_msgs = [
+ "logger: no log driver named 'asdf' is registered",
+ "error looking up logging plugin asdf: plugin \"asdf\" not found",
+ ]
with pytest.raises(docker.errors.APIError) as excinfo:
# raises an internal server error 500
container = self.client.create_container(
@@ -287,7 +290,7 @@ class CreateContainerTest(BaseAPIIntegrationTest):
)
self.client.start(container)
- assert excinfo.value.explanation == expected_msg
+ assert excinfo.value.explanation in expected_msgs
def test_valid_no_log_driver_specified(self):
log_config = docker.types.LogConfig(
@@ -1102,6 +1105,8 @@ class PortTest(BaseAPIIntegrationTest):
class ContainerTopTest(BaseAPIIntegrationTest):
+ @pytest.mark.xfail(reason='Output of docker top depends on host distro, '
+ 'and is not formalized.')
def test_top(self):
container = self.client.create_container(
TEST_IMG, ['sleep', '60']
@@ -1112,9 +1117,7 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container)
res = self.client.top(container)
if not IS_WINDOWS_PLATFORM:
- assert res['Titles'] == [
- 'UID', 'PID', 'PPID', 'C', 'STIME', 'TTY', 'TIME', 'CMD'
- ]
+ assert res['Titles'] == [u'PID', u'USER', u'TIME', u'COMMAND']
assert len(res['Processes']) == 1
assert res['Processes'][0][-1] == 'sleep 60'
self.client.kill(container)
@@ -1122,6 +1125,8 @@ class ContainerTopTest(BaseAPIIntegrationTest):
@pytest.mark.skipif(
IS_WINDOWS_PLATFORM, reason='No psargs support on windows'
)
+ @pytest.mark.xfail(reason='Output of docker top depends on host distro, '
+ 'and is not formalized.')
def test_top_with_psargs(self):
container = self.client.create_container(
TEST_IMG, ['sleep', '60'])
@@ -1129,11 +1134,8 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.tmp_containers.append(container)
self.client.start(container)
- res = self.client.top(container, 'waux')
- assert res['Titles'] == [
- 'USER', 'PID', '%CPU', '%MEM', 'VSZ', 'RSS',
- 'TTY', 'STAT', 'START', 'TIME', 'COMMAND'
- ]
+ res = self.client.top(container, '-eopid,user')
+ assert res['Titles'] == [u'PID', u'USER']
assert len(res['Processes']) == 1
assert res['Processes'][0][10] == 'sleep 60'
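
The rewritten assertion above only covers the failure path for an unknown log driver. For contrast, a valid log driver is wired up through create_host_config roughly like this (a minimal sketch, not part of the patch; the json-file options are illustrative):

    import docker

    client = docker.APIClient()  # low-level client, as used in these tests
    log_config = docker.types.LogConfig(
        type=docker.types.LogConfig.types.JSON,        # the 'json-file' driver
        config={'max-size': '1m', 'max-file': '3'}     # illustrative driver options
    )
    container = client.create_container(
        'busybox', 'true',
        host_config=client.create_host_config(log_config=log_config),
    )
    client.start(container)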
diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py
index 2bc96ab..37e26a3 100644
--- a/tests/integration/api_image_test.py
+++ b/tests/integration/api_image_test.py
@@ -42,7 +42,7 @@ class PullImageTest(BaseAPIIntegrationTest):
self.client.remove_image('hello-world')
except docker.errors.APIError:
pass
- res = self.client.pull('hello-world', tag='latest')
+ res = self.client.pull('hello-world')
self.tmp_imgs.append('hello-world')
assert type(res) == six.text_type
assert len(self.client.images('hello-world')) >= 1
@@ -55,7 +55,7 @@ class PullImageTest(BaseAPIIntegrationTest):
except docker.errors.APIError:
pass
stream = self.client.pull(
- 'hello-world', tag='latest', stream=True, decode=True)
+ 'hello-world', stream=True, decode=True)
self.tmp_imgs.append('hello-world')
for chunk in stream:
assert isinstance(chunk, dict)
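
These two hunks lean on pull() now defaulting an untagged reference to ':latest', with pulling every tag opted into explicitly. A rough sketch of the resulting call patterns (not part of the patch):

    import docker

    client = docker.APIClient()
    res = client.pull('hello-world')                      # untagged -> ':latest' by default
    res_all = client.pull('hello-world', all_tags=True)   # pull every tag of the repository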
diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py
index 0f26827..af22da8 100644
--- a/tests/integration/api_network_test.py
+++ b/tests/integration/api_network_test.py
@@ -275,6 +275,27 @@ class TestNetworks(BaseAPIIntegrationTest):
assert 'LinkLocalIPs' in net_cfg['IPAMConfig']
assert net_cfg['IPAMConfig']['LinkLocalIPs'] == ['169.254.8.8']
+ @requires_api_version('1.32')
+ def test_create_with_driveropt(self):
+ container = self.client.create_container(
+ TEST_IMG, 'top',
+ networking_config=self.client.create_networking_config(
+ {
+ 'bridge': self.client.create_endpoint_config(
+ driver_opt={'com.docker-py.setting': 'on'}
+ )
+ }
+ ),
+ host_config=self.client.create_host_config(network_mode='bridge')
+ )
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ container_data = self.client.inspect_container(container)
+ net_cfg = container_data['NetworkSettings']['Networks']['bridge']
+ assert 'DriverOpts' in net_cfg
+ assert 'com.docker-py.setting' in net_cfg['DriverOpts']
+ assert net_cfg['DriverOpts']['com.docker-py.setting'] == 'on'
+
@requires_api_version('1.22')
def test_create_with_links(self):
net_name, net_id = self.create_network()
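
The new test boils down to passing driver_opt through create_endpoint_config at container-creation time. A condensed sketch of that call chain (the option key is arbitrary, taken from the test; driver_opt needs API version 1.32 or later):

    import docker

    client = docker.APIClient()
    container = client.create_container(
        'busybox', 'top',
        networking_config=client.create_networking_config({
            'bridge': client.create_endpoint_config(
                driver_opt={'com.docker-py.setting': 'on'}   # arbitrary option key, as in the test
            )
        }),
        host_config=client.create_host_config(network_mode='bridge'),
    )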
diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py
index b6b7ec5..7e5336e 100644
--- a/tests/integration/api_service_test.py
+++ b/tests/integration/api_service_test.py
@@ -471,6 +471,19 @@ class ServiceTest(BaseAPIIntegrationTest):
assert 'Placement' in svc_info['Spec']['TaskTemplate']
assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
+ @requires_api_version('1.40')
+ def test_create_service_with_placement_maxreplicas(self):
+ container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
+ placemt = docker.types.Placement(maxreplicas=1)
+ task_tmpl = docker.types.TaskTemplate(
+ container_spec, placement=placemt
+ )
+ name = self.get_service_name()
+ svc_id = self.client.create_service(task_tmpl, name=name)
+ svc_info = self.client.inspect_service(svc_id)
+ assert 'Placement' in svc_info['Spec']['TaskTemplate']
+ assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
+
def test_create_service_with_endpoint_spec(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(container_spec)
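
Placement(maxreplicas=1) maps to Swarm's MaxReplicas constraint, i.e. at most one task of the service per node. The high-level client accepts the same knob directly on services.create; a sketch, assuming a swarm-enabled daemon and API 1.40+ (the service name is illustrative):

    import docker

    client = docker.from_env()
    service = client.services.create(
        'busybox', command='sleep 300',
        name='maxreplicas-demo',     # illustrative name
        maxreplicas=1,               # at most one task per node
    )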
diff --git a/tests/integration/context_api_test.py b/tests/integration/context_api_test.py
new file mode 100644
index 0000000..a2a12a5
--- /dev/null
+++ b/tests/integration/context_api_test.py
@@ -0,0 +1,59 @@
+import os
+import tempfile
+import pytest
+from docker import errors
+from docker.context import ContextAPI
+from docker.tls import TLSConfig
+from .base import BaseAPIIntegrationTest
+
+
+class ContextLifecycleTest(BaseAPIIntegrationTest):
+ def test_lifecycle(self):
+ assert ContextAPI.get_context().Name == "default"
+ assert not ContextAPI.get_context("test")
+ assert ContextAPI.get_current_context().Name == "default"
+
+ dirpath = tempfile.mkdtemp()
+ ca = tempfile.NamedTemporaryFile(
+ prefix=os.path.join(dirpath, "ca.pem"), mode="r")
+ cert = tempfile.NamedTemporaryFile(
+ prefix=os.path.join(dirpath, "cert.pem"), mode="r")
+ key = tempfile.NamedTemporaryFile(
+ prefix=os.path.join(dirpath, "key.pem"), mode="r")
+
+ # create context 'test'
+ docker_tls = TLSConfig(
+ client_cert=(cert.name, key.name),
+ ca_cert=ca.name)
+ ContextAPI.create_context(
+ "test", tls_cfg=docker_tls)
+
+ # check for a context 'test' in the context store
+ assert any([ctx.Name == "test" for ctx in ContextAPI.contexts()])
+ # retrieve a context object for 'test'
+ assert ContextAPI.get_context("test")
+ # remove context
+ ContextAPI.remove_context("test")
+ with pytest.raises(errors.ContextNotFound):
+ ContextAPI.inspect_context("test")
+ # check there is no 'test' context in store
+ assert not ContextAPI.get_context("test")
+
+ ca.close()
+ key.close()
+ cert.close()
+
+ def test_context_remove(self):
+ ContextAPI.create_context("test")
+ assert ContextAPI.inspect_context("test")["Name"] == "test"
+
+ ContextAPI.remove_context("test")
+ with pytest.raises(errors.ContextNotFound):
+ ContextAPI.inspect_context("test")
+
+ def test_load_context_without_orchestrator(self):
+ ContextAPI.create_context("test")
+ ctx = ContextAPI.get_context("test")
+ assert ctx
+ assert ctx.Name == "test"
+ assert ctx.Orchestrator is None
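
The lifecycle this test walks through (create, list, inspect, remove) condenses to the following sketch; the tls_cfg argument used above is optional and requires real PEM files:

    from docker.context import ContextAPI

    ContextAPI.create_context('test')                              # store a new context
    assert any(c.Name == 'test' for c in ContextAPI.contexts())    # it shows up in the store
    print(ContextAPI.inspect_context('test')['Name'])              # raw metadata dict
    ContextAPI.remove_context('test')                              # clean up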
diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py
index 375d972..0d60f37 100644
--- a/tests/integration/models_images_test.py
+++ b/tests/integration/models_images_test.py
@@ -86,9 +86,11 @@ class ImageCollectionTest(BaseIntegrationTest):
def test_pull_multiple(self):
client = docker.from_env(version=TEST_API_VERSION)
- images = client.images.pull('hello-world')
- assert len(images) == 1
- assert 'hello-world:latest' in images[0].attrs['RepoTags']
+ images = client.images.pull('hello-world', all_tags=True)
+ assert len(images) >= 1
+ assert any([
+ 'hello-world:latest' in img.attrs['RepoTags'] for img in images
+ ])
def test_load_error(self):
client = docker.from_env(version=TEST_API_VERSION)
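
The behaviour being pinned down: without all_tags a single Image is returned, with all_tags=True a list is. A sketch of both calls:

    import docker

    client = docker.from_env()
    image = client.images.pull('hello-world')                    # single Image (hello-world:latest)
    images = client.images.pull('hello-world', all_tags=True)    # list of Images, one per tag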
diff --git a/tests/ssh/__init__.py b/tests/ssh/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/ssh/__init__.py
diff --git a/tests/ssh/api_build_test.py b/tests/ssh/api_build_test.py
new file mode 100644
index 0000000..b830a10
--- /dev/null
+++ b/tests/ssh/api_build_test.py
@@ -0,0 +1,595 @@
+import io
+import os
+import shutil
+import tempfile
+
+from docker import errors
+from docker.utils.proxy import ProxyConfig
+
+import pytest
+import six
+
+from .base import BaseAPIIntegrationTest, TEST_IMG
+from ..helpers import random_name, requires_api_version, requires_experimental
+
+
+class BuildTest(BaseAPIIntegrationTest):
+ def test_build_with_proxy(self):
+ self.client._proxy_configs = ProxyConfig(
+ ftp='a', http='b', https='c', no_proxy='d'
+ )
+
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN env | grep "FTP_PROXY=a"',
+ 'RUN env | grep "ftp_proxy=a"',
+ 'RUN env | grep "HTTP_PROXY=b"',
+ 'RUN env | grep "http_proxy=b"',
+ 'RUN env | grep "HTTPS_PROXY=c"',
+ 'RUN env | grep "https_proxy=c"',
+ 'RUN env | grep "NO_PROXY=d"',
+ 'RUN env | grep "no_proxy=d"',
+ ]).encode('ascii'))
+
+ self.client.build(fileobj=script, decode=True)
+
+ def test_build_with_proxy_and_buildargs(self):
+ self.client._proxy_configs = ProxyConfig(
+ ftp='a', http='b', https='c', no_proxy='d'
+ )
+
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN env | grep "FTP_PROXY=XXX"',
+ 'RUN env | grep "ftp_proxy=xxx"',
+ 'RUN env | grep "HTTP_PROXY=b"',
+ 'RUN env | grep "http_proxy=b"',
+ 'RUN env | grep "HTTPS_PROXY=c"',
+ 'RUN env | grep "https_proxy=c"',
+ 'RUN env | grep "NO_PROXY=d"',
+ 'RUN env | grep "no_proxy=d"',
+ ]).encode('ascii'))
+
+ self.client.build(
+ fileobj=script,
+ decode=True,
+ buildargs={'FTP_PROXY': 'XXX', 'ftp_proxy': 'xxx'}
+ )
+
+ def test_build_streaming(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]).encode('ascii'))
+ stream = self.client.build(fileobj=script, decode=True)
+ logs = []
+ for chunk in stream:
+ logs.append(chunk)
+ assert len(logs) > 0
+
+ def test_build_from_stringio(self):
+ if six.PY3:
+ return
+ script = io.StringIO(six.text_type('\n').join([
+ 'FROM busybox',
+ 'RUN mkdir -p /tmp/test',
+ 'EXPOSE 8080',
+ 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz'
+ ' /tmp/silence.tar.gz'
+ ]))
+ stream = self.client.build(fileobj=script)
+ logs = ''
+ for chunk in stream:
+ if six.PY3:
+ chunk = chunk.decode('utf-8')
+ logs += chunk
+ assert logs != ''
+
+ def test_build_with_dockerignore(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write("\n".join([
+ 'FROM busybox',
+ 'ADD . /test',
+ ]))
+
+ with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
+ f.write("\n".join([
+ 'ignored',
+ 'Dockerfile',
+ '.dockerignore',
+ '!ignored/subdir/excepted-file',
+ '', # empty line,
+ '#*', # comment line
+ ]))
+
+ with open(os.path.join(base_dir, 'not-ignored'), 'w') as f:
+ f.write("this file should not be ignored")
+
+ with open(os.path.join(base_dir, '#file.txt'), 'w') as f:
+ f.write('this file should not be ignored')
+
+ subdir = os.path.join(base_dir, 'ignored', 'subdir')
+ os.makedirs(subdir)
+ with open(os.path.join(subdir, 'file'), 'w') as f:
+ f.write("this file should be ignored")
+
+ with open(os.path.join(subdir, 'excepted-file'), 'w') as f:
+ f.write("this file should not be ignored")
+
+ tag = 'docker-py-test-build-with-dockerignore'
+ stream = self.client.build(
+ path=base_dir,
+ tag=tag,
+ )
+ for chunk in stream:
+ pass
+
+ c = self.client.create_container(tag, ['find', '/test', '-type', 'f'])
+ self.client.start(c)
+ self.client.wait(c)
+ logs = self.client.logs(c)
+
+ if six.PY3:
+ logs = logs.decode('utf-8')
+
+ assert sorted(list(filter(None, logs.split('\n')))) == sorted([
+ '/test/#file.txt',
+ '/test/ignored/subdir/excepted-file',
+ '/test/not-ignored'
+ ])
+
+ def test_build_with_buildargs(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM scratch',
+ 'ARG test',
+ 'USER $test'
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, tag='buildargs', buildargs={'test': 'OK'}
+ )
+ self.tmp_imgs.append('buildargs')
+ for chunk in stream:
+ pass
+
+ info = self.client.inspect_image('buildargs')
+ assert info['Config']['User'] == 'OK'
+
+ @requires_api_version('1.22')
+ def test_build_shmsize(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM scratch',
+ 'CMD sh -c "echo \'Hello, World!\'"',
+ ]).encode('ascii'))
+
+ tag = 'shmsize'
+ shmsize = 134217728
+
+ stream = self.client.build(
+ fileobj=script, tag=tag, shmsize=shmsize
+ )
+ self.tmp_imgs.append(tag)
+ for chunk in stream:
+ pass
+
+ # There is currently no way to get the shmsize
+ # that was used to build the image
+
+ @requires_api_version('1.24')
+ def test_build_isolation(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM scratch',
+ 'CMD sh -c "echo \'Deaf To All But The Song\''
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, tag='isolation',
+ isolation='default'
+ )
+
+ for chunk in stream:
+ pass
+
+ @requires_api_version('1.23')
+ def test_build_labels(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM scratch',
+ ]).encode('ascii'))
+
+ labels = {'test': 'OK'}
+
+ stream = self.client.build(
+ fileobj=script, tag='labels', labels=labels
+ )
+ self.tmp_imgs.append('labels')
+ for chunk in stream:
+ pass
+
+ info = self.client.inspect_image('labels')
+ assert info['Config']['Labels'] == labels
+
+ @requires_api_version('1.25')
+ def test_build_with_cache_from(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'ENV FOO=bar',
+ 'RUN touch baz',
+ 'RUN touch bax',
+ ]).encode('ascii'))
+
+ stream = self.client.build(fileobj=script, tag='build1')
+ self.tmp_imgs.append('build1')
+ for chunk in stream:
+ pass
+
+ stream = self.client.build(
+ fileobj=script, tag='build2', cache_from=['build1'],
+ decode=True
+ )
+ self.tmp_imgs.append('build2')
+ counter = 0
+ for chunk in stream:
+ if 'Using cache' in chunk.get('stream', ''):
+ counter += 1
+ assert counter == 3
+ self.client.remove_image('build2')
+
+ counter = 0
+ stream = self.client.build(
+ fileobj=script, tag='build2', cache_from=['nosuchtag'],
+ decode=True
+ )
+ for chunk in stream:
+ if 'Using cache' in chunk.get('stream', ''):
+ counter += 1
+ assert counter == 0
+
+ @requires_api_version('1.29')
+ def test_build_container_with_target(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox as first',
+ 'RUN mkdir -p /tmp/test',
+ 'RUN touch /tmp/silence.tar.gz',
+ 'FROM alpine:latest',
+ 'WORKDIR /root/'
+ 'COPY --from=first /tmp/silence.tar.gz .',
+ 'ONBUILD RUN echo "This should not be in the final image"'
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, target='first', tag='build1'
+ )
+ self.tmp_imgs.append('build1')
+ for chunk in stream:
+ pass
+
+ info = self.client.inspect_image('build1')
+ assert not info['Config']['OnBuild']
+
+ @requires_api_version('1.25')
+ def test_build_with_network_mode(self):
+ # Set up pingable endpoint on custom network
+ network = self.client.create_network(random_name())['Id']
+ self.tmp_networks.append(network)
+ container = self.client.create_container(TEST_IMG, 'top')
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ self.client.connect_container_to_network(
+ container, network, aliases=['pingtarget.docker']
+ )
+
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN ping -c1 pingtarget.docker'
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, network_mode=network,
+ tag='dockerpytest_customnetbuild'
+ )
+
+ self.tmp_imgs.append('dockerpytest_customnetbuild')
+ for chunk in stream:
+ pass
+
+ assert self.client.inspect_image('dockerpytest_customnetbuild')
+
+ script.seek(0)
+ stream = self.client.build(
+ fileobj=script, network_mode='none',
+ tag='dockerpytest_nonebuild', nocache=True, decode=True
+ )
+
+ self.tmp_imgs.append('dockerpytest_nonebuild')
+ logs = [chunk for chunk in stream]
+ assert 'errorDetail' in logs[-1]
+ assert logs[-1]['errorDetail']['code'] == 1
+
+ with pytest.raises(errors.NotFound):
+ self.client.inspect_image('dockerpytest_nonebuild')
+
+ @requires_api_version('1.27')
+ def test_build_with_extra_hosts(self):
+ img_name = 'dockerpytest_extrahost_build'
+ self.tmp_imgs.append(img_name)
+
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN ping -c1 hello.world.test',
+ 'RUN ping -c1 extrahost.local.test',
+ 'RUN cp /etc/hosts /hosts-file'
+ ]).encode('ascii'))
+
+ stream = self.client.build(
+ fileobj=script, tag=img_name,
+ extra_hosts={
+ 'extrahost.local.test': '127.0.0.1',
+ 'hello.world.test': '127.0.0.1',
+ }, decode=True
+ )
+ for chunk in stream:
+ if 'errorDetail' in chunk:
+ pytest.fail(chunk)
+
+ assert self.client.inspect_image(img_name)
+ ctnr = self.run_container(img_name, 'cat /hosts-file')
+ logs = self.client.logs(ctnr)
+ if six.PY3:
+ logs = logs.decode('utf-8')
+ assert '127.0.0.1\textrahost.local.test' in logs
+ assert '127.0.0.1\thello.world.test' in logs
+
+ @requires_experimental(until=None)
+ @requires_api_version('1.25')
+ def test_build_squash(self):
+ script = io.BytesIO('\n'.join([
+ 'FROM busybox',
+ 'RUN echo blah > /file_1',
+ 'RUN echo blahblah > /file_2',
+ 'RUN echo blahblahblah > /file_3'
+ ]).encode('ascii'))
+
+ def build_squashed(squash):
+ tag = 'squash' if squash else 'nosquash'
+ stream = self.client.build(
+ fileobj=script, tag=tag, squash=squash
+ )
+ self.tmp_imgs.append(tag)
+ for chunk in stream:
+ pass
+
+ return self.client.inspect_image(tag)
+
+ non_squashed = build_squashed(False)
+ squashed = build_squashed(True)
+ assert len(non_squashed['RootFS']['Layers']) == 4
+ assert len(squashed['RootFS']['Layers']) == 2
+
+ def test_build_stderr_data(self):
+ control_chars = ['\x1b[91m', '\x1b[0m']
+ snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)'
+ script = io.BytesIO(b'\n'.join([
+ b'FROM busybox',
+ 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8')
+ ]))
+
+ stream = self.client.build(
+ fileobj=script, decode=True, nocache=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk.get('stream'))
+ expected = '{0}{2}\n{1}'.format(
+ control_chars[0], control_chars[1], snippet
+ )
+ assert any([line == expected for line in lines])
+
+ def test_build_gzip_encoding(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write("\n".join([
+ 'FROM busybox',
+ 'ADD . /test',
+ ]))
+
+ stream = self.client.build(
+ path=base_dir, decode=True, nocache=True,
+ gzip=True
+ )
+
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+
+ assert 'Successfully built' in lines[-1]['stream']
+
+ def test_build_with_dockerfile_empty_lines(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+ f.write('FROM busybox\n')
+ with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
+ f.write('\n'.join([
+ ' ',
+ '',
+ '\t\t',
+ '\t ',
+ ]))
+
+ stream = self.client.build(
+ path=base_dir, decode=True, nocache=True
+ )
+
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully built' in lines[-1]['stream']
+
+ def test_build_gzip_custom_encoding(self):
+ with pytest.raises(errors.DockerException):
+ self.client.build(path='.', gzip=True, encoding='text/html')
+
+ @requires_api_version('1.32')
+ @requires_experimental(until=None)
+ def test_build_invalid_platform(self):
+ script = io.BytesIO('FROM busybox\n'.encode('ascii'))
+
+ with pytest.raises(errors.APIError) as excinfo:
+ stream = self.client.build(fileobj=script, platform='foobar')
+ for _ in stream:
+ pass
+
+ # Some API versions incorrectly return a 500 status; assert 4xx or 5xx
+ assert excinfo.value.is_error()
+ assert 'unknown operating system' in excinfo.exconly() \
+ or 'invalid platform' in excinfo.exconly()
+
+ def test_build_out_of_context_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ with open(os.path.join(base_dir, '.dockerignore'), 'w') as f:
+ f.write('.dockerignore\n')
+ df_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, df_dir)
+ df_name = os.path.join(df_dir, 'Dockerfile')
+ with open(df_name, 'wb') as df:
+ df.write(('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ])).encode('utf-8'))
+ df.flush()
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile=df_name, tag=img_name,
+ decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 3
+ assert sorted([b'.', b'..', b'file.txt']) == sorted(lsdata)
+
+ def test_build_in_context_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ with open(os.path.join(base_dir, 'custom.dockerfile'), 'w') as df:
+ df.write('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ]))
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile='custom.dockerfile', tag=img_name,
+ decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 4
+ assert sorted(
+ [b'.', b'..', b'file.txt', b'custom.dockerfile']
+ ) == sorted(lsdata)
+
+ def test_build_in_context_nested_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ subdir = os.path.join(base_dir, 'hello', 'world')
+ os.makedirs(subdir)
+ with open(os.path.join(subdir, 'custom.dockerfile'), 'w') as df:
+ df.write('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ]))
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile='hello/world/custom.dockerfile',
+ tag=img_name, decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 4
+ assert sorted(
+ [b'.', b'..', b'file.txt', b'hello']
+ ) == sorted(lsdata)
+
+ def test_build_in_context_abs_dockerfile(self):
+ base_dir = tempfile.mkdtemp()
+ self.addCleanup(shutil.rmtree, base_dir)
+ abs_dockerfile_path = os.path.join(base_dir, 'custom.dockerfile')
+ with open(os.path.join(base_dir, 'file.txt'), 'w') as f:
+ f.write('hello world')
+ with open(abs_dockerfile_path, 'w') as df:
+ df.write('\n'.join([
+ 'FROM busybox',
+ 'COPY . /src',
+ 'WORKDIR /src',
+ ]))
+ img_name = random_name()
+ self.tmp_imgs.append(img_name)
+ stream = self.client.build(
+ path=base_dir, dockerfile=abs_dockerfile_path, tag=img_name,
+ decode=True
+ )
+ lines = []
+ for chunk in stream:
+ lines.append(chunk)
+ assert 'Successfully tagged' in lines[-1]['stream']
+
+ ctnr = self.client.create_container(img_name, 'ls -a')
+ self.tmp_containers.append(ctnr)
+ self.client.start(ctnr)
+ lsdata = self.client.logs(ctnr).strip().split(b'\n')
+ assert len(lsdata) == 4
+ assert sorted(
+ [b'.', b'..', b'file.txt', b'custom.dockerfile']
+ ) == sorted(lsdata)
+
+ @requires_api_version('1.31')
+ @pytest.mark.xfail(
+ True,
+ reason='Currently fails on 18.09: '
+ 'https://github.com/moby/moby/issues/37920'
+ )
+ def test_prune_builds(self):
+ prune_result = self.client.prune_builds()
+ assert 'SpaceReclaimed' in prune_result
+ assert isinstance(prune_result['SpaceReclaimed'], int)
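
All of the build tests in this new file consume the low-level build() generator the same way. Spelled out once as a standalone sketch (decode=True makes each chunk a dict; the Dockerfile content is illustrative):

    import io
    import docker

    client = docker.APIClient()
    script = io.BytesIO(b'FROM busybox\nRUN echo hello\n')
    for chunk in client.build(fileobj=script, tag='example', decode=True):
        if 'errorDetail' in chunk:           # the daemon reports build failures inline
            raise RuntimeError(chunk['errorDetail'].get('message', 'build failed'))
        if 'stream' in chunk:
            print(chunk['stream'], end='')   # regular build output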
diff --git a/tests/ssh/base.py b/tests/ssh/base.py
new file mode 100644
index 0000000..c723d82
--- /dev/null
+++ b/tests/ssh/base.py
@@ -0,0 +1,130 @@
+import os
+import shutil
+import unittest
+
+import docker
+from .. import helpers
+from docker.utils import kwargs_from_env
+
+TEST_IMG = 'alpine:3.10'
+TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION')
+
+
+class BaseIntegrationTest(unittest.TestCase):
+ """
+ A base class for integration test cases. It cleans up the Docker server
+ after itself.
+ """
+
+ def setUp(self):
+ self.tmp_imgs = []
+ self.tmp_containers = []
+ self.tmp_folders = []
+ self.tmp_volumes = []
+ self.tmp_networks = []
+ self.tmp_plugins = []
+ self.tmp_secrets = []
+ self.tmp_configs = []
+
+ def tearDown(self):
+ client = docker.from_env(version=TEST_API_VERSION, use_ssh_client=True)
+ try:
+ for img in self.tmp_imgs:
+ try:
+ client.api.remove_image(img)
+ except docker.errors.APIError:
+ pass
+ for container in self.tmp_containers:
+ try:
+ client.api.remove_container(container, force=True, v=True)
+ except docker.errors.APIError:
+ pass
+ for network in self.tmp_networks:
+ try:
+ client.api.remove_network(network)
+ except docker.errors.APIError:
+ pass
+ for volume in self.tmp_volumes:
+ try:
+ client.api.remove_volume(volume)
+ except docker.errors.APIError:
+ pass
+
+ for secret in self.tmp_secrets:
+ try:
+ client.api.remove_secret(secret)
+ except docker.errors.APIError:
+ pass
+
+ for config in self.tmp_configs:
+ try:
+ client.api.remove_config(config)
+ except docker.errors.APIError:
+ pass
+
+ for folder in self.tmp_folders:
+ shutil.rmtree(folder)
+ finally:
+ client.close()
+
+
+class BaseAPIIntegrationTest(BaseIntegrationTest):
+ """
+ A test case for `APIClient` integration tests. It sets up an `APIClient`
+ as `self.client`.
+ """
+ @classmethod
+ def setUpClass(cls):
+ cls.client = cls.get_client_instance()
+ cls.client.pull(TEST_IMG)
+
+ def tearDown(self):
+ super(BaseAPIIntegrationTest, self).tearDown()
+ self.client.close()
+
+ @staticmethod
+ def get_client_instance():
+ return docker.APIClient(
+ version=TEST_API_VERSION,
+ timeout=60,
+ use_ssh_client=True,
+ **kwargs_from_env()
+ )
+
+ @staticmethod
+ def _init_swarm(client, **kwargs):
+ return client.init_swarm(
+ '127.0.0.1', listen_addr=helpers.swarm_listen_addr(), **kwargs
+ )
+
+ def run_container(self, *args, **kwargs):
+ container = self.client.create_container(*args, **kwargs)
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ exitcode = self.client.wait(container)['StatusCode']
+
+ if exitcode != 0:
+ output = self.client.logs(container)
+ raise Exception(
+ "Container exited with code {}:\n{}"
+ .format(exitcode, output))
+
+ return container
+
+ def create_and_start(self, image=TEST_IMG, command='top', **kwargs):
+ container = self.client.create_container(
+ image=image, command=command, **kwargs)
+ self.tmp_containers.append(container)
+ self.client.start(container)
+ return container
+
+ def execute(self, container, cmd, exit_code=0, **kwargs):
+ exc = self.client.exec_create(container, cmd, **kwargs)
+ output = self.client.exec_start(exc)
+ actual_exit_code = self.client.exec_inspect(exc)['ExitCode']
+ msg = "Expected `{}` to exit with code {} but returned {}:\n{}".format(
+ " ".join(cmd), exit_code, actual_exit_code, output)
+ assert actual_exit_code == exit_code, msg
+
+ def init_swarm(self, **kwargs):
+ return self._init_swarm(self.client, **kwargs)
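
These helpers drive the daemon over SSH by shelling out to the local ssh binary (use_ssh_client=True) instead of using paramiko. The equivalent standalone setup is roughly the following sketch; the user and host names are placeholders:

    import docker

    # With DOCKER_HOST=ssh://user@build-host set in the environment:
    client = docker.from_env(use_ssh_client=True)

    # Or construct the low-level client explicitly:
    api = docker.APIClient(base_url='ssh://user@build-host', use_ssh_client=True)
    api.pull('alpine:3.10')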
diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py
index a7e183c..8a0577e 100644
--- a/tests/unit/api_container_test.py
+++ b/tests/unit/api_container_test.py
@@ -5,6 +5,7 @@ import json
import signal
import docker
+from docker.api import APIClient
import pytest
import six
@@ -12,7 +13,7 @@ from . import fake_api
from ..helpers import requires_api_version
from .api_test import (
BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS,
- fake_inspect_container
+ fake_inspect_container, url_base
)
try:
@@ -767,6 +768,67 @@ class CreateContainerTest(BaseAPIClientTest):
assert args[1]['headers'] == {'Content-Type': 'application/json'}
assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
+ def test_create_container_with_device_requests(self):
+ client = APIClient(version='1.40')
+ fake_api.fake_responses.setdefault(
+ '{0}/v1.40/containers/create'.format(fake_api.prefix),
+ fake_api.post_fake_create_container,
+ )
+ client.create_container(
+ 'busybox', 'true', host_config=client.create_host_config(
+ device_requests=[
+ {
+ 'device_ids': [
+ '0',
+ 'GPU-3a23c669-1f69-c64e-cf85-44e9b07e7a2a'
+ ]
+ },
+ {
+ 'driver': 'nvidia',
+ 'Count': -1,
+ 'capabilities': [
+ ['gpu', 'utility']
+ ],
+ 'options': {
+ 'key': 'value'
+ }
+ }
+ ]
+ )
+ )
+
+ args = fake_request.call_args
+ assert args[0][1] == url_base + 'v1.40/' + 'containers/create'
+ expected_payload = self.base_create_payload()
+ expected_payload['HostConfig'] = client.create_host_config()
+ expected_payload['HostConfig']['DeviceRequests'] = [
+ {
+ 'Driver': '',
+ 'Count': 0,
+ 'DeviceIDs': [
+ '0',
+ 'GPU-3a23c669-1f69-c64e-cf85-44e9b07e7a2a'
+ ],
+ 'Capabilities': [],
+ 'Options': {}
+ },
+ {
+ 'Driver': 'nvidia',
+ 'Count': -1,
+ 'DeviceIDs': [],
+ 'Capabilities': [
+ ['gpu', 'utility']
+ ],
+ 'Options': {
+ 'key': 'value'
+ }
+ }
+ ]
+ assert json.loads(args[1]['data']) == expected_payload
+ assert args[1]['headers']['Content-Type'] == 'application/json'
+ assert set(args[1]['headers']) <= {'Content-Type', 'User-Agent'}
+ assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS
+
def test_create_container_with_labels_dict(self):
labels_dict = {
six.text_type('foo'): six.text_type('1'),
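
The same DeviceRequests payload can be built with the docker.types.DeviceRequest helper instead of raw dicts; a sketch mirroring the expectations asserted above:

    import docker
    from docker.types import DeviceRequest

    client = docker.APIClient(version='1.40')   # DeviceRequests need API >= 1.40
    host_config = client.create_host_config(device_requests=[
        DeviceRequest(count=-1, capabilities=[['gpu', 'utility']]),   # i.e. "all GPUs"
        DeviceRequest(device_ids=['0'], driver='nvidia'),
    ])
    container = client.create_container('busybox', 'true', host_config=host_config)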
diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py
index 1e2315d..0b60df4 100644
--- a/tests/unit/api_image_test.py
+++ b/tests/unit/api_image_test.py
@@ -26,7 +26,18 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
- params={'filter': None, 'only_ids': 0, 'all': 1},
+ params={'only_ids': 0, 'all': 1},
+ timeout=DEFAULT_TIMEOUT_SECONDS
+ )
+
+ def test_images_name(self):
+ self.client.images('foo:bar')
+
+ fake_request.assert_called_with(
+ 'GET',
+ url_prefix + 'images/json',
+ params={'only_ids': 0, 'all': 0,
+ 'filters': '{"reference": ["foo:bar"]}'},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -36,7 +47,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
- params={'filter': None, 'only_ids': 1, 'all': 1},
+ params={'only_ids': 1, 'all': 1},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -46,7 +57,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
- params={'filter': None, 'only_ids': 1, 'all': 0},
+ params={'only_ids': 1, 'all': 0},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -56,7 +67,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
- params={'filter': None, 'only_ids': 0, 'all': 0,
+ params={'only_ids': 0, 'all': 0,
'filters': '{"dangling": ["true"]}'},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@@ -67,7 +78,7 @@ class ImageTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][1] == url_prefix + 'images/create'
assert args[1]['params'] == {
- 'tag': None, 'fromImage': 'joffrey/test001'
+ 'tag': 'latest', 'fromImage': 'joffrey/test001'
}
assert not args[1]['stream']
@@ -77,7 +88,7 @@ class ImageTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][1] == url_prefix + 'images/create'
assert args[1]['params'] == {
- 'tag': None, 'fromImage': 'joffrey/test001'
+ 'tag': 'latest', 'fromImage': 'joffrey/test001'
}
assert args[1]['stream']
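
The dropped 'filter' query parameter is superseded by the name argument, which is translated into a reference filter alongside the existing filters dict. A sketch of the mapping:

    import docker

    client = docker.APIClient()
    client.images('foo:bar')                   # -> filters={"reference": ["foo:bar"]}
    client.images(filters={'dangling': True})  # -> filters={"dangling": ["true"]}
    client.images(quiet=True, all=True)        # only_ids=1, all=1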
diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py
index c78554d..758f013 100644
--- a/tests/unit/api_network_test.py
+++ b/tests/unit/api_network_test.py
@@ -136,7 +136,8 @@ class NetworkTest(BaseAPIClientTest):
container={'Id': container_id},
net_id=network_id,
aliases=['foo', 'bar'],
- links=[('baz', 'quux')]
+ links=[('baz', 'quux')],
+ driver_opt={'com.docker-py.setting': 'yes'},
)
assert post.call_args[0][0] == (
@@ -148,6 +149,7 @@ class NetworkTest(BaseAPIClientTest):
'EndpointConfig': {
'Aliases': ['foo', 'bar'],
'Links': ['baz:quux'],
+ 'DriverOpts': {'com.docker-py.setting': 'yes'},
},
}
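
Driver options can also be supplied when connecting an existing container to a network, which is what this unit test verifies; a sketch with placeholder names:

    import docker

    client = docker.APIClient()
    client.connect_container_to_network(
        'my-container', 'my-network',                  # placeholder container and network
        aliases=['foo', 'bar'],
        links=[('baz', 'quux')],
        driver_opt={'com.docker-py.setting': 'yes'},   # serialized as EndpointConfig.DriverOpts
    )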
diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
index f4d220a..cb14b74 100644
--- a/tests/unit/api_test.py
+++ b/tests/unit/api_test.py
@@ -1,26 +1,26 @@
import datetime
-import json
import io
+import json
import os
import re
import shutil
import socket
+import struct
import tempfile
import threading
import time
import unittest
import docker
-from docker.api import APIClient
+import pytest
import requests
-from requests.packages import urllib3
import six
-import struct
+from docker.api import APIClient
+from docker.constants import DEFAULT_DOCKER_API_VERSION
+from requests.packages import urllib3
from . import fake_api
-import pytest
-
try:
from unittest import mock
except ImportError:
@@ -105,7 +105,7 @@ class BaseAPIClientTest(unittest.TestCase):
_read_from_socket=fake_read_from_socket
)
self.patcher.start()
- self.client = APIClient()
+ self.client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
def tearDown(self):
self.client.close()
@@ -282,27 +282,37 @@ class DockerApiTest(BaseAPIClientTest):
return socket_adapter.socket_path
def test_url_compatibility_unix(self):
- c = APIClient(base_url="unix://socket")
+ c = APIClient(
+ base_url="unix://socket",
+ version=DEFAULT_DOCKER_API_VERSION)
assert self._socket_path_for_client_session(c) == '/socket'
def test_url_compatibility_unix_triple_slash(self):
- c = APIClient(base_url="unix:///socket")
+ c = APIClient(
+ base_url="unix:///socket",
+ version=DEFAULT_DOCKER_API_VERSION)
assert self._socket_path_for_client_session(c) == '/socket'
def test_url_compatibility_http_unix_triple_slash(self):
- c = APIClient(base_url="http+unix:///socket")
+ c = APIClient(
+ base_url="http+unix:///socket",
+ version=DEFAULT_DOCKER_API_VERSION)
assert self._socket_path_for_client_session(c) == '/socket'
def test_url_compatibility_http(self):
- c = APIClient(base_url="http://hostname:1234")
+ c = APIClient(
+ base_url="http://hostname:1234",
+ version=DEFAULT_DOCKER_API_VERSION)
assert c.base_url == "http://hostname:1234"
def test_url_compatibility_tcp(self):
- c = APIClient(base_url="tcp://hostname:1234")
+ c = APIClient(
+ base_url="tcp://hostname:1234",
+ version=DEFAULT_DOCKER_API_VERSION)
assert c.base_url == "http://hostname:1234"
@@ -447,7 +457,9 @@ class UnixSocketStreamTest(unittest.TestCase):
b'\r\n'
) + b'\r\n'.join(lines)
- with APIClient(base_url="http+unix://" + self.socket_file) as client:
+ with APIClient(
+ base_url="http+unix://" + self.socket_file,
+ version=DEFAULT_DOCKER_API_VERSION) as client:
for i in range(5):
try:
stream = client.build(
@@ -532,7 +544,10 @@ class TCPSocketStreamTest(unittest.TestCase):
def request(self, stream=None, tty=None, demux=None):
assert stream is not None and tty is not None and demux is not None
- with APIClient(base_url=self.address) as client:
+ with APIClient(
+ base_url=self.address,
+ version=DEFAULT_DOCKER_API_VERSION
+ ) as client:
if tty:
url = client._url('/tty')
else:
@@ -597,7 +612,7 @@ class UserAgentTest(unittest.TestCase):
self.patcher.stop()
def test_default_user_agent(self):
- client = APIClient()
+ client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
client.version()
assert self.mock_send.call_count == 1
@@ -606,7 +621,9 @@ class UserAgentTest(unittest.TestCase):
assert headers['User-Agent'] == expected
def test_custom_user_agent(self):
- client = APIClient(user_agent='foo/bar')
+ client = APIClient(
+ user_agent='foo/bar',
+ version=DEFAULT_DOCKER_API_VERSION)
client.version()
assert self.mock_send.call_count == 1
@@ -626,7 +643,7 @@ class DisableSocketTest(unittest.TestCase):
return self.timeout
def setUp(self):
- self.client = APIClient()
+ self.client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
def test_disable_socket_timeout(self):
"""Test that the timeout is disabled on a generic socket object."""
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index cce99c5..ad88e84 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -1,22 +1,24 @@
import datetime
+import os
+import unittest
+
import docker
-from docker.utils import kwargs_from_env
+import pytest
from docker.constants import (
- DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS
+ DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS,
+ DEFAULT_MAX_POOL_SIZE, IS_WINDOWS_PLATFORM
)
-import os
-import unittest
+from docker.utils import kwargs_from_env
from . import fake_api
-import pytest
try:
from unittest import mock
except ImportError:
import mock
-
TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs')
+POOL_SIZE = 20
class ClientTest(unittest.TestCase):
@@ -25,33 +27,33 @@ class ClientTest(unittest.TestCase):
def test_events(self, mock_func):
since = datetime.datetime(2016, 1, 1, 0, 0)
mock_func.return_value = fake_api.get_fake_events()[1]
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.events(since=since) == mock_func.return_value
mock_func.assert_called_with(since=since)
@mock.patch('docker.api.APIClient.info')
def test_info(self, mock_func):
mock_func.return_value = fake_api.get_fake_info()[1]
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.info() == mock_func.return_value
mock_func.assert_called_with()
@mock.patch('docker.api.APIClient.ping')
def test_ping(self, mock_func):
mock_func.return_value = True
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.ping() is True
mock_func.assert_called_with()
@mock.patch('docker.api.APIClient.version')
def test_version(self, mock_func):
mock_func.return_value = fake_api.get_fake_version()[1]
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.version() == mock_func.return_value
mock_func.assert_called_with()
def test_call_api_client_method(self):
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
with pytest.raises(AttributeError) as cm:
client.create_container()
s = cm.exconly()
@@ -65,7 +67,9 @@ class ClientTest(unittest.TestCase):
assert "this method is now on the object APIClient" not in s
def test_call_containers(self):
- client = docker.DockerClient(**kwargs_from_env())
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION,
+ **kwargs_from_env())
with pytest.raises(TypeError) as cm:
client.containers()
@@ -74,6 +78,84 @@ class ClientTest(unittest.TestCase):
assert "'ContainerCollection' object is not callable" in s
assert "docker.APIClient" in s
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
+ )
+ @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
+ def test_default_pool_size_unix(self, mock_obj):
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ base_url = "{base_url}/v{version}/_ping".format(
+ base_url=client.api.base_url,
+ version=client.api._version
+ )
+
+ mock_obj.assert_called_once_with(base_url,
+ "/var/run/docker.sock",
+ 60,
+ maxsize=DEFAULT_MAX_POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
+ )
+ @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
+ def test_default_pool_size_win(self, mock_obj):
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ mock_obj.assert_called_once_with("//./pipe/docker_engine",
+ 60,
+ maxsize=DEFAULT_MAX_POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
+ )
+ @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
+ def test_pool_size_unix(self, mock_obj):
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION,
+ max_pool_size=POOL_SIZE
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ base_url = "{base_url}/v{version}/_ping".format(
+ base_url=client.api.base_url,
+ version=client.api._version
+ )
+
+ mock_obj.assert_called_once_with(base_url,
+ "/var/run/docker.sock",
+ 60,
+ maxsize=POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
+ )
+ @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
+ def test_pool_size_win(self, mock_obj):
+ client = docker.DockerClient(
+ version=DEFAULT_DOCKER_API_VERSION,
+ max_pool_size=POOL_SIZE
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ mock_obj.assert_called_once_with("//./pipe/docker_engine",
+ 60,
+ maxsize=POOL_SIZE
+ )
+
class FromEnvTest(unittest.TestCase):
@@ -90,7 +172,7 @@ class FromEnvTest(unittest.TestCase):
os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
DOCKER_CERT_PATH=TEST_CERT_DIR,
DOCKER_TLS_VERIFY='1')
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api.base_url == "https://192.168.59.103:2376"
def test_from_env_with_version(self):
@@ -102,11 +184,85 @@ class FromEnvTest(unittest.TestCase):
assert client.api._version == '2.32'
def test_from_env_without_version_uses_default(self):
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api._version == DEFAULT_DOCKER_API_VERSION
def test_from_env_without_timeout_uses_default(self):
- client = docker.from_env()
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api.timeout == DEFAULT_TIMEOUT_SECONDS
+
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
+ )
+ @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
+ def test_default_pool_size_from_env_unix(self, mock_obj):
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ base_url = "{base_url}/v{version}/_ping".format(
+ base_url=client.api.base_url,
+ version=client.api._version
+ )
+
+ mock_obj.assert_called_once_with(base_url,
+ "/var/run/docker.sock",
+ 60,
+ maxsize=DEFAULT_MAX_POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
+ )
+ @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
+ def test_default_pool_size_from_env_win(self, mock_obj):
+ client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ mock_obj.assert_called_once_with("//./pipe/docker_engine",
+ 60,
+ maxsize=DEFAULT_MAX_POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
+ )
+ @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
+ def test_pool_size_from_env_unix(self, mock_obj):
+ client = docker.from_env(
+ version=DEFAULT_DOCKER_API_VERSION,
+ max_pool_size=POOL_SIZE
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ base_url = "{base_url}/v{version}/_ping".format(
+ base_url=client.api.base_url,
+ version=client.api._version
+ )
+
+ mock_obj.assert_called_once_with(base_url,
+ "/var/run/docker.sock",
+ 60,
+ maxsize=POOL_SIZE
+ )
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
+ )
+ @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
+ def test_pool_size_from_env_win(self, mock_obj):
+ client = docker.from_env(
+ version=DEFAULT_DOCKER_API_VERSION,
+ max_pool_size=POOL_SIZE
+ )
+ mock_obj.return_value.urlopen.return_value.status = 200
+ client.ping()
+
+ mock_obj.assert_called_once_with("//./pipe/docker_engine",
+ 60,
+ maxsize=POOL_SIZE
+ )
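
The new pool-size tests cover max_pool_size, which caps how many connections the underlying urllib3 pool keeps open to the daemon. Usage is a single keyword argument (sketch):

    import docker

    client = docker.DockerClient(max_pool_size=20)   # or docker.from_env(max_pool_size=20)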
diff --git a/tests/unit/context_test.py b/tests/unit/context_test.py
new file mode 100644
index 0000000..6d6d672
--- /dev/null
+++ b/tests/unit/context_test.py
@@ -0,0 +1,49 @@
+import unittest
+import docker
+import pytest
+from docker.constants import DEFAULT_UNIX_SOCKET
+from docker.constants import DEFAULT_NPIPE
+from docker.constants import IS_WINDOWS_PLATFORM
+from docker.context import ContextAPI, Context
+
+
+class BaseContextTest(unittest.TestCase):
+ @pytest.mark.skipif(
+ IS_WINDOWS_PLATFORM, reason='Linux specific path check'
+ )
+ def test_url_compatibility_on_linux(self):
+ c = Context("test")
+ assert c.Host == DEFAULT_UNIX_SOCKET.strip("http+")
+
+ @pytest.mark.skipif(
+ not IS_WINDOWS_PLATFORM, reason='Windows specific path check'
+ )
+ def test_url_compatibility_on_windows(self):
+ c = Context("test")
+ assert c.Host == DEFAULT_NPIPE
+
+ def test_fail_on_default_context_create(self):
+ with pytest.raises(docker.errors.ContextException):
+ ContextAPI.create_context("default")
+
+ def test_default_in_context_list(self):
+ found = False
+ ctx = ContextAPI.contexts()
+ for c in ctx:
+ if c.Name == "default":
+ found = True
+ assert found is True
+
+ def test_get_current_context(self):
+ assert ContextAPI.get_current_context().Name == "default"
+
+ def test_https_host(self):
+ c = Context("test", host="tcp://testdomain:8080", tls=True)
+ assert c.Host == "https://testdomain:8080"
+
+ def test_context_inspect_without_params(self):
+ ctx = ContextAPI.inspect_context()
+ assert ctx["Name"] == "default"
+ assert ctx["Metadata"]["StackOrchestrator"] == "swarm"
+ assert ctx["Endpoints"]["docker"]["Host"] in [
+ DEFAULT_NPIPE, DEFAULT_UNIX_SOCKET.strip("http+")]
diff --git a/tests/unit/errors_test.py b/tests/unit/errors_test.py
index 2134f86..54c2ba8 100644
--- a/tests/unit/errors_test.py
+++ b/tests/unit/errors_test.py
@@ -101,17 +101,17 @@ class APIErrorTest(unittest.TestCase):
assert err.is_error() is True
def test_create_error_from_exception(self):
- resp = requests.Response()
- resp.status_code = 500
- err = APIError('')
+ resp = requests.Response()
+ resp.status_code = 500
+ err = APIError('')
+ try:
+ resp.raise_for_status()
+ except requests.exceptions.HTTPError as e:
try:
- resp.raise_for_status()
- except requests.exceptions.HTTPError as e:
- try:
- create_api_error_from_http_exception(e)
- except APIError as e:
- err = e
- assert err.is_server_error() is True
+ create_api_error_from_http_exception(e)
+ except APIError as e:
+ err = e
+ assert err.is_server_error() is True
class ContainerErrorTest(unittest.TestCase):
diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py
index e609b64..27e463d 100644
--- a/tests/unit/fake_api.py
+++ b/tests/unit/fake_api.py
@@ -1,6 +1,7 @@
-from . import fake_stat
from docker import constants
+from . import fake_stat
+
CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION)
FAKE_CONTAINER_ID = '3cc2351ab11b'
diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py
index 2147bfd..e85001d 100644
--- a/tests/unit/fake_api_client.py
+++ b/tests/unit/fake_api_client.py
@@ -1,6 +1,7 @@
import copy
-import docker
+import docker
+from docker.constants import DEFAULT_DOCKER_API_VERSION
from . import fake_api
try:
@@ -30,7 +31,7 @@ def make_fake_api_client(overrides=None):
if overrides is None:
overrides = {}
- api_client = docker.APIClient()
+ api_client = docker.APIClient(version=DEFAULT_DOCKER_API_VERSION)
mock_attrs = {
'build.return_value': fake_api.FAKE_IMAGE_ID,
'commit.return_value': fake_api.post_fake_commit()[1],
@@ -50,6 +51,7 @@ def make_fake_api_client(overrides=None):
'networks.return_value': fake_api.get_fake_network_list()[1],
'start.return_value': None,
'wait.return_value': {'StatusCode': 0},
+ 'version.return_value': fake_api.get_fake_version()
}
mock_attrs.update(overrides)
mock_client = CopyReturnMagicMock(**mock_attrs)
@@ -62,6 +64,6 @@ def make_fake_client(overrides=None):
"""
Returns a Client with a fake APIClient.
"""
- client = docker.DockerClient()
+ client = docker.DockerClient(version=DEFAULT_DOCKER_API_VERSION)
client.api = make_fake_api_client(overrides)
return client
diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py
index da5f0ab..c7aa46b 100644
--- a/tests/unit/models_containers_test.py
+++ b/tests/unit/models_containers_test.py
@@ -233,7 +233,7 @@ class ContainerCollectionTest(unittest.TestCase):
assert container.id == FAKE_CONTAINER_ID
client.api.pull.assert_called_with(
- 'alpine', platform=None, tag=None, stream=True
+ 'alpine', platform=None, tag='latest', all_tags=False, stream=True
)
def test_run_with_error(self):
@@ -450,7 +450,7 @@ class ContainerTest(unittest.TestCase):
container = client.containers.get(FAKE_CONTAINER_ID)
container.get_archive('foo')
client.api.get_archive.assert_called_with(
- FAKE_CONTAINER_ID, 'foo', DEFAULT_DATA_CHUNK_SIZE
+ FAKE_CONTAINER_ID, 'foo', DEFAULT_DATA_CHUNK_SIZE, False
)
def test_image(self):
diff --git a/tests/unit/models_images_test.py b/tests/unit/models_images_test.py
index fd894ab..e3d070c 100644
--- a/tests/unit/models_images_test.py
+++ b/tests/unit/models_images_test.py
@@ -44,9 +44,25 @@ class ImageCollectionTest(unittest.TestCase):
def test_pull(self):
client = make_fake_client()
- image = client.images.pull('test_image:latest')
+ image = client.images.pull('test_image:test')
client.api.pull.assert_called_with(
- 'test_image', tag='latest', stream=True
+ 'test_image', tag='test', all_tags=False, stream=True
+ )
+ client.api.inspect_image.assert_called_with('test_image:test')
+ assert isinstance(image, Image)
+ assert image.id == FAKE_IMAGE_ID
+
+ def test_pull_tag_precedence(self):
+ client = make_fake_client()
+ image = client.images.pull('test_image:latest', tag='test')
+ client.api.pull.assert_called_with(
+ 'test_image', tag='test', all_tags=False, stream=True
+ )
+ client.api.inspect_image.assert_called_with('test_image:test')
+
+ image = client.images.pull('test_image')
+ client.api.pull.assert_called_with(
+ 'test_image', tag='latest', all_tags=False, stream=True
)
client.api.inspect_image.assert_called_with('test_image:latest')
assert isinstance(image, Image)
@@ -54,9 +70,9 @@ class ImageCollectionTest(unittest.TestCase):
def test_pull_multiple(self):
client = make_fake_client()
- images = client.images.pull('test_image')
+ images = client.images.pull('test_image', all_tags=True)
client.api.pull.assert_called_with(
- 'test_image', tag=None, stream=True
+ 'test_image', tag='latest', all_tags=True, stream=True
)
client.api.images.assert_called_with(
all=False, name='test_image', filters=None
diff --git a/tests/unit/models_services_test.py b/tests/unit/models_services_test.py
index a4ac50c..07bb589 100644
--- a/tests/unit/models_services_test.py
+++ b/tests/unit/models_services_test.py
@@ -28,6 +28,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
'constraints': ['foo=bar'],
'preferences': ['bar=baz'],
'platforms': [('x86_64', 'linux')],
+ 'maxreplicas': 1
})
task_template = kwargs.pop('task_template')
@@ -47,6 +48,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
'Constraints': ['foo=bar'],
'Preferences': ['bar=baz'],
'Platforms': [{'Architecture': 'x86_64', 'OS': 'linux'}],
+ 'MaxReplicas': 1,
}
assert task_template['LogDriver'] == {
'Name': 'logdriver',
diff --git a/tests/unit/sshadapter_test.py b/tests/unit/sshadapter_test.py
new file mode 100644
index 0000000..ddee592
--- /dev/null
+++ b/tests/unit/sshadapter_test.py
@@ -0,0 +1,32 @@
+import unittest
+import docker
+from docker.transport.sshconn import SSHSocket
+
+class SSHAdapterTest(unittest.TestCase):
+ def test_ssh_hostname_prefix_trim(self):
+ conn = docker.transport.SSHHTTPAdapter(base_url="ssh://user@hostname:1234", shell_out=True)
+ assert conn.ssh_host == "user@hostname:1234"
+
+ def test_ssh_parse_url(self):
+ c = SSHSocket(host="user@hostname:1234")
+ assert c.host == "hostname"
+ assert c.port == "1234"
+ assert c.user == "user"
+
+ def test_ssh_parse_hostname_only(self):
+ c = SSHSocket(host="hostname")
+ assert c.host == "hostname"
+ assert c.port == None
+ assert c.user == None
+
+ def test_ssh_parse_user_and_hostname(self):
+ c = SSHSocket(host="user@hostname")
+ assert c.host == "hostname"
+ assert c.port == None
+ assert c.user == "user"
+
+ def test_ssh_parse_hostname_and_port(self):
+ c = SSHSocket(host="hostname:22")
+ assert c.host == "hostname"
+ assert c.port == "22"
+ assert c.user == None
\ No newline at end of file
diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py
index 012f15b..bc6fb5f 100644
--- a/tests/unit/utils_build_test.py
+++ b/tests/unit/utils_build_test.py
@@ -335,7 +335,7 @@ class ExcludePathsTest(unittest.TestCase):
# Dockerignore reference stipulates that absolute paths are
# equivalent to relative paths, hence /../foo should be
# equivalent to ../foo. It also stipulates that paths are run
- # through Go's filepath.Clean, which explicitely "replace
+ # through Go's filepath.Clean, which explicitly "replace
# "/.." by "/" at the beginning of a path".
assert exclude_paths(
base,
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index d9cb002..a53151c 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -5,27 +5,21 @@ import json
import os
import os.path
import shutil
-import sys
import tempfile
import unittest
-
+import pytest
+import six
from docker.api.client import APIClient
-from docker.constants import IS_WINDOWS_PLATFORM
+from docker.constants import IS_WINDOWS_PLATFORM, DEFAULT_DOCKER_API_VERSION
from docker.errors import DockerException
-from docker.utils import (
- convert_filters, convert_volume_binds, decode_json_header, kwargs_from_env,
- parse_bytes, parse_devices, parse_env_file, parse_host,
- parse_repository_tag, split_command, update_headers,
-)
-
+from docker.utils import (convert_filters, convert_volume_binds,
+ decode_json_header, kwargs_from_env, parse_bytes,
+ parse_devices, parse_env_file, parse_host,
+ parse_repository_tag, split_command, update_headers)
from docker.utils.ports import build_port_bindings, split_port
from docker.utils.utils import format_environment
-import pytest
-
-import six
-
TEST_CERT_DIR = os.path.join(
os.path.dirname(__file__),
'testdata/certs',
@@ -41,7 +35,7 @@ class DecoratorsTest(unittest.TestCase):
def f(self, headers=None):
return headers
- client = APIClient()
+ client = APIClient(version=DEFAULT_DOCKER_API_VERSION)
client._general_configs = {}
g = update_headers(f)
@@ -92,6 +86,7 @@ class KwargsFromEnvTest(unittest.TestCase):
assert kwargs['tls'].verify
parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True)
+ kwargs['version'] = DEFAULT_DOCKER_API_VERSION
try:
client = APIClient(**kwargs)
assert parsed_host == client.base_url
@@ -112,6 +107,7 @@ class KwargsFromEnvTest(unittest.TestCase):
assert kwargs['tls'].assert_hostname is True
assert kwargs['tls'].verify is False
parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True)
+ kwargs['version'] = DEFAULT_DOCKER_API_VERSION
try:
client = APIClient(**kwargs)
assert parsed_host == client.base_url
@@ -447,11 +443,7 @@ class ParseBytesTest(unittest.TestCase):
parse_bytes("127.0.0.1K")
def test_parse_bytes_float(self):
- with pytest.raises(DockerException):
- parse_bytes("1.5k")
-
- def test_parse_bytes_maxint(self):
- assert parse_bytes("{0}k".format(sys.maxsize)) == sys.maxsize * 1024
+ assert parse_bytes("1.5k") == 1536
class UtilsTest(unittest.TestCase):