From d58ca9720725219fd25a4145b8b5adbe1ed2ebc5 Mon Sep 17 00:00:00 2001
From: Roger Camargo
Date: Thu, 3 Jun 2021 09:33:24 -0300
Subject: [PATCH 01/12] [DOCS] Update the Image.save documentation with a
 working example. Issue #836

Signed-off-by: Roger Camargo
---
 docker/models/images.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docker/models/images.py b/docker/models/images.py
index e63558859..28cfc93ce 100644
--- a/docker/models/images.py
+++ b/docker/models/images.py
@@ -84,9 +84,9 @@ def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE, named=False):
 
         Example:
 
-            >>> image = cli.get_image("busybox:latest")
+            >>> image = cli.images.get("busybox:latest")
             >>> f = open('/tmp/busybox-latest.tar', 'wb')
-            >>> for chunk in image:
+            >>> for chunk in image.save():
             >>>     f.write(chunk)
             >>> f.close()
         """

From 5fcc293ba268a89ea1535114d36fbdcb73ec3d88 Mon Sep 17 00:00:00 2001
From: Anthony Sottile
Date: Mon, 5 Jul 2021 18:24:23 -0400
Subject: [PATCH 02/12] use python3.6+ constructs

Signed-off-by: Anthony Sottile
---
 docker/api/build.py | 12 +--
 docker/api/client.py | 20 ++--
 docker/api/config.py | 7 +-
 docker/api/container.py | 8 +-
 docker/api/daemon.py | 2 +-
 docker/api/exec_api.py | 6 +-
 docker/api/image.py | 10 +-
 docker/api/network.py | 2 +-
 docker/api/plugin.py | 6 +-
 docker/api/secret.py | 7 +-
 docker/api/service.py | 4 +-
 docker/api/swarm.py | 4 +-
 docker/api/volume.py | 2 +-
 docker/auth.py | 38 ++++---
 docker/client.py | 4 +-
 docker/constants.py | 2 +-
 docker/context/api.py | 6 +-
 docker/context/config.py | 4 +-
 docker/context/context.py | 4 +-
 docker/credentials/store.py | 27 ++---
 docker/errors.py | 26 ++---
 docker/models/configs.py | 2 +-
 docker/models/images.py | 14 ++-
 docker/models/plugins.py | 5 +-
 docker/models/resource.py | 9 +-
 docker/models/secrets.py | 2 +-
 docker/models/swarm.py | 2 +-
 docker/tls.py | 2 +-
 docker/transport/basehttpadapter.py | 2 +-
 docker/transport/npipeconn.py | 17 ++-
 docker/transport/npipesocket.py | 8 +-
 docker/transport/sshconn.py | 27 +++--
 docker/transport/ssladapter.py | 4 +-
 docker/transport/unixconn.py | 26 ++---
 docker/types/base.py | 5 +-
 docker/types/containers.py | 42 ++++----
 docker/types/daemon.py | 4 +-
 docker/types/healthcheck.py | 8 +-
 docker/types/services.py | 20 ++--
 docker/utils/build.py | 25 ++---
 docker/utils/config.py | 6 +-
 docker/utils/decorators.py | 2 +-
 docker/utils/fnmatch.py | 2 +-
 docker/utils/json_stream.py | 13 +--
 docker/utils/ports.py | 2 +-
 docker/utils/socket.py | 14 ++-
 docker/utils/utils.py | 32 +++--
 docker/version.py | 2 +-
 docs/conf.py | 19 ++--
 scripts/versions.py | 4 +-
 setup.py | 1 -
 tests/helpers.py | 11 +-
 tests/integration/api_build_test.py | 19 ++--
 tests/integration/api_client_test.py | 2 +-
 tests/integration/api_config_test.py | 4 +-
 tests/integration/api_container_test.py | 42 ++++----
 tests/integration/api_exec_test.py | 2 +-
 tests/integration/api_image_test.py | 6 +-
 tests/integration/api_network_test.py | 2 +-
 tests/integration/api_secret_test.py | 4 +-
 tests/integration/api_service_test.py | 32 +++---
 tests/integration/api_swarm_test.py | 4 +-
 tests/integration/base.py | 4 +-
 tests/integration/conftest.py | 6 +-
 tests/integration/credentials/store_test.py | 7 +-
 tests/integration/credentials/utils_test.py | 2 +-
 tests/integration/models_images_test.py | 22 ++--
 tests/integration/regression_test.py | 10 +-
 tests/ssh/api_build_test.py | 19 ++--
 tests/ssh/base.py | 2 +-
 tests/unit/api_container_test.py | 27 +++--
 tests/unit/api_exec_test.py | 10 +-
 tests/unit/api_image_test.py | 2 +-
 tests/unit/api_network_test.py | 20 ++--
 tests/unit/api_test.py | 44 ++++----
 tests/unit/api_volume_test.py | 4 +-
 tests/unit/auth_test.py | 22 ++--
 tests/unit/client_test.py | 2 +-
 tests/unit/dockertypes_test.py | 4 +-
 tests/unit/errors_test.py | 2 +-
 tests/unit/fake_api.py | 100 ++++++++--------
 tests/unit/fake_api_client.py | 4 +-
 tests/unit/models_resources_test.py | 2 +-
 tests/unit/models_secrets_test.py | 2 +-
 tests/unit/models_services_test.py | 8 +-
 tests/unit/ssladapter_test.py | 38 +++----
 tests/unit/swarm_test.py | 2 -
 tests/unit/utils_build_test.py | 112 ++++++++++----------
 tests/unit/utils_config_test.py | 2 +-
 tests/unit/utils_json_stream_test.py | 12 +--
 tests/unit/utils_proxy_test.py | 7 +-
 tests/unit/utils_test.py | 34 +++---
 92 files changed, 524 insertions(+), 658 deletions(-)

diff --git a/docker/api/build.py b/docker/api/build.py
index 365129a06..aac43c460 100644
--- a/docker/api/build.py
+++ b/docker/api/build.py
@@ -12,7 +12,7 @@
 log = logging.getLogger(__name__)
 
 
-class BuildApiMixin(object):
+class BuildApiMixin:
     def build(self, path=None, tag=None, quiet=False, fileobj=None,
               nocache=False, rm=False, timeout=None,
               custom_context=False, encoding=None, pull=False,
@@ -132,7 +132,7 @@ def build(self, path=None, tag=None, quiet=False, fileobj=None,
         for key in container_limits.keys():
             if key not in constants.CONTAINER_LIMITS_KEYS:
                 raise errors.DockerException(
-                    'Invalid container_limits key {0}'.format(key)
+                    f'Invalid container_limits key {key}'
                 )
 
         if custom_context:
@@ -150,7 +150,7 @@ def build(self, path=None, tag=None, quiet=False, fileobj=None,
             dockerignore = os.path.join(path, '.dockerignore')
             exclude = None
             if os.path.exists(dockerignore):
-                with open(dockerignore, 'r') as f:
+                with open(dockerignore) as f:
                     exclude = list(filter(
                         lambda x: x != '' and x[0] != '#',
                         [l.strip() for l in f.read().splitlines()]
@@ -313,7 +313,7 @@ def _set_auth_headers(self, headers):
                 auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
 
             log.debug(
-                'Sending auth config ({0})'.format(
+                'Sending auth config ({})'.format(
                     ', '.join(repr(k) for k in auth_data.keys())
                 )
            )
@@ -344,9 +344,9 @@ def process_dockerfile(dockerfile, path):
     if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
            os.path.relpath(abs_dockerfile, path).startswith('..')):
         # Dockerfile not in context - read data to insert into tar later
-        with open(abs_dockerfile, 'r') as df:
+        with open(abs_dockerfile) as df:
             return (
-                '.dockerfile.{0:x}'.format(random.getrandbits(160)),
+                f'.dockerfile.{random.getrandbits(160):x}',
                 df.read()
             )
 
diff --git a/docker/api/client.py b/docker/api/client.py
index ee9ad9c3b..f0cb39b86 100644
--- a/docker/api/client.py
+++ b/docker/api/client.py
@@ -107,7 +107,7 @@ def __init__(self, base_url=None, version=None,
                  user_agent=DEFAULT_USER_AGENT, num_pools=None,
                  credstore_env=None, use_ssh_client=False,
                  max_pool_size=DEFAULT_MAX_POOL_SIZE):
-        super(APIClient, self).__init__()
+        super().__init__()
 
         if tls and not base_url:
             raise TLSParameterError(
@@ -199,7 +199,7 @@ def __init__(self, base_url=None, version=None,
             self._version = version
         if not isinstance(self._version, str):
             raise DockerException(
-                'Version parameter must be a string or None. 
Found {}'.format( type(version).__name__ ) ) @@ -219,7 +219,7 @@ def _retrieve_server_version(self): ) except Exception as e: raise DockerException( - 'Error while fetching server API version: {0}'.format(e) + f'Error while fetching server API version: {e}' ) def _set_request_timeout(self, kwargs): @@ -248,7 +248,7 @@ def _url(self, pathfmt, *args, **kwargs): for arg in args: if not isinstance(arg, str): raise ValueError( - 'Expected a string but found {0} ({1}) ' + 'Expected a string but found {} ({}) ' 'instead'.format(arg, type(arg)) ) @@ -256,11 +256,11 @@ def _url(self, pathfmt, *args, **kwargs): args = map(quote_f, args) if kwargs.get('versioned_api', True): - return '{0}/v{1}{2}'.format( + return '{}/v{}{}'.format( self.base_url, self._version, pathfmt.format(*args) ) else: - return '{0}{1}'.format(self.base_url, pathfmt.format(*args)) + return f'{self.base_url}{pathfmt.format(*args)}' def _raise_for_status(self, response): """Raises stored :class:`APIError`, if one occurred.""" @@ -341,8 +341,7 @@ def _stream_helper(self, response, decode=False): if response.raw._fp.chunked: if decode: - for chunk in json_stream(self._stream_helper(response, False)): - yield chunk + yield from json_stream(self._stream_helper(response, False)) else: reader = response.raw while not reader.closed: @@ -398,8 +397,7 @@ def _multiplexed_response_stream_helper(self, response): def _stream_raw_result(self, response, chunk_size=1, decode=True): ''' Stream result for TTY-enabled container and raw binary data''' self._raise_for_status(response) - for out in response.iter_content(chunk_size, decode): - yield out + yield from response.iter_content(chunk_size, decode) def _read_from_socket(self, response, stream, tty=True, demux=False): socket = self._get_raw_response_socket(response) @@ -477,7 +475,7 @@ def _unmount(self, *args): def get_adapter(self, url): try: - return super(APIClient, self).get_adapter(url) + return super().get_adapter(url) except requests.exceptions.InvalidSchema as e: if self._custom_adapter: return self._custom_adapter diff --git a/docker/api/config.py b/docker/api/config.py index 93e5168f6..8cf74e1a2 100644 --- a/docker/api/config.py +++ b/docker/api/config.py @@ -1,11 +1,9 @@ import base64 -import six - from .. import utils -class ConfigApiMixin(object): +class ConfigApiMixin: @utils.minimum_version('1.30') def create_config(self, name, data, labels=None): """ @@ -22,8 +20,7 @@ def create_config(self, name, data, labels=None): data = data.encode('utf-8') data = base64.b64encode(data) - if six.PY3: - data = data.decode('ascii') + data = data.decode('ascii') body = { 'Data': data, 'Name': name, diff --git a/docker/api/container.py b/docker/api/container.py index 369eba952..83fcd4f64 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -1,7 +1,5 @@ from datetime import datetime -import six - from .. import errors from .. import utils from ..constants import DEFAULT_DATA_CHUNK_SIZE @@ -12,7 +10,7 @@ from ..types import NetworkingConfig -class ContainerApiMixin(object): +class ContainerApiMixin: @utils.check_resource('container') def attach(self, container, stdout=True, stderr=True, stream=False, logs=False, demux=False): @@ -408,7 +406,7 @@ def create_container(self, image, command=None, hostname=None, user=None, :py:class:`docker.errors.APIError` If the server returns an error. 
""" - if isinstance(volumes, six.string_types): + if isinstance(volumes, str): volumes = [volumes, ] if isinstance(environment, dict): @@ -790,7 +788,7 @@ def kill(self, container, signal=None): url = self._url("/containers/{0}/kill", container) params = {} if signal is not None: - if not isinstance(signal, six.string_types): + if not isinstance(signal, str): signal = int(signal) params['signal'] = signal res = self._post(url, params=params) diff --git a/docker/api/daemon.py b/docker/api/daemon.py index 6b719268e..a85721326 100644 --- a/docker/api/daemon.py +++ b/docker/api/daemon.py @@ -4,7 +4,7 @@ from .. import auth, types, utils -class DaemonApiMixin(object): +class DaemonApiMixin: @utils.minimum_version('1.25') def df(self): """ diff --git a/docker/api/exec_api.py b/docker/api/exec_api.py index 4c49ac338..496308a0f 100644 --- a/docker/api/exec_api.py +++ b/docker/api/exec_api.py @@ -1,10 +1,8 @@ -import six - from .. import errors from .. import utils -class ExecApiMixin(object): +class ExecApiMixin: @utils.check_resource('container') def exec_create(self, container, cmd, stdout=True, stderr=True, stdin=False, tty=False, privileged=False, user='', @@ -45,7 +43,7 @@ def exec_create(self, container, cmd, stdout=True, stderr=True, 'Setting environment for exec is not supported in API < 1.25' ) - if isinstance(cmd, six.string_types): + if isinstance(cmd, str): cmd = utils.split_command(cmd) if isinstance(environment, dict): diff --git a/docker/api/image.py b/docker/api/image.py index 772101f4e..772d88957 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -1,15 +1,13 @@ import logging import os -import six - from .. import auth, errors, utils from ..constants import DEFAULT_DATA_CHUNK_SIZE log = logging.getLogger(__name__) -class ImageApiMixin(object): +class ImageApiMixin: @utils.check_resource('image') def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE): @@ -130,7 +128,7 @@ def import_image(self, src=None, repository=None, tag=None, image=None, params = _import_image_params( repository, tag, image, - src=(src if isinstance(src, six.string_types) else None), + src=(src if isinstance(src, str) else None), changes=changes ) headers = {'Content-Type': 'application/tar'} @@ -139,7 +137,7 @@ def import_image(self, src=None, repository=None, tag=None, image=None, return self._result( self._post(u, data=None, params=params) ) - elif isinstance(src, six.string_types): # from file path + elif isinstance(src, str): # from file path with open(src, 'rb') as f: return self._result( self._post( @@ -571,7 +569,7 @@ def tag(self, image, repository, tag=None, force=False): def is_file(src): try: return ( - isinstance(src, six.string_types) and + isinstance(src, str) and os.path.isfile(src) ) except TypeError: # a data string will make isfile() raise a TypeError diff --git a/docker/api/network.py b/docker/api/network.py index 139c2d1a8..0b76bf321 100644 --- a/docker/api/network.py +++ b/docker/api/network.py @@ -4,7 +4,7 @@ from .. import utils -class NetworkApiMixin(object): +class NetworkApiMixin: def networks(self, names=None, ids=None, filters=None): """ List networks. Similar to the ``docker network ls`` command. diff --git a/docker/api/plugin.py b/docker/api/plugin.py index f6c0b1338..57110f113 100644 --- a/docker/api/plugin.py +++ b/docker/api/plugin.py @@ -1,9 +1,7 @@ -import six - from .. 
import auth, utils -class PluginApiMixin(object): +class PluginApiMixin: @utils.minimum_version('1.25') @utils.check_resource('name') def configure_plugin(self, name, options): @@ -21,7 +19,7 @@ def configure_plugin(self, name, options): url = self._url('/plugins/{0}/set', name) data = options if isinstance(data, dict): - data = ['{0}={1}'.format(k, v) for k, v in six.iteritems(data)] + data = [f'{k}={v}' for k, v in data.items()] res = self._post_json(url, data=data) self._raise_for_status(res) return True diff --git a/docker/api/secret.py b/docker/api/secret.py index e57952b53..cd440b95f 100644 --- a/docker/api/secret.py +++ b/docker/api/secret.py @@ -1,12 +1,10 @@ import base64 -import six - from .. import errors from .. import utils -class SecretApiMixin(object): +class SecretApiMixin: @utils.minimum_version('1.25') def create_secret(self, name, data, labels=None, driver=None): """ @@ -25,8 +23,7 @@ def create_secret(self, name, data, labels=None, driver=None): data = data.encode('utf-8') data = base64.b64encode(data) - if six.PY3: - data = data.decode('ascii') + data = data.decode('ascii') body = { 'Data': data, 'Name': name, diff --git a/docker/api/service.py b/docker/api/service.py index e9027bfa2..371f541e1 100644 --- a/docker/api/service.py +++ b/docker/api/service.py @@ -45,7 +45,7 @@ def raise_version_error(param, min_version): if task_template is not None: if 'ForceUpdate' in task_template and utils.version_lt( version, '1.25'): - raise_version_error('force_update', '1.25') + raise_version_error('force_update', '1.25') if task_template.get('Placement'): if utils.version_lt(version, '1.30'): @@ -113,7 +113,7 @@ def _merge_task_template(current, override): return merged -class ServiceApiMixin(object): +class ServiceApiMixin: @utils.minimum_version('1.24') def create_service( self, task_template, name=None, labels=None, mode=None, diff --git a/docker/api/swarm.py b/docker/api/swarm.py index 897f08e42..2ec1aea5e 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -1,5 +1,5 @@ import logging -from six.moves import http_client +import http.client as http_client from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE from .. import errors from .. import types @@ -8,7 +8,7 @@ log = logging.getLogger(__name__) -class SwarmApiMixin(object): +class SwarmApiMixin: def create_swarm_spec(self, *args, **kwargs): """ diff --git a/docker/api/volume.py b/docker/api/volume.py index 900a6086b..c6b26fe38 100644 --- a/docker/api/volume.py +++ b/docker/api/volume.py @@ -2,7 +2,7 @@ from .. import utils -class VolumeApiMixin(object): +class VolumeApiMixin: def volumes(self, filters=None): """ List volumes currently registered by the docker daemon. Similar to the diff --git a/docker/auth.py b/docker/auth.py index 6a07ea205..4fa798fcc 100644 --- a/docker/auth.py +++ b/docker/auth.py @@ -2,14 +2,12 @@ import json import logging -import six - from . import credentials from . 
import errors from .utils import config INDEX_NAME = 'docker.io' -INDEX_URL = 'https://index.{0}/v1/'.format(INDEX_NAME) +INDEX_URL = f'https://index.{INDEX_NAME}/v1/' TOKEN_USERNAME = '' log = logging.getLogger(__name__) @@ -18,13 +16,13 @@ def resolve_repository_name(repo_name): if '://' in repo_name: raise errors.InvalidRepository( - 'Repository name cannot contain a scheme ({0})'.format(repo_name) + f'Repository name cannot contain a scheme ({repo_name})' ) index_name, remote_name = split_repo_name(repo_name) if index_name[0] == '-' or index_name[-1] == '-': raise errors.InvalidRepository( - 'Invalid index name ({0}). Cannot begin or end with a' + 'Invalid index name ({}). Cannot begin or end with a' ' hyphen.'.format(index_name) ) return resolve_index_name(index_name), remote_name @@ -98,10 +96,10 @@ def parse_auth(cls, entries, raise_on_error=False): """ conf = {} - for registry, entry in six.iteritems(entries): + for registry, entry in entries.items(): if not isinstance(entry, dict): log.debug( - 'Config entry for key {0} is not auth config'.format( + 'Config entry for key {} is not auth config'.format( registry ) ) @@ -111,14 +109,14 @@ def parse_auth(cls, entries, raise_on_error=False): # keys is not formatted properly. if raise_on_error: raise errors.InvalidConfigFile( - 'Invalid configuration for registry {0}'.format( + 'Invalid configuration for registry {}'.format( registry ) ) return {} if 'identitytoken' in entry: log.debug( - 'Found an IdentityToken entry for registry {0}'.format( + 'Found an IdentityToken entry for registry {}'.format( registry ) ) @@ -132,7 +130,7 @@ def parse_auth(cls, entries, raise_on_error=False): # a valid value in the auths config. # https://github.com/docker/compose/issues/3265 log.debug( - 'Auth data for {0} is absent. Client might be using a ' + 'Auth data for {} is absent. Client might be using a ' 'credentials store instead.'.format(registry) ) conf[registry] = {} @@ -140,7 +138,7 @@ def parse_auth(cls, entries, raise_on_error=False): username, password = decode_auth(entry['auth']) log.debug( - 'Found entry (registry={0}, username={1})' + 'Found entry (registry={}, username={})' .format(repr(registry), repr(username)) ) @@ -170,7 +168,7 @@ def load_config(cls, config_path, config_dict, credstore_env=None): try: with open(config_file) as f: config_dict = json.load(f) - except (IOError, KeyError, ValueError) as e: + except (OSError, KeyError, ValueError) as e: # Likely missing new Docker config file or it's in an # unknown format, continue to attempt to read old location # and format. 
@@ -230,7 +228,7 @@ def resolve_authconfig(self, registry=None): store_name = self.get_credential_store(registry) if store_name is not None: log.debug( - 'Using credentials store "{0}"'.format(store_name) + f'Using credentials store "{store_name}"' ) cfg = self._resolve_authconfig_credstore(registry, store_name) if cfg is not None: @@ -239,15 +237,15 @@ def resolve_authconfig(self, registry=None): # Default to the public index server registry = resolve_index_name(registry) if registry else INDEX_NAME - log.debug("Looking for auth entry for {0}".format(repr(registry))) + log.debug(f"Looking for auth entry for {repr(registry)}") if registry in self.auths: - log.debug("Found {0}".format(repr(registry))) + log.debug(f"Found {repr(registry)}") return self.auths[registry] - for key, conf in six.iteritems(self.auths): + for key, conf in self.auths.items(): if resolve_index_name(key) == registry: - log.debug("Found {0}".format(repr(key))) + log.debug(f"Found {repr(key)}") return conf log.debug("No entry found") @@ -258,7 +256,7 @@ def _resolve_authconfig_credstore(self, registry, credstore_name): # The ecosystem is a little schizophrenic with index.docker.io VS # docker.io - in that case, it seems the full URL is necessary. registry = INDEX_URL - log.debug("Looking for auth entry for {0}".format(repr(registry))) + log.debug(f"Looking for auth entry for {repr(registry)}") store = self._get_store_instance(credstore_name) try: data = store.get(registry) @@ -278,7 +276,7 @@ def _resolve_authconfig_credstore(self, registry, credstore_name): return None except credentials.StoreError as e: raise errors.DockerException( - 'Credentials store error: {0}'.format(repr(e)) + f'Credentials store error: {repr(e)}' ) def _get_store_instance(self, name): @@ -329,7 +327,7 @@ def convert_to_hostname(url): def decode_auth(auth): - if isinstance(auth, six.string_types): + if isinstance(auth, str): auth = auth.encode('ascii') s = base64.b64decode(auth) login, pwd = s.split(b':', 1) diff --git a/docker/client.py b/docker/client.py index 5add5d730..4dbd846f1 100644 --- a/docker/client.py +++ b/docker/client.py @@ -13,7 +13,7 @@ from .utils import kwargs_from_env -class DockerClient(object): +class DockerClient: """ A client for communicating with a Docker server. @@ -212,7 +212,7 @@ def close(self): close.__doc__ = APIClient.close.__doc__ def __getattr__(self, name): - s = ["'DockerClient' object has no attribute '{}'".format(name)] + s = [f"'DockerClient' object has no attribute '{name}'"] # If a user calls a method on APIClient, they if hasattr(APIClient, name): s.append("In Docker SDK for Python 2.0, this method is now on the " diff --git a/docker/constants.py b/docker/constants.py index 43fce6138..218e49153 100644 --- a/docker/constants.py +++ b/docker/constants.py @@ -28,7 +28,7 @@ IS_WINDOWS_PLATFORM = (sys.platform == 'win32') WINDOWS_LONGPATH_PREFIX = '\\\\?\\' -DEFAULT_USER_AGENT = "docker-sdk-python/{0}".format(version) +DEFAULT_USER_AGENT = f"docker-sdk-python/{version}" DEFAULT_NUM_POOLS = 25 # The OpenSSH server default value for MaxSessions is 10 which means we can diff --git a/docker/context/api.py b/docker/context/api.py index c45115bce..380e8c4c4 100644 --- a/docker/context/api.py +++ b/docker/context/api.py @@ -9,7 +9,7 @@ from docker.context import Context -class ContextAPI(object): +class ContextAPI: """Context API. Contains methods for context management: create, list, remove, get, inspect. 
@@ -109,7 +109,7 @@ def contexts(cls): if filename == METAFILE: try: data = json.load( - open(os.path.join(dirname, filename), "r")) + open(os.path.join(dirname, filename))) names.append(data["Name"]) except Exception as e: raise errors.ContextException( @@ -138,7 +138,7 @@ def set_current_context(cls, name="default"): err = write_context_name_to_docker_config(name) if err: raise errors.ContextException( - 'Failed to set current context: {}'.format(err)) + f'Failed to set current context: {err}') @classmethod def remove_context(cls, name): diff --git a/docker/context/config.py b/docker/context/config.py index baf54f797..d761aef13 100644 --- a/docker/context/config.py +++ b/docker/context/config.py @@ -15,7 +15,7 @@ def get_current_context_name(): docker_cfg_path = find_config_file() if docker_cfg_path: try: - with open(docker_cfg_path, "r") as f: + with open(docker_cfg_path) as f: name = json.load(f).get("currentContext", "default") except Exception: return "default" @@ -29,7 +29,7 @@ def write_context_name_to_docker_config(name=None): config = {} if docker_cfg_path: try: - with open(docker_cfg_path, "r") as f: + with open(docker_cfg_path) as f: config = json.load(f) except Exception as e: return e diff --git a/docker/context/context.py b/docker/context/context.py index f4aff6b0d..dbaa01cb5 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -94,7 +94,7 @@ def _load_meta(cls, name): try: with open(meta_file) as f: metadata = json.load(f) - except (IOError, KeyError, ValueError) as e: + except (OSError, KeyError, ValueError) as e: # unknown format raise Exception("""Detected corrupted meta file for context {} : {}""".format(name, e)) @@ -171,7 +171,7 @@ def remove(self): rmtree(self.tls_path) def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, self.name) + return f"<{self.__class__.__name__}: '{self.name}'>" def __str__(self): return json.dumps(self.__call__(), indent=2) diff --git a/docker/credentials/store.py b/docker/credentials/store.py index 001788897..e55976f18 100644 --- a/docker/credentials/store.py +++ b/docker/credentials/store.py @@ -2,15 +2,13 @@ import json import subprocess -import six - from . import constants from . import errors from .utils import create_environment_dict from .utils import find_executable -class Store(object): +class Store: def __init__(self, program, environment=None): """ Create a store object that acts as an interface to perform the basic operations for storing, retrieving @@ -30,7 +28,7 @@ def get(self, server): """ Retrieve credentials for `server`. If no credentials are found, a `StoreError` will be raised. """ - if not isinstance(server, six.binary_type): + if not isinstance(server, bytes): server = server.encode('utf-8') data = self._execute('get', server) result = json.loads(data.decode('utf-8')) @@ -41,7 +39,7 @@ def get(self, server): # raise CredentialsNotFound if result['Username'] == '' and result['Secret'] == '': raise errors.CredentialsNotFound( - 'No matching credentials in {}'.format(self.program) + f'No matching credentials in {self.program}' ) return result @@ -61,7 +59,7 @@ def erase(self, server): """ Erase credentials for `server`. Raises a `StoreError` if an error occurs. 
""" - if not isinstance(server, six.binary_type): + if not isinstance(server, bytes): server = server.encode('utf-8') self._execute('erase', server) @@ -75,20 +73,9 @@ def _execute(self, subcmd, data_input): output = None env = create_environment_dict(self.environment) try: - if six.PY3: - output = subprocess.check_output( - [self.exe, subcmd], input=data_input, env=env, - ) - else: - process = subprocess.Popen( - [self.exe, subcmd], stdin=subprocess.PIPE, - stdout=subprocess.PIPE, env=env, - ) - output, _ = process.communicate(data_input) - if process.returncode != 0: - raise subprocess.CalledProcessError( - returncode=process.returncode, cmd='', output=output - ) + output = subprocess.check_output( + [self.exe, subcmd], input=data_input, env=env, + ) except subprocess.CalledProcessError as e: raise errors.process_store_error(e, self.program) except OSError as e: diff --git a/docker/errors.py b/docker/errors.py index ab30a2908..ba952562c 100644 --- a/docker/errors.py +++ b/docker/errors.py @@ -38,25 +38,25 @@ class APIError(requests.exceptions.HTTPError, DockerException): def __init__(self, message, response=None, explanation=None): # requests 1.2 supports response as a keyword argument, but # requests 1.1 doesn't - super(APIError, self).__init__(message) + super().__init__(message) self.response = response self.explanation = explanation def __str__(self): - message = super(APIError, self).__str__() + message = super().__str__() if self.is_client_error(): - message = '{0} Client Error for {1}: {2}'.format( + message = '{} Client Error for {}: {}'.format( self.response.status_code, self.response.url, self.response.reason) elif self.is_server_error(): - message = '{0} Server Error for {1}: {2}'.format( + message = '{} Server Error for {}: {}'.format( self.response.status_code, self.response.url, self.response.reason) if self.explanation: - message = '{0} ("{1}")'.format(message, self.explanation) + message = f'{message} ("{self.explanation}")' return message @@ -133,11 +133,11 @@ def __init__(self, container, exit_status, command, image, stderr): self.image = image self.stderr = stderr - err = ": {}".format(stderr) if stderr is not None else "" + err = f": {stderr}" if stderr is not None else "" msg = ("Command '{}' in image '{}' returned non-zero exit " "status {}{}").format(command, image, exit_status, err) - super(ContainerError, self).__init__(msg) + super().__init__(msg) class StreamParseError(RuntimeError): @@ -147,7 +147,7 @@ def __init__(self, reason): class BuildError(DockerException): def __init__(self, reason, build_log): - super(BuildError, self).__init__(reason) + super().__init__(reason) self.msg = reason self.build_log = build_log @@ -157,8 +157,8 @@ class ImageLoadError(DockerException): def create_unexpected_kwargs_error(name, kwargs): - quoted_kwargs = ["'{}'".format(k) for k in sorted(kwargs)] - text = ["{}() ".format(name)] + quoted_kwargs = [f"'{k}'" for k in sorted(kwargs)] + text = [f"{name}() "] if len(quoted_kwargs) == 1: text.append("got an unexpected keyword argument ") else: @@ -172,7 +172,7 @@ def __init__(self, param): self.param = param def __str__(self): - return ("missing parameter: {}".format(self.param)) + return (f"missing parameter: {self.param}") class ContextAlreadyExists(DockerException): @@ -180,7 +180,7 @@ def __init__(self, name): self.name = name def __str__(self): - return ("context {} already exists".format(self.name)) + return (f"context {self.name} already exists") class ContextException(DockerException): @@ -196,4 +196,4 @@ def __init__(self, 
name): self.name = name def __str__(self): - return ("context '{}' not found".format(self.name)) + return (f"context '{self.name}' not found") diff --git a/docker/models/configs.py b/docker/models/configs.py index 7f23f6500..3588c8b5d 100644 --- a/docker/models/configs.py +++ b/docker/models/configs.py @@ -7,7 +7,7 @@ class Config(Model): id_attribute = 'ID' def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, self.name) + return f"<{self.__class__.__name__}: '{self.name}'>" @property def name(self): diff --git a/docker/models/images.py b/docker/models/images.py index 28cfc93ce..46f8efeed 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -2,8 +2,6 @@ import re import warnings -import six - from ..api import APIClient from ..constants import DEFAULT_DATA_CHUNK_SIZE from ..errors import BuildError, ImageLoadError, InvalidArgument @@ -17,7 +15,7 @@ class Image(Model): An image on the server. """ def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, "', '".join(self.tags)) + return "<{}: '{}'>".format(self.__class__.__name__, "', '".join(self.tags)) @property def labels(self): @@ -93,10 +91,10 @@ def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE, named=False): img = self.id if named: img = self.tags[0] if self.tags else img - if isinstance(named, six.string_types): + if isinstance(named, str): if named not in self.tags: raise InvalidArgument( - "{} is not a valid tag for this image".format(named) + f"{named} is not a valid tag for this image" ) img = named @@ -127,7 +125,7 @@ class RegistryData(Model): Image metadata stored on the registry, including available platforms. """ def __init__(self, image_name, *args, **kwargs): - super(RegistryData, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.image_name = image_name @property @@ -180,7 +178,7 @@ def has_platform(self, platform): parts = platform.split('/') if len(parts) > 3 or len(parts) < 1: raise InvalidArgument( - '"{0}" is not a valid platform descriptor'.format(platform) + f'"{platform}" is not a valid platform descriptor' ) platform = {'os': parts[0]} if len(parts) > 2: @@ -277,7 +275,7 @@ def build(self, **kwargs): If neither ``path`` nor ``fileobj`` is specified. """ resp = self.client.api.build(**kwargs) - if isinstance(resp, six.string_types): + if isinstance(resp, str): return self.get(resp) last_event = None image_id = None diff --git a/docker/models/plugins.py b/docker/models/plugins.py index ae5851c91..37ecefbe0 100644 --- a/docker/models/plugins.py +++ b/docker/models/plugins.py @@ -7,7 +7,7 @@ class Plugin(Model): A plugin on the server. """ def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, self.name) + return f"<{self.__class__.__name__}: '{self.name}'>" @property def name(self): @@ -117,8 +117,7 @@ def upgrade(self, remote=None): if remote is None: remote = self.name privileges = self.client.api.plugin_privileges(remote) - for d in self.client.api.upgrade_plugin(self.name, remote, privileges): - yield d + yield from self.client.api.upgrade_plugin(self.name, remote, privileges) self.reload() diff --git a/docker/models/resource.py b/docker/models/resource.py index ed3900af3..dec2349f6 100644 --- a/docker/models/resource.py +++ b/docker/models/resource.py @@ -1,5 +1,4 @@ - -class Model(object): +class Model: """ A base class for representing a single object on the server. 
""" @@ -18,13 +17,13 @@ def __init__(self, attrs=None, client=None, collection=None): self.attrs = {} def __repr__(self): - return "<%s: %s>" % (self.__class__.__name__, self.short_id) + return f"<{self.__class__.__name__}: {self.short_id}>" def __eq__(self, other): return isinstance(other, self.__class__) and self.id == other.id def __hash__(self): - return hash("%s:%s" % (self.__class__.__name__, self.id)) + return hash(f"{self.__class__.__name__}:{self.id}") @property def id(self): @@ -49,7 +48,7 @@ def reload(self): self.attrs = new_model.attrs -class Collection(object): +class Collection: """ A base class for representing all objects of a particular type on the server. diff --git a/docker/models/secrets.py b/docker/models/secrets.py index e2ee88af0..da01d44c8 100644 --- a/docker/models/secrets.py +++ b/docker/models/secrets.py @@ -7,7 +7,7 @@ class Secret(Model): id_attribute = 'ID' def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, self.name) + return f"<{self.__class__.__name__}: '{self.name}'>" @property def name(self): diff --git a/docker/models/swarm.py b/docker/models/swarm.py index 755c17db4..b0b1a2ef8 100644 --- a/docker/models/swarm.py +++ b/docker/models/swarm.py @@ -11,7 +11,7 @@ class Swarm(Model): id_attribute = 'ID' def __init__(self, *args, **kwargs): - super(Swarm, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if self.client: try: self.reload() diff --git a/docker/tls.py b/docker/tls.py index 1b297ab66..067d55630 100644 --- a/docker/tls.py +++ b/docker/tls.py @@ -5,7 +5,7 @@ from .transport import SSLHTTPAdapter -class TLSConfig(object): +class TLSConfig: """ TLS configuration. diff --git a/docker/transport/basehttpadapter.py b/docker/transport/basehttpadapter.py index 4d819b669..dfbb193b9 100644 --- a/docker/transport/basehttpadapter.py +++ b/docker/transport/basehttpadapter.py @@ -3,6 +3,6 @@ class BaseHTTPAdapter(requests.adapters.HTTPAdapter): def close(self): - super(BaseHTTPAdapter, self).close() + super().close() if hasattr(self, 'pools'): self.pools.clear() diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py index 70d8519dc..df67f2125 100644 --- a/docker/transport/npipeconn.py +++ b/docker/transport/npipeconn.py @@ -1,14 +1,11 @@ -import six +import queue import requests.adapters from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants from .npipesocket import NpipeSocket -if six.PY3: - import http.client as httplib -else: - import httplib +import http.client as httplib try: import requests.packages.urllib3 as urllib3 @@ -18,9 +15,9 @@ RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer -class NpipeHTTPConnection(httplib.HTTPConnection, object): +class NpipeHTTPConnection(httplib.HTTPConnection): def __init__(self, npipe_path, timeout=60): - super(NpipeHTTPConnection, self).__init__( + super().__init__( 'localhost', timeout=timeout ) self.npipe_path = npipe_path @@ -35,7 +32,7 @@ def connect(self): class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): def __init__(self, npipe_path, timeout=60, maxsize=10): - super(NpipeHTTPConnectionPool, self).__init__( + super().__init__( 'localhost', timeout=timeout, maxsize=maxsize ) self.npipe_path = npipe_path @@ -57,7 +54,7 @@ def _get_conn(self, timeout): except AttributeError: # self.pool is None raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") - except six.moves.queue.Empty: + except queue.Empty: if self.block: raise urllib3.exceptions.EmptyPoolError( self, @@ -85,7 +82,7 @@ def __init__(self, base_url, timeout=60, self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(NpipeHTTPAdapter, self).__init__() + super().__init__() def get_connection(self, url, proxies=None): with self.pools.lock: diff --git a/docker/transport/npipesocket.py b/docker/transport/npipesocket.py index 176b5c87a..766372aef 100644 --- a/docker/transport/npipesocket.py +++ b/docker/transport/npipesocket.py @@ -2,7 +2,6 @@ import time import io -import six import win32file import win32pipe @@ -24,7 +23,7 @@ def wrapped(self, *args, **kwargs): return wrapped -class NpipeSocket(object): +class NpipeSocket: """ Partial implementation of the socket API over windows named pipes. This implementation is only designed to be used as a client socket, and server-specific methods (bind, listen, accept...) are not @@ -128,9 +127,6 @@ def recvfrom_into(self, buf, nbytes=0, flags=0): @check_closed def recv_into(self, buf, nbytes=0): - if six.PY2: - return self._recv_into_py2(buf, nbytes) - readbuf = buf if not isinstance(buf, memoryview): readbuf = memoryview(buf) @@ -195,7 +191,7 @@ def __init__(self, npipe_socket): self.sock = npipe_socket def close(self): - super(NpipeFileIOBase, self).close() + super().close() self.sock = None def fileno(self): diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index fb5c6bbe8..3ca45c4c0 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -1,6 +1,7 @@ import paramiko +import queue +import urllib.parse import requests.adapters -import six import logging import os import signal @@ -10,10 +11,7 @@ from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants -if six.PY3: - import http.client as httplib -else: - import httplib +import http.client as httplib try: import requests.packages.urllib3 as urllib3 @@ -25,7 +23,7 @@ class SSHSocket(socket.socket): def __init__(self, host): - super(SSHSocket, self).__init__( + super().__init__( socket.AF_INET, socket.SOCK_STREAM) self.host = host self.port = None @@ -90,8 +88,7 @@ def recv(self, n): def makefile(self, mode): if not self.proc: self.connect() - if six.PY3: - self.proc.stdout.channel = self + self.proc.stdout.channel = self return self.proc.stdout @@ -103,9 +100,9 @@ def close(self): self.proc.terminate() -class SSHConnection(httplib.HTTPConnection, object): +class SSHConnection(httplib.HTTPConnection): def __init__(self, ssh_transport=None, timeout=60, host=None): - super(SSHConnection, self).__init__( + super().__init__( 'localhost', timeout=timeout ) self.ssh_transport = ssh_transport @@ -129,7 +126,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): scheme = 'ssh' def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None): - super(SSHConnectionPool, self).__init__( + super().__init__( 'localhost', timeout=timeout, maxsize=maxsize ) self.ssh_transport = None @@ -152,7 +149,7 @@ def _get_conn(self, timeout): except AttributeError: # self.pool is None raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") - except six.moves.queue.Empty: + except queue.Empty: if self.block: raise urllib3.exceptions.EmptyPoolError( self, @@ -188,12 +185,12 @@ def __init__(self, base_url, timeout=60, self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(SSHHTTPAdapter, self).__init__() + super().__init__() def _create_paramiko_client(self, base_url): logging.getLogger("paramiko").setLevel(logging.WARNING) self.ssh_client = paramiko.SSHClient() - base_url = six.moves.urllib_parse.urlparse(base_url) + base_url = urllib.parse.urlparse(base_url) self.ssh_params = { "hostname": base_url.hostname, "port": base_url.port, @@ -252,6 +249,6 @@ def get_connection(self, url, proxies=None): return pool def close(self): - super(SSHHTTPAdapter, self).close() + super().close() if self.ssh_client: self.ssh_client.close() diff --git a/docker/transport/ssladapter.py b/docker/transport/ssladapter.py index 12de76cdc..31e3014ea 100644 --- a/docker/transport/ssladapter.py +++ b/docker/transport/ssladapter.py @@ -36,7 +36,7 @@ def __init__(self, ssl_version=None, assert_hostname=None, self.ssl_version = ssl_version self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint - super(SSLHTTPAdapter, self).__init__(**kwargs) + super().__init__(**kwargs) def init_poolmanager(self, connections, maxsize, block=False): kwargs = { @@ -59,7 +59,7 @@ def get_connection(self, *args, **kwargs): But we still need to take care of when there is a proxy poolmanager """ - conn = super(SSLHTTPAdapter, self).get_connection(*args, **kwargs) + conn = super().get_connection(*args, **kwargs) if conn.assert_hostname != self.assert_hostname: conn.assert_hostname = self.assert_hostname return conn diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py index 3e040c5af..adb6f18a1 100644 --- a/docker/transport/unixconn.py +++ b/docker/transport/unixconn.py @@ -1,7 +1,6 @@ -import six import requests.adapters import socket -from six.moves import http_client as httplib +import http.client as httplib from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants @@ -15,21 +14,10 @@ RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer -class UnixHTTPResponse(httplib.HTTPResponse, object): - def __init__(self, sock, *args, **kwargs): - disable_buffering = kwargs.pop('disable_buffering', False) - if six.PY2: - # FIXME: We may need to disable buffering on Py3 as well, - # but there's no clear way to do it at the moment. See: - # https://github.com/docker/docker-py/issues/1799 - kwargs['buffering'] = not disable_buffering - super(UnixHTTPResponse, self).__init__(sock, *args, **kwargs) - - -class UnixHTTPConnection(httplib.HTTPConnection, object): +class UnixHTTPConnection(httplib.HTTPConnection): def __init__(self, base_url, unix_socket, timeout=60): - super(UnixHTTPConnection, self).__init__( + super().__init__( 'localhost', timeout=timeout ) self.base_url = base_url @@ -44,7 +32,7 @@ def connect(self): self.sock = sock def putheader(self, header, *values): - super(UnixHTTPConnection, self).putheader(header, *values) + super().putheader(header, *values) if header == 'Connection' and 'Upgrade' in values: self.disable_buffering = True @@ -52,12 +40,12 @@ def response_class(self, sock, *args, **kwargs): if self.disable_buffering: kwargs['disable_buffering'] = True - return UnixHTTPResponse(sock, *args, **kwargs) + return httplib.HTTPResponse(sock, *args, **kwargs) class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): def __init__(self, base_url, socket_path, timeout=60, maxsize=10): - super(UnixHTTPConnectionPool, self).__init__( + super().__init__( 'localhost', timeout=timeout, maxsize=maxsize ) self.base_url = base_url @@ -89,7 +77,7 @@ def __init__(self, socket_url, timeout=60, self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(UnixHTTPAdapter, self).__init__() + super().__init__() def get_connection(self, url, proxies=None): with self.pools.lock: diff --git a/docker/types/base.py b/docker/types/base.py index 689106231..8851f1e2c 100644 --- a/docker/types/base.py +++ b/docker/types/base.py @@ -1,7 +1,4 @@ -import six - - class DictType(dict): def __init__(self, init): - for k, v in six.iteritems(init): + for k, v in init.items(): self[k] = v diff --git a/docker/types/containers.py b/docker/types/containers.py index 9fa4656ab..f1b60b2d2 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -1,5 +1,3 @@ -import six - from .. 
import errors from ..utils.utils import ( convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds, @@ -10,7 +8,7 @@ from .healthcheck import Healthcheck -class LogConfigTypesEnum(object): +class LogConfigTypesEnum: _values = ( 'json-file', 'syslog', @@ -61,7 +59,7 @@ def __init__(self, **kwargs): if config and not isinstance(config, dict): raise ValueError("LogConfig.config must be a dictionary") - super(LogConfig, self).__init__({ + super().__init__({ 'Type': log_driver_type, 'Config': config }) @@ -117,13 +115,13 @@ def __init__(self, **kwargs): name = kwargs.get('name', kwargs.get('Name')) soft = kwargs.get('soft', kwargs.get('Soft')) hard = kwargs.get('hard', kwargs.get('Hard')) - if not isinstance(name, six.string_types): + if not isinstance(name, str): raise ValueError("Ulimit.name must be a string") if soft and not isinstance(soft, int): raise ValueError("Ulimit.soft must be an integer") if hard and not isinstance(hard, int): raise ValueError("Ulimit.hard must be an integer") - super(Ulimit, self).__init__({ + super().__init__({ 'Name': name, 'Soft': soft, 'Hard': hard @@ -184,7 +182,7 @@ def __init__(self, **kwargs): if driver is None: driver = '' - elif not isinstance(driver, six.string_types): + elif not isinstance(driver, str): raise ValueError('DeviceRequest.driver must be a string') if count is None: count = 0 @@ -203,7 +201,7 @@ def __init__(self, **kwargs): elif not isinstance(options, dict): raise ValueError('DeviceRequest.options must be a dict') - super(DeviceRequest, self).__init__({ + super().__init__({ 'Driver': driver, 'Count': count, 'DeviceIDs': device_ids, @@ -297,7 +295,7 @@ def __init__(self, version, binds=None, port_bindings=None, self['MemorySwappiness'] = mem_swappiness if shm_size is not None: - if isinstance(shm_size, six.string_types): + if isinstance(shm_size, str): shm_size = parse_bytes(shm_size) self['ShmSize'] = shm_size @@ -358,7 +356,7 @@ def __init__(self, version, binds=None, port_bindings=None, self['Devices'] = parse_devices(devices) if group_add: - self['GroupAdd'] = [six.text_type(grp) for grp in group_add] + self['GroupAdd'] = [str(grp) for grp in group_add] if dns is not None: self['Dns'] = dns @@ -378,11 +376,11 @@ def __init__(self, version, binds=None, port_bindings=None, if not isinstance(sysctls, dict): raise host_config_type_error('sysctls', sysctls, 'dict') self['Sysctls'] = {} - for k, v in six.iteritems(sysctls): - self['Sysctls'][k] = six.text_type(v) + for k, v in sysctls.items(): + self['Sysctls'][k] = str(v) if volumes_from is not None: - if isinstance(volumes_from, six.string_types): + if isinstance(volumes_from, str): volumes_from = volumes_from.split(',') self['VolumesFrom'] = volumes_from @@ -404,7 +402,7 @@ def __init__(self, version, binds=None, port_bindings=None, if isinstance(lxc_conf, dict): formatted = [] - for k, v in six.iteritems(lxc_conf): + for k, v in lxc_conf.items(): formatted.append({'Key': k, 'Value': str(v)}) lxc_conf = formatted @@ -559,7 +557,7 @@ def __init__(self, version, binds=None, port_bindings=None, self["PidsLimit"] = pids_limit if isolation: - if not isinstance(isolation, six.string_types): + if not isinstance(isolation, str): raise host_config_type_error('isolation', isolation, 'string') if version_lt(version, '1.24'): raise host_config_version_error('isolation', '1.24') @@ -609,7 +607,7 @@ def __init__(self, version, binds=None, port_bindings=None, self['CpuPercent'] = cpu_percent if nano_cpus: - if not isinstance(nano_cpus, six.integer_types): + if not isinstance(nano_cpus, int): 
raise host_config_type_error('nano_cpus', nano_cpus, 'int') if version_lt(version, '1.25'): raise host_config_version_error('nano_cpus', '1.25') @@ -699,17 +697,17 @@ def __init__( 'version 1.29' ) - if isinstance(command, six.string_types): + if isinstance(command, str): command = split_command(command) - if isinstance(entrypoint, six.string_types): + if isinstance(entrypoint, str): entrypoint = split_command(entrypoint) if isinstance(environment, dict): environment = format_environment(environment) if isinstance(labels, list): - labels = dict((lbl, six.text_type('')) for lbl in labels) + labels = {lbl: '' for lbl in labels} if isinstance(ports, list): exposed_ports = {} @@ -720,10 +718,10 @@ def __init__( if len(port_definition) == 2: proto = port_definition[1] port = port_definition[0] - exposed_ports['{0}/{1}'.format(port, proto)] = {} + exposed_ports[f'{port}/{proto}'] = {} ports = exposed_ports - if isinstance(volumes, six.string_types): + if isinstance(volumes, str): volumes = [volumes, ] if isinstance(volumes, list): @@ -752,7 +750,7 @@ def __init__( 'Hostname': hostname, 'Domainname': domainname, 'ExposedPorts': ports, - 'User': six.text_type(user) if user is not None else None, + 'User': str(user) if user is not None else None, 'Tty': tty, 'OpenStdin': stdin_open, 'StdinOnce': stdin_once, diff --git a/docker/types/daemon.py b/docker/types/daemon.py index af3e5bcb5..10e810144 100644 --- a/docker/types/daemon.py +++ b/docker/types/daemon.py @@ -8,7 +8,7 @@ from ..errors import DockerException -class CancellableStream(object): +class CancellableStream: """ Stream wrapper for real-time events, logs, etc. from the server. @@ -32,7 +32,7 @@ def __next__(self): return next(self._stream) except urllib3.exceptions.ProtocolError: raise StopIteration - except socket.error: + except OSError: raise StopIteration next = __next__ diff --git a/docker/types/healthcheck.py b/docker/types/healthcheck.py index 9815018db..dfc88a977 100644 --- a/docker/types/healthcheck.py +++ b/docker/types/healthcheck.py @@ -1,7 +1,5 @@ from .base import DictType -import six - class Healthcheck(DictType): """ @@ -31,7 +29,7 @@ class Healthcheck(DictType): """ def __init__(self, **kwargs): test = kwargs.get('test', kwargs.get('Test')) - if isinstance(test, six.string_types): + if isinstance(test, str): test = ["CMD-SHELL", test] interval = kwargs.get('interval', kwargs.get('Interval')) @@ -39,7 +37,7 @@ def __init__(self, **kwargs): retries = kwargs.get('retries', kwargs.get('Retries')) start_period = kwargs.get('start_period', kwargs.get('StartPeriod')) - super(Healthcheck, self).__init__({ + super().__init__({ 'Test': test, 'Interval': interval, 'Timeout': timeout, @@ -53,7 +51,7 @@ def test(self): @test.setter def test(self, value): - if isinstance(value, six.string_types): + if isinstance(value, str): value = ["CMD-SHELL", value] self['Test'] = value diff --git a/docker/types/services.py b/docker/types/services.py index 29498e971..a6dd76e32 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -1,5 +1,3 @@ -import six - from .. 
import errors from ..constants import IS_WINDOWS_PLATFORM from ..utils import ( @@ -121,7 +119,7 @@ def __init__(self, image, command=None, args=None, hostname=None, env=None, privileges=None, isolation=None, init=None): self['Image'] = image - if isinstance(command, six.string_types): + if isinstance(command, str): command = split_command(command) self['Command'] = command self['Args'] = args @@ -151,7 +149,7 @@ def __init__(self, image, command=None, args=None, hostname=None, env=None, if mounts is not None: parsed_mounts = [] for mount in mounts: - if isinstance(mount, six.string_types): + if isinstance(mount, str): parsed_mounts.append(Mount.parse_mount_string(mount)) else: # If mount already parsed @@ -224,7 +222,7 @@ def __init__(self, target, source, type='volume', read_only=False, self['Source'] = source if type not in ('bind', 'volume', 'tmpfs', 'npipe'): raise errors.InvalidArgument( - 'Unsupported mount type: "{}"'.format(type) + f'Unsupported mount type: "{type}"' ) self['Type'] = type self['ReadOnly'] = read_only @@ -260,7 +258,7 @@ def __init__(self, target, source, type='volume', read_only=False, elif type == 'tmpfs': tmpfs_opts = {} if tmpfs_mode: - if not isinstance(tmpfs_mode, six.integer_types): + if not isinstance(tmpfs_mode, int): raise errors.InvalidArgument( 'tmpfs_mode must be an integer' ) @@ -280,7 +278,7 @@ def parse_mount_string(cls, string): parts = string.split(':') if len(parts) > 3: raise errors.InvalidArgument( - 'Invalid mount format "{0}"'.format(string) + f'Invalid mount format "{string}"' ) if len(parts) == 1: return cls(target=parts[0], source=None) @@ -347,7 +345,7 @@ def _convert_generic_resources_dict(generic_resources): ' (found {})'.format(type(generic_resources)) ) resources = [] - for kind, value in six.iteritems(generic_resources): + for kind, value in generic_resources.items(): resource_type = None if isinstance(value, int): resource_type = 'DiscreteResourceSpec' @@ -443,7 +441,7 @@ class RollbackConfig(UpdateConfig): pass -class RestartConditionTypesEnum(object): +class RestartConditionTypesEnum: _values = ( 'none', 'on-failure', @@ -474,7 +472,7 @@ def __init__(self, condition=RestartConditionTypesEnum.NONE, delay=0, max_attempts=0, window=0): if condition not in self.condition_types._values: raise TypeError( - 'Invalid RestartPolicy condition {0}'.format(condition) + f'Invalid RestartPolicy condition {condition}' ) self['Condition'] = condition @@ -533,7 +531,7 @@ def convert_service_ports(ports): ) result = [] - for k, v in six.iteritems(ports): + for k, v in ports.items(): port_spec = { 'Protocol': 'tcp', 'PublishedPort': k diff --git a/docker/utils/build.py b/docker/utils/build.py index 5787cab0f..ac060434d 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -4,8 +4,6 @@ import tarfile import tempfile -import six - from .fnmatch import fnmatch from ..constants import IS_WINDOWS_PLATFORM @@ -69,7 +67,7 @@ def create_archive(root, files=None, fileobj=None, gzip=False, t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj) if files is None: files = build_file_list(root) - extra_names = set(e[0] for e in extra_files) + extra_names = {e[0] for e in extra_files} for path in files: if path in extra_names: # Extra files override context files with the same name @@ -95,9 +93,9 @@ def create_archive(root, files=None, fileobj=None, gzip=False, try: with open(full_path, 'rb') as f: t.addfile(i, f) - except IOError: - raise IOError( - 'Can not read file in context: {}'.format(full_path) + except OSError: + raise OSError( + 
f'Can not read file in context: {full_path}' ) else: # Directories, FIFOs, symlinks... don't need to be read. @@ -119,12 +117,8 @@ def mkbuildcontext(dockerfile): t = tarfile.open(mode='w', fileobj=f) if isinstance(dockerfile, io.StringIO): dfinfo = tarfile.TarInfo('Dockerfile') - if six.PY3: - raise TypeError('Please use io.BytesIO to create in-memory ' - 'Dockerfiles with Python 3') - else: - dfinfo.size = len(dockerfile.getvalue()) - dockerfile.seek(0) + raise TypeError('Please use io.BytesIO to create in-memory ' + 'Dockerfiles with Python 3') elif isinstance(dockerfile, io.BytesIO): dfinfo = tarfile.TarInfo('Dockerfile') dfinfo.size = len(dockerfile.getvalue()) @@ -154,7 +148,7 @@ def walk(root, patterns, default=True): # Heavily based on # https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go -class PatternMatcher(object): +class PatternMatcher: def __init__(self, patterns): self.patterns = list(filter( lambda p: p.dirs, [Pattern(p) for p in patterns] @@ -212,13 +206,12 @@ def rec_walk(current_dir): break if skip: continue - for sub in rec_walk(cur): - yield sub + yield from rec_walk(cur) return rec_walk(root) -class Pattern(object): +class Pattern: def __init__(self, pattern_str): self.exclusion = False if pattern_str.startswith('!'): diff --git a/docker/utils/config.py b/docker/utils/config.py index 82a0e2a5e..8e24959a5 100644 --- a/docker/utils/config.py +++ b/docker/utils/config.py @@ -18,11 +18,11 @@ def find_config_file(config_path=None): os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4 ])) - log.debug("Trying paths: {0}".format(repr(paths))) + log.debug(f"Trying paths: {repr(paths)}") for path in paths: if os.path.exists(path): - log.debug("Found file at path: {0}".format(path)) + log.debug(f"Found file at path: {path}") return path log.debug("No config file found") @@ -57,7 +57,7 @@ def load_general_config(config_path=None): try: with open(config_file) as f: return json.load(f) - except (IOError, ValueError) as e: + except (OSError, ValueError) as e: # In the case of a legacy `.dockercfg` file, we won't # be able to load any JSON data. log.debug(e) diff --git a/docker/utils/decorators.py b/docker/utils/decorators.py index c975d4b40..cf1baf496 100644 --- a/docker/utils/decorators.py +++ b/docker/utils/decorators.py @@ -27,7 +27,7 @@ def decorator(f): def wrapper(self, *args, **kwargs): if utils.version_lt(self._version, version): raise errors.InvalidVersion( - '{0} is not available for version < {1}'.format( + '{} is not available for version < {}'.format( f.__name__, version ) ) diff --git a/docker/utils/fnmatch.py b/docker/utils/fnmatch.py index cc940a2e6..90e9f60f5 100644 --- a/docker/utils/fnmatch.py +++ b/docker/utils/fnmatch.py @@ -108,7 +108,7 @@ def translate(pat): stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff - res = '%s[%s]' % (res, stuff) + res = f'{res}[{stuff}]' else: res = res + re.escape(c) diff --git a/docker/utils/json_stream.py b/docker/utils/json_stream.py index addffdf2f..f384175f7 100644 --- a/docker/utils/json_stream.py +++ b/docker/utils/json_stream.py @@ -1,11 +1,6 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - import json import json.decoder -import six - from ..errors import StreamParseError @@ -20,7 +15,7 @@ def stream_as_text(stream): instead of byte streams. 
""" for data in stream: - if not isinstance(data, six.text_type): + if not isinstance(data, str): data = data.decode('utf-8', 'replace') yield data @@ -46,8 +41,8 @@ def json_stream(stream): return split_buffer(stream, json_splitter, json_decoder.decode) -def line_splitter(buffer, separator=u'\n'): - index = buffer.find(six.text_type(separator)) +def line_splitter(buffer, separator='\n'): + index = buffer.find(str(separator)) if index == -1: return None return buffer[:index + 1], buffer[index + 1:] @@ -61,7 +56,7 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a): of the input. """ splitter = splitter or line_splitter - buffered = six.text_type('') + buffered = '' for data in stream_as_text(stream): buffered += data diff --git a/docker/utils/ports.py b/docker/utils/ports.py index 10b19d741..e81393660 100644 --- a/docker/utils/ports.py +++ b/docker/utils/ports.py @@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False): if not end: return [start + proto] if randomly_available_port: - return ['{}-{}'.format(start, end) + proto] + return [f'{start}-{end}' + proto] return [str(port) + proto for port in range(int(start), int(end) + 1)] diff --git a/docker/utils/socket.py b/docker/utils/socket.py index 7ba950553..4a2076ec4 100644 --- a/docker/utils/socket.py +++ b/docker/utils/socket.py @@ -4,8 +4,6 @@ import socket as pysocket import struct -import six - try: from ..transport import NpipeSocket except ImportError: @@ -27,16 +25,16 @@ def read(socket, n=4096): recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK) - if six.PY3 and not isinstance(socket, NpipeSocket): + if not isinstance(socket, NpipeSocket): select.select([socket], [], []) try: if hasattr(socket, 'recv'): return socket.recv(n) - if six.PY3 and isinstance(socket, getattr(pysocket, 'SocketIO')): + if isinstance(socket, getattr(pysocket, 'SocketIO')): return socket.read(n) return os.read(socket.fileno(), n) - except EnvironmentError as e: + except OSError as e: if e.errno not in recoverable_errors: raise @@ -46,7 +44,7 @@ def read_exactly(socket, n): Reads exactly n bytes from socket Raises SocketError if there isn't enough data """ - data = six.binary_type() + data = bytes() while len(data) < n: next_data = read(socket, n - len(data)) if not next_data: @@ -134,7 +132,7 @@ def consume_socket_output(frames, demux=False): if demux is False: # If the streams are multiplexed, the generator returns strings, that # we just need to concatenate. 
- return six.binary_type().join(frames) + return bytes().join(frames) # If the streams are demultiplexed, the generator yields tuples # (stdout, stderr) @@ -166,4 +164,4 @@ def demux_adaptor(stream_id, data): elif stream_id == STDERR: return (None, data) else: - raise ValueError('{0} is not a valid stream'.format(stream_id)) + raise ValueError(f'{stream_id} is not a valid stream') diff --git a/docker/utils/utils.py b/docker/utils/utils.py index f703cbd34..f7c3dd7d8 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -136,13 +136,13 @@ def convert_volume_binds(binds): mode = 'rw' result.append( - str('{0}:{1}:{2}').format(k, bind, mode) + f'{k}:{bind}:{mode}' ) else: if isinstance(v, bytes): v = v.decode('utf-8') result.append( - str('{0}:{1}:rw').format(k, v) + f'{k}:{v}:rw' ) return result @@ -233,14 +233,14 @@ def parse_host(addr, is_win32=False, tls=False): if proto not in ('tcp', 'unix', 'npipe', 'ssh'): raise errors.DockerException( - "Invalid bind address protocol: {}".format(addr) + f"Invalid bind address protocol: {addr}" ) if proto == 'tcp' and not parsed_url.netloc: # "tcp://" is exceptionally disallowed by convention; # omitting a hostname for other protocols is fine raise errors.DockerException( - 'Invalid bind address format: {}'.format(addr) + f'Invalid bind address format: {addr}' ) if any([ @@ -248,7 +248,7 @@ def parse_host(addr, is_win32=False, tls=False): parsed_url.password ]): raise errors.DockerException( - 'Invalid bind address format: {}'.format(addr) + f'Invalid bind address format: {addr}' ) if parsed_url.path and proto == 'ssh': @@ -285,8 +285,8 @@ def parse_host(addr, is_win32=False, tls=False): proto = 'http+unix' if proto in ('http+unix', 'npipe'): - return "{}://{}".format(proto, path).rstrip('/') - return '{0}://{1}:{2}{3}'.format(proto, host, port, path).rstrip('/') + return f"{proto}://{path}".rstrip('/') + return f'{proto}://{host}:{port}{path}'.rstrip('/') def parse_devices(devices): @@ -297,7 +297,7 @@ def parse_devices(devices): continue if not isinstance(device, str): raise errors.DockerException( - 'Invalid device type {0}'.format(type(device)) + f'Invalid device type {type(device)}' ) device_mapping = device.split(':') if device_mapping: @@ -408,7 +408,7 @@ def parse_bytes(s): digits = float(digits_part) except ValueError: raise errors.DockerException( - 'Failed converting the string value for memory ({0}) to' + 'Failed converting the string value for memory ({}) to' ' an integer.'.format(digits_part) ) @@ -416,7 +416,7 @@ def parse_bytes(s): s = int(digits * units[suffix]) else: raise errors.DockerException( - 'The specified value for memory ({0}) should specify the' + 'The specified value for memory ({}) should specify the' ' units. 
The postfix should be one of the `b` `k` `m` `g`' ' characters'.format(s) ) @@ -428,7 +428,7 @@ def normalize_links(links): if isinstance(links, dict): links = iter(links.items()) - return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)] + return [f'{k}:{v}' if v else k for k, v in sorted(links)] def parse_env_file(env_file): @@ -438,7 +438,7 @@ def parse_env_file(env_file): """ environment = {} - with open(env_file, 'r') as f: + with open(env_file) as f: for line in f: if line[0] == '#': @@ -454,7 +454,7 @@ def parse_env_file(env_file): environment[k] = v else: raise errors.DockerException( - 'Invalid line in environment file {0}:\n{1}'.format( + 'Invalid line in environment file {}:\n{}'.format( env_file, line)) return environment @@ -471,7 +471,7 @@ def format_env(key, value): if isinstance(value, bytes): value = value.decode('utf-8') - return u'{key}={value}'.format(key=key, value=value) + return f'{key}={value}' return [format_env(*var) for var in iter(environment.items())] @@ -479,11 +479,11 @@ def format_extra_hosts(extra_hosts, task=False): # Use format dictated by Swarm API if container is part of a task if task: return [ - '{} {}'.format(v, k) for k, v in sorted(iter(extra_hosts.items())) + f'{v} {k}' for k, v in sorted(iter(extra_hosts.items())) ] return [ - '{}:{}'.format(k, v) for k, v in sorted(iter(extra_hosts.items())) + f'{k}:{v}' for k, v in sorted(iter(extra_hosts.items())) ] diff --git a/docker/version.py b/docker/version.py index bc09e6370..355410412 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ version = "4.5.0-dev" -version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) +version_info = tuple(int(d) for d in version.split("-")[0].split(".")) diff --git a/docs/conf.py b/docs/conf.py index f46d1f76e..2b0a71953 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # docker-sdk-python documentation build configuration file, created by # sphinx-quickstart on Wed Sep 14 15:48:58 2016. @@ -60,21 +59,21 @@ master_doc = 'index' # General information about the project. -project = u'Docker SDK for Python' +project = 'Docker SDK for Python' year = datetime.datetime.now().year -copyright = u'%d Docker Inc' % year -author = u'Docker Inc' +copyright = '%d Docker Inc' % year +author = 'Docker Inc' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # -with open('../docker/version.py', 'r') as vfile: +with open('../docker/version.py') as vfile: exec(vfile.read()) # The full version, including alpha/beta/rc tags. release = version # The short X.Y version. -version = '{}.{}'.format(version_info[0], version_info[1]) +version = f'{version_info[0]}.{version_info[1]}' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -283,8 +282,8 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'docker-sdk-python.tex', u'docker-sdk-python Documentation', - u'Docker Inc.', 'manual'), + (master_doc, 'docker-sdk-python.tex', 'docker-sdk-python Documentation', + 'Docker Inc.', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -325,7 +324,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation', + (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation', [author], 1) ] @@ -340,7 +339,7 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation', + (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation', author, 'docker-sdk-python', 'One line description of project.', 'Miscellaneous'), ] diff --git a/scripts/versions.py b/scripts/versions.py index 4bdcb74de..75e5355eb 100755 --- a/scripts/versions.py +++ b/scripts/versions.py @@ -52,8 +52,8 @@ def order(self): return (int(self.major), int(self.minor), int(self.patch)) + stage def __str__(self): - stage = '-{}'.format(self.stage) if self.stage else '' - edition = '-{}'.format(self.edition) if self.edition else '' + stage = f'-{self.stage}' if self.stage else '' + edition = f'-{self.edition}' if self.edition else '' return '.'.join(map(str, self[:3])) + edition + stage diff --git a/setup.py b/setup.py index ec1a51deb..a966fea23 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -from __future__ import print_function import codecs import os diff --git a/tests/helpers.py b/tests/helpers.py index f344e1c33..63cbe2e63 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -11,7 +11,6 @@ import docker import paramiko import pytest -import six def make_tree(dirs, files): @@ -54,7 +53,7 @@ def requires_api_version(version): return pytest.mark.skipif( docker.utils.version_lt(test_version, version), - reason="API version is too low (< {0})".format(version) + reason=f"API version is too low (< {version})" ) @@ -86,7 +85,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40): def random_name(): - return u'dockerpytest_{0:x}'.format(random.getrandbits(64)) + return f'dockerpytest_{random.getrandbits(64):x}' def force_leave_swarm(client): @@ -105,11 +104,11 @@ def force_leave_swarm(client): def swarm_listen_addr(): - return '0.0.0.0:{0}'.format(random.randrange(10000, 25000)) + return f'0.0.0.0:{random.randrange(10000, 25000)}' def assert_cat_socket_detached_with_keys(sock, inputs): - if six.PY3 and hasattr(sock, '_sock'): + if hasattr(sock, '_sock'): sock = sock._sock for i in inputs: @@ -128,7 +127,7 @@ def assert_cat_socket_detached_with_keys(sock, inputs): # of the daemon no longer cause this to raise an error. 
try: sock.sendall(b'make sure the socket is closed\n') - except socket.error: + except OSError: return sock.sendall(b"make sure the socket is closed\n") diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index b830a106b..ef48e12ed 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -7,7 +7,6 @@ from docker.utils.proxy import ProxyConfig import pytest -import six from .base import BaseAPIIntegrationTest, TEST_IMG from ..helpers import random_name, requires_api_version, requires_experimental @@ -71,9 +70,8 @@ def test_build_streaming(self): assert len(logs) > 0 def test_build_from_stringio(self): - if six.PY3: - return - script = io.StringIO(six.text_type('\n').join([ + return + script = io.StringIO('\n'.join([ 'FROM busybox', 'RUN mkdir -p /tmp/test', 'EXPOSE 8080', @@ -83,8 +81,7 @@ def test_build_from_stringio(self): stream = self.client.build(fileobj=script) logs = '' for chunk in stream: - if six.PY3: - chunk = chunk.decode('utf-8') + chunk = chunk.decode('utf-8') logs += chunk assert logs != '' @@ -135,8 +132,7 @@ def test_build_with_dockerignore(self): self.client.wait(c) logs = self.client.logs(c) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert sorted(list(filter(None, logs.split('\n')))) == sorted([ '/test/#file.txt', @@ -340,8 +336,7 @@ def test_build_with_extra_hosts(self): assert self.client.inspect_image(img_name) ctnr = self.run_container(img_name, 'cat /hosts-file') logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert '127.0.0.1\textrahost.local.test' in logs assert '127.0.0.1\thello.world.test' in logs @@ -376,7 +371,7 @@ def test_build_stderr_data(self): snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' script = io.BytesIO(b'\n'.join([ b'FROM busybox', - 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') + f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8') ])) stream = self.client.build( @@ -440,7 +435,7 @@ def test_build_gzip_custom_encoding(self): @requires_api_version('1.32') @requires_experimental(until=None) def test_build_invalid_platform(self): - script = io.BytesIO('FROM busybox\n'.encode('ascii')) + script = io.BytesIO(b'FROM busybox\n') with pytest.raises(errors.APIError) as excinfo: stream = self.client.build(fileobj=script, platform='foobar') diff --git a/tests/integration/api_client_test.py b/tests/integration/api_client_test.py index 9e348f3e3..d1622fa88 100644 --- a/tests/integration/api_client_test.py +++ b/tests/integration/api_client_test.py @@ -72,6 +72,6 @@ def test_resource_warnings(self): client.close() del client - assert len(w) == 0, "No warnings produced: {0}".format( + assert len(w) == 0, "No warnings produced: {}".format( w[0].message ) diff --git a/tests/integration/api_config_test.py b/tests/integration/api_config_test.py index 0ffd7675c..72cbb431c 100644 --- a/tests/integration/api_config_test.py +++ b/tests/integration/api_config_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import docker import pytest @@ -31,7 +29,7 @@ def test_create_config(self): def test_create_config_unicode_data(self): config_id = self.client.create_config( - 'favorite_character', u'いざよいさくや' + 'favorite_character', 'いざよいさくや' ) self.tmp_configs.append(config_id) assert 'ID' in config_id diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 3087045b2..9da2cfbf4 100644 --- a/tests/integration/api_container_test.py +++ 
b/tests/integration/api_container_test.py @@ -34,7 +34,7 @@ def test_list_containers(self): assert len(retrieved) == 1 retrieved = retrieved[0] assert 'Command' in retrieved - assert retrieved['Command'] == str('true') + assert retrieved['Command'] == 'true' assert 'Image' in retrieved assert re.search(r'alpine:.*', retrieved['Image']) assert 'Status' in retrieved @@ -104,10 +104,10 @@ def test_create_with_links(self): assert self.client.wait(container3_id)['StatusCode'] == 0 logs = self.client.logs(container3_id).decode('utf-8') - assert '{0}_NAME='.format(link_env_prefix1) in logs - assert '{0}_ENV_FOO=1'.format(link_env_prefix1) in logs - assert '{0}_NAME='.format(link_env_prefix2) in logs - assert '{0}_ENV_FOO=1'.format(link_env_prefix2) in logs + assert f'{link_env_prefix1}_NAME=' in logs + assert f'{link_env_prefix1}_ENV_FOO=1' in logs + assert f'{link_env_prefix2}_NAME=' in logs + assert f'{link_env_prefix2}_ENV_FOO=1' in logs def test_create_with_restart_policy(self): container = self.client.create_container( @@ -487,7 +487,7 @@ def test_create_with_uts_mode(self): ) class VolumeBindTest(BaseAPIIntegrationTest): def setUp(self): - super(VolumeBindTest, self).setUp() + super().setUp() self.mount_dest = '/mnt' @@ -618,7 +618,7 @@ class ArchiveTest(BaseAPIIntegrationTest): def test_get_file_archive_from_container(self): data = 'The Maid and the Pocket Watch of Blood' ctnr = self.client.create_container( - TEST_IMG, 'sh -c "echo {0} > /vol1/data.txt"'.format(data), + TEST_IMG, f'sh -c "echo {data} > /vol1/data.txt"', volumes=['/vol1'] ) self.tmp_containers.append(ctnr) @@ -636,7 +636,7 @@ def test_get_file_archive_from_container(self): def test_get_file_stat_from_container(self): data = 'The Maid and the Pocket Watch of Blood' ctnr = self.client.create_container( - TEST_IMG, 'sh -c "echo -n {0} > /vol1/data.txt"'.format(data), + TEST_IMG, f'sh -c "echo -n {data} > /vol1/data.txt"', volumes=['/vol1'] ) self.tmp_containers.append(ctnr) @@ -655,7 +655,7 @@ def test_copy_file_to_container(self): test_file.seek(0) ctnr = self.client.create_container( TEST_IMG, - 'cat {0}'.format( + 'cat {}'.format( os.path.join('/vol1/', os.path.basename(test_file.name)) ), volumes=['/vol1'] @@ -701,7 +701,7 @@ def test_rename_container(self): if version == '1.5.0': assert name == inspect['Name'] else: - assert '/{0}'.format(name) == inspect['Name'] + assert f'/{name}' == inspect['Name'] class StartContainerTest(BaseAPIIntegrationTest): @@ -807,7 +807,7 @@ class LogsTest(BaseAPIIntegrationTest): def test_logs(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'echo {0}'.format(snippet) + TEST_IMG, f'echo {snippet}' ) id = container['Id'] self.tmp_containers.append(id) @@ -821,7 +821,7 @@ def test_logs_tail_option(self): snippet = '''Line1 Line2''' container = self.client.create_container( - TEST_IMG, 'echo "{0}"'.format(snippet) + TEST_IMG, f'echo "{snippet}"' ) id = container['Id'] self.tmp_containers.append(id) @@ -834,7 +834,7 @@ def test_logs_tail_option(self): def test_logs_streaming_and_follow(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'echo {0}'.format(snippet) + TEST_IMG, f'echo {snippet}' ) id = container['Id'] self.tmp_containers.append(id) @@ -854,7 +854,7 @@ def test_logs_streaming_and_follow(self): def test_logs_streaming_and_follow_and_cancel(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'sh -c "echo \\"{0}\\" && sleep 
3"'.format(snippet) + TEST_IMG, f'sh -c "echo \\"{snippet}\\" && sleep 3"' ) id = container['Id'] self.tmp_containers.append(id) @@ -872,7 +872,7 @@ def test_logs_streaming_and_follow_and_cancel(self): def test_logs_with_dict_instead_of_id(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'echo {0}'.format(snippet) + TEST_IMG, f'echo {snippet}' ) id = container['Id'] self.tmp_containers.append(id) @@ -885,7 +885,7 @@ def test_logs_with_dict_instead_of_id(self): def test_logs_with_tail_0(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'echo "{0}"'.format(snippet) + TEST_IMG, f'echo "{snippet}"' ) id = container['Id'] self.tmp_containers.append(id) @@ -899,7 +899,7 @@ def test_logs_with_tail_0(self): def test_logs_with_until(self): snippet = 'Shanghai Teahouse (Hong Meiling)' container = self.client.create_container( - TEST_IMG, 'echo "{0}"'.format(snippet) + TEST_IMG, f'echo "{snippet}"' ) self.tmp_containers.append(container) @@ -1095,7 +1095,7 @@ def test_top(self): self.client.start(container) res = self.client.top(container) if not IS_WINDOWS_PLATFORM: - assert res['Titles'] == [u'PID', u'USER', u'TIME', u'COMMAND'] + assert res['Titles'] == ['PID', 'USER', 'TIME', 'COMMAND'] assert len(res['Processes']) == 1 assert res['Processes'][0][-1] == 'sleep 60' self.client.kill(container) @@ -1113,7 +1113,7 @@ def test_top_with_psargs(self): self.client.start(container) res = self.client.top(container, '-eopid,user') - assert res['Titles'] == [u'PID', u'USER'] + assert res['Titles'] == ['PID', 'USER'] assert len(res['Processes']) == 1 assert res['Processes'][0][10] == 'sleep 60' @@ -1203,7 +1203,7 @@ def test_run_container_streaming(self): def test_run_container_reading_socket(self): line = 'hi there and stuff and things, words!' 
# `echo` appends CRLF, `printf` doesn't - command = "printf '{0}'".format(line) + command = f"printf '{line}'" container = self.client.create_container(TEST_IMG, command, detach=True, tty=False) self.tmp_containers.append(container) @@ -1487,7 +1487,7 @@ def test_remove_link(self): # Remove link linked_name = self.client.inspect_container(container2_id)['Name'][1:] - link_name = '%s/%s' % (linked_name, link_alias) + link_name = f'{linked_name}/{link_alias}' self.client.remove_container(link_name, link=True) # Link is gone diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index 554e8629e..4d7748f5e 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -239,7 +239,7 @@ class ExecDemuxTest(BaseAPIIntegrationTest): ) def setUp(self): - super(ExecDemuxTest, self).setUp() + super().setUp() self.container = self.client.create_container( TEST_IMG, 'cat', detach=True, stdin_open=True ) diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index d5f898930..e30de46c0 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -265,7 +265,7 @@ def test_get_load_image(self): output = self.client.load_image(data) assert any([ line for line in output - if 'Loaded image: {}'.format(test_img) in line.get('stream', '') + if f'Loaded image: {test_img}' in line.get('stream', '') ]) @contextlib.contextmanager @@ -284,7 +284,7 @@ def do_GET(self): thread.setDaemon(True) thread.start() - yield 'http://%s:%s' % (socket.gethostname(), server.server_address[1]) + yield f'http://{socket.gethostname()}:{server.server_address[1]}' server.shutdown() @@ -350,7 +350,7 @@ def test_get_image_load_image(self): result = self.client.load_image(f.read()) success = False - result_line = 'Loaded image: {}\n'.format(TEST_IMG) + result_line = f'Loaded image: {TEST_IMG}\n' for data in result: print(data) if 'stream' in data: diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py index af22da8d2..256813846 100644 --- a/tests/integration/api_network_test.py +++ b/tests/integration/api_network_test.py @@ -9,7 +9,7 @@ class TestNetworks(BaseAPIIntegrationTest): def tearDown(self): self.client.leave_swarm(force=True) - super(TestNetworks, self).tearDown() + super().tearDown() def create_network(self, *args, **kwargs): net_name = random_name() diff --git a/tests/integration/api_secret_test.py b/tests/integration/api_secret_test.py index b3d93b8fc..fd9854341 100644 --- a/tests/integration/api_secret_test.py +++ b/tests/integration/api_secret_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import docker import pytest @@ -31,7 +29,7 @@ def test_create_secret(self): def test_create_secret_unicode_data(self): secret_id = self.client.create_secret( - 'favorite_character', u'いざよいさくや' + 'favorite_character', 'いざよいさくや' ) self.tmp_secrets.append(secret_id) assert 'ID' in secret_id diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index 1bee46e56..19a6f1545 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import random import time @@ -30,10 +28,10 @@ def tearDown(self): self.client.remove_service(service['ID']) except docker.errors.APIError: pass - super(ServiceTest, self).tearDown() + super().tearDown() def get_service_name(self): - return 'dockerpytest_{0:x}'.format(random.getrandbits(64)) + return f'dockerpytest_{random.getrandbits(64):x}' 
def get_service_container(self, service_name, attempts=20, interval=0.5, include_stopped=False): @@ -54,7 +52,7 @@ def get_service_container(self, service_name, attempts=20, interval=0.5, def create_simple_service(self, name=None, labels=None): if name: - name = 'dockerpytest_{0}'.format(name) + name = f'dockerpytest_{name}' else: name = self.get_service_name() @@ -403,20 +401,20 @@ def test_create_service_with_placement(self): node_id = self.client.nodes()[0]['ID'] container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate( - container_spec, placement=['node.id=={}'.format(node_id)] + container_spec, placement=[f'node.id=={node_id}'] ) name = self.get_service_name() svc_id = self.client.create_service(task_tmpl, name=name) svc_info = self.client.inspect_service(svc_id) assert 'Placement' in svc_info['Spec']['TaskTemplate'] assert (svc_info['Spec']['TaskTemplate']['Placement'] == - {'Constraints': ['node.id=={}'.format(node_id)]}) + {'Constraints': [f'node.id=={node_id}']}) def test_create_service_with_placement_object(self): node_id = self.client.nodes()[0]['ID'] container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) placemt = docker.types.Placement( - constraints=['node.id=={}'.format(node_id)] + constraints=[f'node.id=={node_id}'] ) task_tmpl = docker.types.TaskTemplate( container_spec, placement=placemt @@ -508,7 +506,7 @@ def test_create_service_with_endpoint_spec(self): assert port['TargetPort'] == 1990 assert port['Protocol'] == 'udp' else: - self.fail('Invalid port specification: {0}'.format(port)) + self.fail(f'Invalid port specification: {port}') assert len(ports) == 3 @@ -670,14 +668,14 @@ def test_create_service_with_secret(self): container = self.get_service_container(name) assert container is not None exec_id = self.client.exec_create( - container, 'cat /run/secrets/{0}'.format(secret_name) + container, f'cat /run/secrets/{secret_name}' ) assert self.client.exec_start(exec_id) == secret_data @requires_api_version('1.25') def test_create_service_with_unicode_secret(self): secret_name = 'favorite_touhou' - secret_data = u'東方花映塚' + secret_data = '東方花映塚' secret_id = self.client.create_secret(secret_name, secret_data) self.tmp_secrets.append(secret_id) secret_ref = docker.types.SecretReference(secret_id, secret_name) @@ -695,7 +693,7 @@ def test_create_service_with_unicode_secret(self): container = self.get_service_container(name) assert container is not None exec_id = self.client.exec_create( - container, 'cat /run/secrets/{0}'.format(secret_name) + container, f'cat /run/secrets/{secret_name}' ) container_secret = self.client.exec_start(exec_id) container_secret = container_secret.decode('utf-8') @@ -722,14 +720,14 @@ def test_create_service_with_config(self): container = self.get_service_container(name) assert container is not None exec_id = self.client.exec_create( - container, 'cat /{0}'.format(config_name) + container, f'cat /{config_name}' ) assert self.client.exec_start(exec_id) == config_data @requires_api_version('1.30') def test_create_service_with_unicode_config(self): config_name = 'favorite_touhou' - config_data = u'東方花映塚' + config_data = '東方花映塚' config_id = self.client.create_config(config_name, config_data) self.tmp_configs.append(config_id) config_ref = docker.types.ConfigReference(config_id, config_name) @@ -747,7 +745,7 @@ def test_create_service_with_unicode_config(self): container = self.get_service_container(name) assert container is not None exec_id = self.client.exec_create( - container, 'cat 
/{0}'.format(config_name) + container, f'cat /{config_name}' ) container_config = self.client.exec_start(exec_id) container_config = container_config.decode('utf-8') @@ -1136,7 +1134,7 @@ def test_update_service_with_defaults_endpoint_spec(self): assert port['TargetPort'] == 1990 assert port['Protocol'] == 'udp' else: - self.fail('Invalid port specification: {0}'.format(port)) + self.fail(f'Invalid port specification: {port}') assert len(ports) == 3 @@ -1163,7 +1161,7 @@ def test_update_service_with_defaults_endpoint_spec(self): assert port['TargetPort'] == 1990 assert port['Protocol'] == 'udp' else: - self.fail('Invalid port specification: {0}'.format(port)) + self.fail(f'Invalid port specification: {port}') assert len(ports) == 3 diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index f1cbc264e..48c0592c6 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -8,7 +8,7 @@ class SwarmTest(BaseAPIIntegrationTest): def setUp(self): - super(SwarmTest, self).setUp() + super().setUp() force_leave_swarm(self.client) self._unlock_key = None @@ -19,7 +19,7 @@ def tearDown(self): except docker.errors.APIError: pass force_leave_swarm(self.client) - super(SwarmTest, self).tearDown() + super().tearDown() @requires_api_version('1.24') def test_init_swarm_simple(self): diff --git a/tests/integration/base.py b/tests/integration/base.py index a7613f691..031079c91 100644 --- a/tests/integration/base.py +++ b/tests/integration/base.py @@ -75,11 +75,11 @@ class BaseAPIIntegrationTest(BaseIntegrationTest): """ def setUp(self): - super(BaseAPIIntegrationTest, self).setUp() + super().setUp() self.client = self.get_client_instance() def tearDown(self): - super(BaseAPIIntegrationTest, self).tearDown() + super().tearDown() self.client.close() @staticmethod diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index ec48835dc..ae9459558 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,5 +1,3 @@ -from __future__ import print_function - import sys import warnings @@ -17,11 +15,11 @@ def setup_test_session(): try: c.inspect_image(TEST_IMG) except docker.errors.NotFound: - print("\npulling {0}".format(TEST_IMG), file=sys.stderr) + print(f"\npulling {TEST_IMG}", file=sys.stderr) for data in c.pull(TEST_IMG, stream=True, decode=True): status = data.get("status") progress = data.get("progress") - detail = "{0} - {1}".format(status, progress) + detail = f"{status} - {progress}" print(detail, file=sys.stderr) # Double make sure we now have busybox diff --git a/tests/integration/credentials/store_test.py b/tests/integration/credentials/store_test.py index dd543e24a..d0cfd5417 100644 --- a/tests/integration/credentials/store_test.py +++ b/tests/integration/credentials/store_test.py @@ -3,7 +3,6 @@ import sys import pytest -import six from distutils.spawn import find_executable from docker.credentials import ( @@ -12,7 +11,7 @@ ) -class TestStore(object): +class TestStore: def teardown_method(self): for server in self.tmp_keys: try: @@ -33,7 +32,7 @@ def setup_method(self): self.store = Store(DEFAULT_OSX_STORE) def get_random_servername(self): - res = 'pycreds_test_{:x}'.format(random.getrandbits(32)) + res = f'pycreds_test_{random.getrandbits(32):x}' self.tmp_keys.append(res) return res @@ -61,7 +60,7 @@ def test_store_and_erase(self): def test_unicode_strings(self): key = self.get_random_servername() - key = six.u(key) + key = key self.store.store(server=key, username='user', secret='pass') 
data = self.store.get(key) assert data diff --git a/tests/integration/credentials/utils_test.py b/tests/integration/credentials/utils_test.py index ad55f3216..d7b2a1a4d 100644 --- a/tests/integration/credentials/utils_test.py +++ b/tests/integration/credentials/utils_test.py @@ -5,7 +5,7 @@ try: from unittest import mock except ImportError: - import mock + from unittest import mock @mock.patch.dict(os.environ) diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py index 0d60f37b0..94aa20100 100644 --- a/tests/integration/models_images_test.py +++ b/tests/integration/models_images_test.py @@ -13,8 +13,8 @@ class ImageCollectionTest(BaseIntegrationTest): def test_build(self): client = docker.from_env(version=TEST_API_VERSION) image, _ = client.images.build(fileobj=io.BytesIO( - "FROM alpine\n" - "CMD echo hello world".encode('ascii') + b"FROM alpine\n" + b"CMD echo hello world" )) self.tmp_imgs.append(image.id) assert client.containers.run(image) == b"hello world\n" @@ -24,8 +24,8 @@ def test_build_with_error(self): client = docker.from_env(version=TEST_API_VERSION) with pytest.raises(docker.errors.BuildError) as cm: client.images.build(fileobj=io.BytesIO( - "FROM alpine\n" - "RUN exit 1".encode('ascii') + b"FROM alpine\n" + b"RUN exit 1" )) assert ( "The command '/bin/sh -c exit 1' returned a non-zero code: 1" @@ -36,8 +36,8 @@ def test_build_with_multiple_success(self): client = docker.from_env(version=TEST_API_VERSION) image, _ = client.images.build( tag='some-tag', fileobj=io.BytesIO( - "FROM alpine\n" - "CMD echo hello world".encode('ascii') + b"FROM alpine\n" + b"CMD echo hello world" ) ) self.tmp_imgs.append(image.id) @@ -47,8 +47,8 @@ def test_build_with_success_build_output(self): client = docker.from_env(version=TEST_API_VERSION) image, _ = client.images.build( tag='dup-txt-tag', fileobj=io.BytesIO( - "FROM alpine\n" - "CMD echo Successfully built abcd1234".encode('ascii') + b"FROM alpine\n" + b"CMD echo Successfully built abcd1234" ) ) self.tmp_imgs.append(image.id) @@ -119,7 +119,7 @@ def test_save_and_load_repo_name(self): self.tmp_imgs.append(additional_tag) image.reload() with tempfile.TemporaryFile() as f: - stream = image.save(named='{}:latest'.format(additional_tag)) + stream = image.save(named=f'{additional_tag}:latest') for chunk in stream: f.write(chunk) @@ -129,7 +129,7 @@ def test_save_and_load_repo_name(self): assert len(result) == 1 assert result[0].id == image.id - assert '{}:latest'.format(additional_tag) in result[0].tags + assert f'{additional_tag}:latest' in result[0].tags def test_save_name_error(self): client = docker.from_env(version=TEST_API_VERSION) @@ -143,7 +143,7 @@ class ImageTest(BaseIntegrationTest): def test_tag_and_remove(self): repo = 'dockersdk.tests.images.test_tag' tag = 'some-tag' - identifier = '{}:{}'.format(repo, tag) + identifier = f'{repo}:{tag}' client = docker.from_env(version=TEST_API_VERSION) image = client.images.pull('alpine:latest') diff --git a/tests/integration/regression_test.py b/tests/integration/regression_test.py index a63883c4f..deb9aff15 100644 --- a/tests/integration/regression_test.py +++ b/tests/integration/regression_test.py @@ -2,7 +2,6 @@ import random import docker -import six from .base import BaseAPIIntegrationTest, TEST_IMG import pytest @@ -39,8 +38,7 @@ def test_715_handle_user_param_as_int_value(self): self.client.start(ctnr) self.client.wait(ctnr) logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert logs == 
'1000\n' def test_792_explicit_port_protocol(self): @@ -56,10 +54,10 @@ def test_792_explicit_port_protocol(self): self.client.start(ctnr) assert self.client.port( ctnr, 2000 - )[0]['HostPort'] == six.text_type(tcp_port) + )[0]['HostPort'] == str(tcp_port) assert self.client.port( ctnr, '2000/tcp' - )[0]['HostPort'] == six.text_type(tcp_port) + )[0]['HostPort'] == str(tcp_port) assert self.client.port( ctnr, '2000/udp' - )[0]['HostPort'] == six.text_type(udp_port) + )[0]['HostPort'] == str(udp_port) diff --git a/tests/ssh/api_build_test.py b/tests/ssh/api_build_test.py index b830a106b..ef48e12ed 100644 --- a/tests/ssh/api_build_test.py +++ b/tests/ssh/api_build_test.py @@ -7,7 +7,6 @@ from docker.utils.proxy import ProxyConfig import pytest -import six from .base import BaseAPIIntegrationTest, TEST_IMG from ..helpers import random_name, requires_api_version, requires_experimental @@ -71,9 +70,8 @@ def test_build_streaming(self): assert len(logs) > 0 def test_build_from_stringio(self): - if six.PY3: - return - script = io.StringIO(six.text_type('\n').join([ + return + script = io.StringIO('\n'.join([ 'FROM busybox', 'RUN mkdir -p /tmp/test', 'EXPOSE 8080', @@ -83,8 +81,7 @@ def test_build_from_stringio(self): stream = self.client.build(fileobj=script) logs = '' for chunk in stream: - if six.PY3: - chunk = chunk.decode('utf-8') + chunk = chunk.decode('utf-8') logs += chunk assert logs != '' @@ -135,8 +132,7 @@ def test_build_with_dockerignore(self): self.client.wait(c) logs = self.client.logs(c) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert sorted(list(filter(None, logs.split('\n')))) == sorted([ '/test/#file.txt', @@ -340,8 +336,7 @@ def test_build_with_extra_hosts(self): assert self.client.inspect_image(img_name) ctnr = self.run_container(img_name, 'cat /hosts-file') logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert '127.0.0.1\textrahost.local.test' in logs assert '127.0.0.1\thello.world.test' in logs @@ -376,7 +371,7 @@ def test_build_stderr_data(self): snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' script = io.BytesIO(b'\n'.join([ b'FROM busybox', - 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') + f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8') ])) stream = self.client.build( @@ -440,7 +435,7 @@ def test_build_gzip_custom_encoding(self): @requires_api_version('1.32') @requires_experimental(until=None) def test_build_invalid_platform(self): - script = io.BytesIO('FROM busybox\n'.encode('ascii')) + script = io.BytesIO(b'FROM busybox\n') with pytest.raises(errors.APIError) as excinfo: stream = self.client.build(fileobj=script, platform='foobar') diff --git a/tests/ssh/base.py b/tests/ssh/base.py index c723d823b..4825227f3 100644 --- a/tests/ssh/base.py +++ b/tests/ssh/base.py @@ -79,7 +79,7 @@ def setUpClass(cls): cls.client.pull(TEST_IMG) def tearDown(self): - super(BaseAPIIntegrationTest, self).tearDown() + super().tearDown() self.client.close() @staticmethod diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index 8a0577e78..1ebd37df0 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import datetime import json import signal @@ -7,7 +5,6 @@ import docker from docker.api import APIClient import pytest -import six from . 
import fake_api from ..helpers import requires_api_version @@ -19,7 +16,7 @@ try: from unittest import mock except ImportError: - import mock + from unittest import mock def fake_inspect_container_tty(self, container): @@ -771,7 +768,7 @@ def test_create_container_with_devices(self): def test_create_container_with_device_requests(self): client = APIClient(version='1.40') fake_api.fake_responses.setdefault( - '{0}/v1.40/containers/create'.format(fake_api.prefix), + f'{fake_api.prefix}/v1.40/containers/create', fake_api.post_fake_create_container, ) client.create_container( @@ -831,8 +828,8 @@ def test_create_container_with_device_requests(self): def test_create_container_with_labels_dict(self): labels_dict = { - six.text_type('foo'): six.text_type('1'), - six.text_type('bar'): six.text_type('2'), + 'foo': '1', + 'bar': '2', } self.client.create_container( @@ -848,12 +845,12 @@ def test_create_container_with_labels_dict(self): def test_create_container_with_labels_list(self): labels_list = [ - six.text_type('foo'), - six.text_type('bar'), + 'foo', + 'bar', ] labels_dict = { - six.text_type('foo'): six.text_type(), - six.text_type('bar'): six.text_type(), + 'foo': '', + 'bar': '', } self.client.create_container( @@ -1013,11 +1010,11 @@ def test_create_container_with_sysctl(self): def test_create_container_with_unicode_envvars(self): envvars_dict = { - 'foo': u'☃', + 'foo': '☃', } expected = [ - u'foo=☃' + 'foo=☃' ] self.client.create_container( @@ -1138,7 +1135,7 @@ def test_logs(self): stream=False ) - assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') + assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n' def test_logs_with_dict_instead_of_id(self): with mock.patch('docker.api.client.APIClient.inspect_container', @@ -1154,7 +1151,7 @@ def test_logs_with_dict_instead_of_id(self): stream=False ) - assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') + assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n' def test_log_streaming(self): with mock.patch('docker.api.client.APIClient.inspect_container', diff --git a/tests/unit/api_exec_test.py b/tests/unit/api_exec_test.py index a9d2dd5b6..450425084 100644 --- a/tests/unit/api_exec_test.py +++ b/tests/unit/api_exec_test.py @@ -11,7 +11,7 @@ def test_exec_create(self): self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1']) args = fake_request.call_args - assert 'POST' == args[0][0], url_prefix + 'containers/{0}/exec'.format( + assert 'POST' == args[0][0], url_prefix + 'containers/{}/exec'.format( fake_api.FAKE_CONTAINER_ID ) @@ -32,7 +32,7 @@ def test_exec_start(self): self.client.exec_start(fake_api.FAKE_EXEC_ID) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{0}/start'.format( + assert args[0][1] == url_prefix + 'exec/{}/start'.format( fake_api.FAKE_EXEC_ID ) @@ -51,7 +51,7 @@ def test_exec_start_detached(self): self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{0}/start'.format( + assert args[0][1] == url_prefix + 'exec/{}/start'.format( fake_api.FAKE_EXEC_ID ) @@ -68,7 +68,7 @@ def test_exec_inspect(self): self.client.exec_inspect(fake_api.FAKE_EXEC_ID) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{0}/json'.format( + assert args[0][1] == url_prefix + 'exec/{}/json'.format( fake_api.FAKE_EXEC_ID ) @@ -77,7 +77,7 @@ def test_exec_resize(self): fake_request.assert_called_with( 'POST', - url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID), + url_prefix + 
f'exec/{fake_api.FAKE_EXEC_ID}/resize', params={'h': 20, 'w': 60}, timeout=DEFAULT_TIMEOUT_SECONDS ) diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py index 0b60df43a..843c11b84 100644 --- a/tests/unit/api_image_test.py +++ b/tests/unit/api_image_test.py @@ -11,7 +11,7 @@ try: from unittest import mock except ImportError: - import mock + from unittest import mock class ImageTest(BaseAPIClientTest): diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py index 758f01323..84d654496 100644 --- a/tests/unit/api_network_test.py +++ b/tests/unit/api_network_test.py @@ -1,14 +1,12 @@ import json -import six - from .api_test import BaseAPIClientTest, url_prefix, response from docker.types import IPAMConfig, IPAMPool try: from unittest import mock except ImportError: - import mock + from unittest import mock class NetworkTest(BaseAPIClientTest): @@ -103,16 +101,16 @@ def test_remove_network(self): self.client.remove_network(network_id) args = delete.call_args - assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id) + assert args[0][0] == url_prefix + f'networks/{network_id}' def test_inspect_network(self): network_id = 'abc12345' network_name = 'foo' network_data = { - six.u('name'): network_name, - six.u('id'): network_id, - six.u('driver'): 'bridge', - six.u('containers'): {}, + 'name': network_name, + 'id': network_id, + 'driver': 'bridge', + 'containers': {}, } network_response = response(status_code=200, content=network_data) @@ -123,7 +121,7 @@ def test_inspect_network(self): assert result == network_data args = get.call_args - assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id) + assert args[0][0] == url_prefix + f'networks/{network_id}' def test_connect_container_to_network(self): network_id = 'abc12345' @@ -141,7 +139,7 @@ def test_connect_container_to_network(self): ) assert post.call_args[0][0] == ( - url_prefix + 'networks/{0}/connect'.format(network_id) + url_prefix + f'networks/{network_id}/connect' ) assert json.loads(post.call_args[1]['data']) == { @@ -164,7 +162,7 @@ def test_disconnect_container_from_network(self): container={'Id': container_id}, net_id=network_id) assert post.call_args[0][0] == ( - url_prefix + 'networks/{0}/disconnect'.format(network_id) + url_prefix + f'networks/{network_id}/disconnect' ) assert json.loads(post.call_args[1]['data']) == { 'Container': container_id diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index cb14b74e1..dfc38164d 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -10,11 +10,12 @@ import threading import time import unittest +import socketserver +import http.server import docker import pytest import requests -import six from docker.api import APIClient from docker.constants import DEFAULT_DOCKER_API_VERSION from requests.packages import urllib3 @@ -24,7 +25,7 @@ try: from unittest import mock except ImportError: - import mock + from unittest import mock DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS @@ -34,7 +35,7 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0, request=None, raw=None): res = requests.Response() res.status_code = status_code - if not isinstance(content, six.binary_type): + if not isinstance(content, bytes): content = json.dumps(content).encode('ascii') res._content = content res.headers = requests.structures.CaseInsensitiveDict(headers or {}) @@ -60,7 +61,7 @@ def fake_resp(method, url, *args, **kwargs): elif (url, method) in fake_api.fake_responses: key = (url, 
method) if not key: - raise Exception('{0} {1}'.format(method, url)) + raise Exception(f'{method} {url}') status_code, content = fake_api.fake_responses[key]() return response(status_code=status_code, content=content) @@ -85,11 +86,11 @@ def fake_delete(self, url, *args, **kwargs): def fake_read_from_socket(self, response, stream, tty=False, demux=False): - return six.binary_type() + return bytes() -url_base = '{0}/'.format(fake_api.prefix) -url_prefix = '{0}v{1}/'.format( +url_base = f'{fake_api.prefix}/' +url_prefix = '{}v{}/'.format( url_base, docker.constants.DEFAULT_DOCKER_API_VERSION) @@ -133,20 +134,20 @@ def test_ctor(self): def test_url_valid_resource(self): url = self.client._url('/hello/{0}/world', 'somename') - assert url == '{0}{1}'.format(url_prefix, 'hello/somename/world') + assert url == '{}{}'.format(url_prefix, 'hello/somename/world') url = self.client._url( '/hello/{0}/world/{1}', 'somename', 'someothername' ) - assert url == '{0}{1}'.format( + assert url == '{}{}'.format( url_prefix, 'hello/somename/world/someothername' ) url = self.client._url('/hello/{0}/world', 'some?name') - assert url == '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world') + assert url == '{}{}'.format(url_prefix, 'hello/some%3Fname/world') url = self.client._url("/images/{0}/push", "localhost:5000/image") - assert url == '{0}{1}'.format( + assert url == '{}{}'.format( url_prefix, 'images/localhost:5000/image/push' ) @@ -156,13 +157,13 @@ def test_url_invalid_resource(self): def test_url_no_resource(self): url = self.client._url('/simple') - assert url == '{0}{1}'.format(url_prefix, 'simple') + assert url == '{}{}'.format(url_prefix, 'simple') def test_url_unversioned_api(self): url = self.client._url( '/hello/{0}/world', 'somename', versioned_api=False ) - assert url == '{0}{1}'.format(url_base, 'hello/somename/world') + assert url == '{}{}'.format(url_base, 'hello/somename/world') def test_version(self): self.client.version() @@ -184,13 +185,13 @@ def test_version_no_api_version(self): def test_retrieve_server_version(self): client = APIClient(version="auto") - assert isinstance(client._version, six.string_types) + assert isinstance(client._version, str) assert not (client._version == "auto") client.close() def test_auto_retrieve_server_version(self): version = self.client._retrieve_server_version() - assert isinstance(version, six.string_types) + assert isinstance(version, str) def test_info(self): self.client.info() @@ -337,8 +338,7 @@ def test_create_host_config_secopt(self): def test_stream_helper_decoding(self): status_code, content = fake_api.fake_responses[url_prefix + 'events']() content_str = json.dumps(content) - if six.PY3: - content_str = content_str.encode('utf-8') + content_str = content_str.encode('utf-8') body = io.BytesIO(content_str) # mock a stream interface @@ -405,7 +405,7 @@ def run_server(self): while not self.stop_server: try: connection, client_address = self.server_socket.accept() - except socket.error: + except OSError: # Probably no connection to accept yet time.sleep(0.01) continue @@ -489,7 +489,7 @@ class TCPSocketStreamTest(unittest.TestCase): @classmethod def setup_class(cls): - cls.server = six.moves.socketserver.ThreadingTCPServer( + cls.server = socketserver.ThreadingTCPServer( ('', 0), cls.get_handler_class()) cls.thread = threading.Thread(target=cls.server.serve_forever) cls.thread.setDaemon(True) @@ -508,7 +508,7 @@ def get_handler_class(cls): stdout_data = cls.stdout_data stderr_data = cls.stderr_data - class 
Handler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler, object): + class Handler(http.server.BaseHTTPRequestHandler): def do_POST(self): resp_data = self.get_resp_data() self.send_response(101) @@ -534,7 +534,7 @@ def get_resp_data(self): data += stderr_data return data else: - raise Exception('Unknown path {0}'.format(path)) + raise Exception(f'Unknown path {path}') @staticmethod def frame_header(stream, data): @@ -632,7 +632,7 @@ def test_custom_user_agent(self): class DisableSocketTest(unittest.TestCase): - class DummySocket(object): + class DummySocket: def __init__(self, timeout=60): self.timeout = timeout diff --git a/tests/unit/api_volume_test.py b/tests/unit/api_volume_test.py index 7850c224f..a8d9193f7 100644 --- a/tests/unit/api_volume_test.py +++ b/tests/unit/api_volume_test.py @@ -104,7 +104,7 @@ def test_inspect_volume(self): args = fake_request.call_args assert args[0][0] == 'GET' - assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name) + assert args[0][1] == f'{url_prefix}volumes/{name}' def test_remove_volume(self): name = 'perfectcherryblossom' @@ -112,4 +112,4 @@ def test_remove_volume(self): args = fake_request.call_args assert args[0][0] == 'DELETE' - assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name) + assert args[0][1] == f'{url_prefix}volumes/{name}' diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index aac891091..8bd2e1658 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import base64 import json import os @@ -15,7 +13,7 @@ try: from unittest import mock except ImportError: - import mock + from unittest import mock class RegressionTest(unittest.TestCase): @@ -239,7 +237,7 @@ def test_load_legacy_config(self): cfg_path = os.path.join(folder, '.dockercfg') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') with open(cfg_path, 'w') as f: - f.write('auth = {0}\n'.format(auth_)) + f.write(f'auth = {auth_}\n') f.write('email = sakuya@scarlet.net') cfg = auth.load_config(cfg_path) @@ -297,13 +295,13 @@ def test_load_config_with_random_name(self): self.addCleanup(shutil.rmtree, folder) dockercfg_path = os.path.join(folder, - '.{0}.dockercfg'.format( + '.{}.dockercfg'.format( random.randrange(100000))) registry = 'https://your.private.registry.io' auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') config = { registry: { - 'auth': '{0}'.format(auth_), + 'auth': f'{auth_}', 'email': 'sakuya@scarlet.net' } } @@ -329,7 +327,7 @@ def test_load_config_custom_config_env(self): auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') config = { registry: { - 'auth': '{0}'.format(auth_), + 'auth': f'{auth_}', 'email': 'sakuya@scarlet.net' } } @@ -357,7 +355,7 @@ def test_load_config_custom_config_env_with_auths(self): config = { 'auths': { registry: { - 'auth': '{0}'.format(auth_), + 'auth': f'{auth_}', 'email': 'sakuya@scarlet.net' } } @@ -386,7 +384,7 @@ def test_load_config_custom_config_env_utf8(self): config = { 'auths': { registry: { - 'auth': '{0}'.format(auth_), + 'auth': f'{auth_}', 'email': 'sakuya@scarlet.net' } } @@ -794,9 +792,9 @@ def store(self, server, username, secret): } def list(self): - return dict( - [(k, v['Username']) for k, v in self.__store.items()] - ) + return { + k: v['Username'] for k, v in self.__store.items() + } def erase(self, server): del self.__store[server] diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index ad88e8456..d647d3a1a 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -15,7 +15,7 
@@ try: from unittest import mock except ImportError: - import mock + from unittest import mock TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs') POOL_SIZE = 20 diff --git a/tests/unit/dockertypes_test.py b/tests/unit/dockertypes_test.py index 0689d07b3..a0a171bec 100644 --- a/tests/unit/dockertypes_test.py +++ b/tests/unit/dockertypes_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import unittest import pytest @@ -15,7 +13,7 @@ try: from unittest import mock except: # noqa: E722 - import mock + from unittest import mock def create_host_config(*args, **kwargs): diff --git a/tests/unit/errors_test.py b/tests/unit/errors_test.py index 54c2ba8f6..f8c3a6663 100644 --- a/tests/unit/errors_test.py +++ b/tests/unit/errors_test.py @@ -126,7 +126,7 @@ def test_container_without_stderr(self): err = ContainerError(container, exit_status, command, image, stderr) msg = ("Command '{}' in image '{}' returned non-zero exit status {}" - ).format(command, image, exit_status, stderr) + ).format(command, image, exit_status) assert str(err) == msg def test_container_with_stderr(self): diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py index 4fd4d1138..4c9332953 100644 --- a/tests/unit/fake_api.py +++ b/tests/unit/fake_api.py @@ -2,7 +2,7 @@ from . import fake_stat -CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION) +CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}' FAKE_CONTAINER_ID = '3cc2351ab11b' FAKE_IMAGE_ID = 'e9aa60c60128' @@ -526,96 +526,96 @@ def post_fake_secret(): prefix = 'http+docker://localnpipe' fake_responses = { - '{0}/version'.format(prefix): + f'{prefix}/version': get_fake_version, - '{1}/{0}/version'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/version': get_fake_version, - '{1}/{0}/info'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/info': get_fake_info, - '{1}/{0}/auth'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/auth': post_fake_auth, - '{1}/{0}/_ping'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/_ping': get_fake_ping, - '{1}/{0}/images/search'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/search': get_fake_search, - '{1}/{0}/images/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/json': get_fake_images, - '{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/test_image/history': get_fake_image_history, - '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/create': post_fake_import_image, - '{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/json': get_fake_containers, - '{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start': post_fake_start_container, - '{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize': post_fake_resize_container, - '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json': get_fake_inspect_container, - '{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename': post_fake_rename_container, - '{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag': 
post_fake_tag_image, - '{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait': get_fake_wait, - '{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs': get_fake_logs, - '{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes': get_fake_diff, - '{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export': get_fake_export, - '{1}/{0}/containers/3cc2351ab11b/update'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update': post_fake_update_container, - '{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec': post_fake_exec_create, - '{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start': post_fake_exec_start, - '{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json': get_fake_exec_inspect, - '{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize': post_fake_exec_resize, - '{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats': get_fake_stats, - '{1}/{0}/containers/3cc2351ab11b/top'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top': get_fake_top, - '{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop': post_fake_stop_container, - '{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill': post_fake_kill_container, - '{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause': post_fake_pause_container, - '{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause': post_fake_unpause_container, - '{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart': post_fake_restart_container, - '{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b': delete_fake_remove_container, - '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/create': post_fake_image_create, - '{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128': delete_fake_remove_image, - '{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get': get_fake_get_image, - '{1}/{0}/images/load'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/load': post_fake_load_image, - '{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/test_image/json': get_fake_inspect_image, - '{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/test_image/insert': get_fake_insert_image, - 
'{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/test_image/push': post_fake_push, - '{1}/{0}/commit'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/commit': post_fake_commit, - '{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/create': post_fake_create_container, - '{1}/{0}/build'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/build': post_fake_build_container, - '{1}/{0}/events'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/events': get_fake_events, - ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'): + (f'{prefix}/{CURRENT_VERSION}/volumes', 'GET'): get_fake_volume_list, - ('{1}/{0}/volumes/create'.format(CURRENT_VERSION, prefix), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'): get_fake_volume, ('{1}/{0}/volumes/{2}'.format( CURRENT_VERSION, prefix, FAKE_VOLUME_NAME @@ -629,11 +629,11 @@ def post_fake_secret(): CURRENT_VERSION, prefix, FAKE_NODE_ID ), 'POST'): post_fake_update_node, - ('{1}/{0}/swarm/join'.format(CURRENT_VERSION, prefix), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'): post_fake_join_swarm, - ('{1}/{0}/networks'.format(CURRENT_VERSION, prefix), 'GET'): + (f'{prefix}/{CURRENT_VERSION}/networks', 'GET'): get_fake_network_list, - ('{1}/{0}/networks/create'.format(CURRENT_VERSION, prefix), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'): post_fake_network, ('{1}/{0}/networks/{2}'.format( CURRENT_VERSION, prefix, FAKE_NETWORK_ID @@ -651,6 +651,6 @@ def post_fake_secret(): CURRENT_VERSION, prefix, FAKE_NETWORK_ID ), 'POST'): post_fake_network_disconnect, - '{1}/{0}/secrets/create'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/secrets/create': post_fake_secret, } diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py index 5825b6ec0..1663ef127 100644 --- a/tests/unit/fake_api_client.py +++ b/tests/unit/fake_api_client.py @@ -7,7 +7,7 @@ try: from unittest import mock except ImportError: - import mock + from unittest import mock class CopyReturnMagicMock(mock.MagicMock): @@ -15,7 +15,7 @@ class CopyReturnMagicMock(mock.MagicMock): A MagicMock which deep copies every return value. 
""" def _mock_call(self, *args, **kwargs): - ret = super(CopyReturnMagicMock, self)._mock_call(*args, **kwargs) + ret = super()._mock_call(*args, **kwargs) if isinstance(ret, (dict, list)): ret = copy.deepcopy(ret) return ret diff --git a/tests/unit/models_resources_test.py b/tests/unit/models_resources_test.py index 5af24ee69..11dea2948 100644 --- a/tests/unit/models_resources_test.py +++ b/tests/unit/models_resources_test.py @@ -16,7 +16,7 @@ def test_reload(self): def test_hash(self): client = make_fake_client() container1 = client.containers.get(FAKE_CONTAINER_ID) - my_set = set([container1]) + my_set = {container1} assert len(my_set) == 1 container2 = client.containers.get(FAKE_CONTAINER_ID) diff --git a/tests/unit/models_secrets_test.py b/tests/unit/models_secrets_test.py index 4ccf4c638..1c261a871 100644 --- a/tests/unit/models_secrets_test.py +++ b/tests/unit/models_secrets_test.py @@ -8,4 +8,4 @@ class CreateServiceTest(unittest.TestCase): def test_secrets_repr(self): client = make_fake_client() secret = client.secrets.create(name="super_secret", data="secret") - assert secret.__repr__() == "".format(FAKE_SECRET_NAME) + assert secret.__repr__() == f"" diff --git a/tests/unit/models_services_test.py b/tests/unit/models_services_test.py index 07bb58970..b9192e422 100644 --- a/tests/unit/models_services_test.py +++ b/tests/unit/models_services_test.py @@ -40,10 +40,10 @@ def test_get_create_service_kwargs(self): 'update_config': {'update': 'config'}, 'endpoint_spec': {'blah': 'blah'}, } - assert set(task_template.keys()) == set([ + assert set(task_template.keys()) == { 'ContainerSpec', 'Resources', 'RestartPolicy', 'Placement', 'LogDriver', 'Networks' - ]) + } assert task_template['Placement'] == { 'Constraints': ['foo=bar'], 'Preferences': ['bar=baz'], @@ -55,7 +55,7 @@ def test_get_create_service_kwargs(self): 'Options': {'foo': 'bar'} } assert task_template['Networks'] == [{'Target': 'somenet'}] - assert set(task_template['ContainerSpec'].keys()) == set([ + assert set(task_template['ContainerSpec'].keys()) == { 'Image', 'Command', 'Args', 'Hostname', 'Env', 'Dir', 'User', 'Labels', 'Mounts', 'StopGracePeriod' - ]) + } diff --git a/tests/unit/ssladapter_test.py b/tests/unit/ssladapter_test.py index 73b73360c..41a87f207 100644 --- a/tests/unit/ssladapter_test.py +++ b/tests/unit/ssladapter_test.py @@ -32,30 +32,30 @@ def test_only_uses_tls(self): class MatchHostnameTest(unittest.TestCase): cert = { 'issuer': ( - (('countryName', u'US'),), - (('stateOrProvinceName', u'California'),), - (('localityName', u'San Francisco'),), - (('organizationName', u'Docker Inc'),), - (('organizationalUnitName', u'Docker-Python'),), - (('commonName', u'localhost'),), - (('emailAddress', u'info@docker.com'),) + (('countryName', 'US'),), + (('stateOrProvinceName', 'California'),), + (('localityName', 'San Francisco'),), + (('organizationName', 'Docker Inc'),), + (('organizationalUnitName', 'Docker-Python'),), + (('commonName', 'localhost'),), + (('emailAddress', 'info@docker.com'),) ), 'notAfter': 'Mar 25 23:08:23 2030 GMT', - 'notBefore': u'Mar 25 23:08:23 2016 GMT', - 'serialNumber': u'BD5F894C839C548F', + 'notBefore': 'Mar 25 23:08:23 2016 GMT', + 'serialNumber': 'BD5F894C839C548F', 'subject': ( - (('countryName', u'US'),), - (('stateOrProvinceName', u'California'),), - (('localityName', u'San Francisco'),), - (('organizationName', u'Docker Inc'),), - (('organizationalUnitName', u'Docker-Python'),), - (('commonName', u'localhost'),), - (('emailAddress', u'info@docker.com'),) + (('countryName', 
'US'),), + (('stateOrProvinceName', 'California'),), + (('localityName', 'San Francisco'),), + (('organizationName', 'Docker Inc'),), + (('organizationalUnitName', 'Docker-Python'),), + (('commonName', 'localhost'),), + (('emailAddress', 'info@docker.com'),) ), 'subjectAltName': ( - ('DNS', u'localhost'), - ('DNS', u'*.gensokyo.jp'), - ('IP Address', u'127.0.0.1'), + ('DNS', 'localhost'), + ('DNS', '*.gensokyo.jp'), + ('IP Address', '127.0.0.1'), ), 'version': 3 } diff --git a/tests/unit/swarm_test.py b/tests/unit/swarm_test.py index 438538028..aee1b9e80 100644 --- a/tests/unit/swarm_test.py +++ b/tests/unit/swarm_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import json from . import fake_api diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py index bc6fb5f40..9f183886b 100644 --- a/tests/unit/utils_build_test.py +++ b/tests/unit/utils_build_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os import os.path import shutil @@ -82,7 +80,7 @@ def test_no_dupes(self): assert sorted(paths) == sorted(set(paths)) def test_wildcard_exclude(self): - assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore']) + assert self.exclude(['*']) == {'Dockerfile', '.dockerignore'} def test_exclude_dockerfile_dockerignore(self): """ @@ -99,18 +97,18 @@ def test_exclude_custom_dockerfile(self): If we're using a custom Dockerfile, make sure that's not excluded. """ - assert self.exclude(['*'], dockerfile='Dockerfile.alt') == set( - ['Dockerfile.alt', '.dockerignore'] - ) + assert self.exclude(['*'], dockerfile='Dockerfile.alt') == { + 'Dockerfile.alt', '.dockerignore' + } assert self.exclude( ['*'], dockerfile='foo/Dockerfile3' - ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore'])) + ) == convert_paths({'foo/Dockerfile3', '.dockerignore'}) # https://github.com/docker/docker-py/issues/1956 assert self.exclude( ['*'], dockerfile='./foo/Dockerfile3' - ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore'])) + ) == convert_paths({'foo/Dockerfile3', '.dockerignore'}) def test_exclude_dockerfile_child(self): includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3') @@ -119,56 +117,56 @@ def test_exclude_dockerfile_child(self): def test_single_filename(self): assert self.exclude(['a.py']) == convert_paths( - self.all_paths - set(['a.py']) + self.all_paths - {'a.py'} ) def test_single_filename_leading_dot_slash(self): assert self.exclude(['./a.py']) == convert_paths( - self.all_paths - set(['a.py']) + self.all_paths - {'a.py'} ) # As odd as it sounds, a filename pattern with a trailing slash on the # end *will* result in that file being excluded. 
def test_single_filename_trailing_slash(self): assert self.exclude(['a.py/']) == convert_paths( - self.all_paths - set(['a.py']) + self.all_paths - {'a.py'} ) def test_wildcard_filename_start(self): assert self.exclude(['*.py']) == convert_paths( - self.all_paths - set(['a.py', 'b.py', 'cde.py']) + self.all_paths - {'a.py', 'b.py', 'cde.py'} ) def test_wildcard_with_exception(self): assert self.exclude(['*.py', '!b.py']) == convert_paths( - self.all_paths - set(['a.py', 'cde.py']) + self.all_paths - {'a.py', 'cde.py'} ) def test_wildcard_with_wildcard_exception(self): assert self.exclude(['*.*', '!*.go']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'a.py', 'b.py', 'cde.py', 'Dockerfile.alt', - ]) + } ) def test_wildcard_filename_end(self): assert self.exclude(['a.*']) == convert_paths( - self.all_paths - set(['a.py', 'a.go']) + self.all_paths - {'a.py', 'a.go'} ) def test_question_mark(self): assert self.exclude(['?.py']) == convert_paths( - self.all_paths - set(['a.py', 'b.py']) + self.all_paths - {'a.py', 'b.py'} ) def test_single_subdir_single_filename(self): assert self.exclude(['foo/a.py']) == convert_paths( - self.all_paths - set(['foo/a.py']) + self.all_paths - {'foo/a.py'} ) def test_single_subdir_single_filename_leading_slash(self): assert self.exclude(['/foo/a.py']) == convert_paths( - self.all_paths - set(['foo/a.py']) + self.all_paths - {'foo/a.py'} ) def test_exclude_include_absolute_path(self): @@ -176,57 +174,57 @@ def test_exclude_include_absolute_path(self): assert exclude_paths( base, ['/*', '!/*.py'] - ) == set(['a.py', 'b.py']) + ) == {'a.py', 'b.py'} def test_single_subdir_with_path_traversal(self): assert self.exclude(['foo/whoops/../a.py']) == convert_paths( - self.all_paths - set(['foo/a.py']) + self.all_paths - {'foo/a.py'} ) def test_single_subdir_wildcard_filename(self): assert self.exclude(['foo/*.py']) == convert_paths( - self.all_paths - set(['foo/a.py', 'foo/b.py']) + self.all_paths - {'foo/a.py', 'foo/b.py'} ) def test_wildcard_subdir_single_filename(self): assert self.exclude(['*/a.py']) == convert_paths( - self.all_paths - set(['foo/a.py', 'bar/a.py']) + self.all_paths - {'foo/a.py', 'bar/a.py'} ) def test_wildcard_subdir_wildcard_filename(self): assert self.exclude(['*/*.py']) == convert_paths( - self.all_paths - set(['foo/a.py', 'foo/b.py', 'bar/a.py']) + self.all_paths - {'foo/a.py', 'foo/b.py', 'bar/a.py'} ) def test_directory(self): assert self.exclude(['foo']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' - ]) + } ) def test_directory_with_trailing_slash(self): assert self.exclude(['foo']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' - ]) + } ) def test_directory_with_single_exception(self): assert self.exclude(['foo', '!foo/bar/a.py']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar', 'foo/Dockerfile3' - ]) + } ) def test_directory_with_subdir_exception(self): assert self.exclude(['foo', '!foo/bar']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3' - ]) + } ) @pytest.mark.skipif( @@ -234,21 +232,21 @@ def test_directory_with_subdir_exception(self): ) def test_directory_with_subdir_exception_win32_pathsep(self): assert self.exclude(['foo', '!foo\\bar']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo/a.py', 
'foo/b.py', 'foo', 'foo/Dockerfile3' - ]) + } ) def test_directory_with_wildcard_exception(self): assert self.exclude(['foo', '!foo/*.py']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo/bar', 'foo/bar/a.py', 'foo', 'foo/Dockerfile3' - ]) + } ) def test_subdirectory(self): assert self.exclude(['foo/bar']) == convert_paths( - self.all_paths - set(['foo/bar', 'foo/bar/a.py']) + self.all_paths - {'foo/bar', 'foo/bar/a.py'} ) @pytest.mark.skipif( @@ -256,33 +254,33 @@ def test_subdirectory(self): ) def test_subdirectory_win32_pathsep(self): assert self.exclude(['foo\\bar']) == convert_paths( - self.all_paths - set(['foo/bar', 'foo/bar/a.py']) + self.all_paths - {'foo/bar', 'foo/bar/a.py'} ) def test_double_wildcard(self): assert self.exclude(['**/a.py']) == convert_paths( - self.all_paths - set( - ['a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py'] - ) + self.all_paths - { + 'a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py' + } ) assert self.exclude(['foo/**/bar']) == convert_paths( - self.all_paths - set(['foo/bar', 'foo/bar/a.py']) + self.all_paths - {'foo/bar', 'foo/bar/a.py'} ) def test_single_and_double_wildcard(self): assert self.exclude(['**/target/*/*']) == convert_paths( - self.all_paths - set( - ['target/subdir/file.txt', + self.all_paths - { + 'target/subdir/file.txt', 'subdir/target/subdir/file.txt', - 'subdir/subdir2/target/subdir/file.txt'] - ) + 'subdir/subdir2/target/subdir/file.txt' + } ) def test_trailing_double_wildcard(self): assert self.exclude(['subdir/**']) == convert_paths( - self.all_paths - set( - ['subdir/file.txt', + self.all_paths - { + 'subdir/file.txt', 'subdir/target/file.txt', 'subdir/target/subdir/file.txt', 'subdir/subdir2/file.txt', @@ -292,16 +290,16 @@ def test_trailing_double_wildcard(self): 'subdir/target/subdir', 'subdir/subdir2', 'subdir/subdir2/target', - 'subdir/subdir2/target/subdir'] - ) + 'subdir/subdir2/target/subdir' + } ) def test_double_wildcard_with_exception(self): assert self.exclude(['**', '!bar', '!foo/bar']) == convert_paths( - set([ + { 'foo/bar', 'foo/bar/a.py', 'bar', 'bar/a.py', 'Dockerfile', '.dockerignore', - ]) + } ) def test_include_wildcard(self): @@ -324,7 +322,7 @@ def test_last_line_precedence(self): assert exclude_paths( base, ['*.md', '!README*.md', 'README-secret.md'] - ) == set(['README.md', 'README-bis.md']) + ) == {'README.md', 'README-bis.md'} def test_parent_directory(self): base = make_tree( @@ -340,7 +338,7 @@ def test_parent_directory(self): assert exclude_paths( base, ['../a.py', '/../b.py'] - ) == set(['c.py']) + ) == {'c.py'} class TarTest(unittest.TestCase): @@ -374,14 +372,14 @@ def test_tar_with_excludes(self): '.dockerignore', ] - expected_names = set([ + expected_names = { 'Dockerfile', '.dockerignore', 'a.go', 'b.py', 'bar', 'bar/a.py', - ]) + } base = make_tree(dirs, files) self.addCleanup(shutil.rmtree, base) @@ -413,7 +411,7 @@ def test_tar_with_inaccessible_file(self): with pytest.raises(IOError) as ei: tar(base) - assert 'Can not read file in context: {}'.format(full_path) in ( + assert f'Can not read file in context: {full_path}' in ( ei.exconly() ) diff --git a/tests/unit/utils_config_test.py b/tests/unit/utils_config_test.py index b0934f956..83e04a146 100644 --- a/tests/unit/utils_config_test.py +++ b/tests/unit/utils_config_test.py @@ -11,7 +11,7 @@ try: from unittest import mock except ImportError: - import mock + from unittest import mock class FindConfigFileTest(unittest.TestCase): diff --git a/tests/unit/utils_json_stream_test.py b/tests/unit/utils_json_stream_test.py index 
f7aefd0f1..821ebe42d 100644 --- a/tests/unit/utils_json_stream_test.py +++ b/tests/unit/utils_json_stream_test.py @@ -1,11 +1,7 @@ -# encoding: utf-8 -from __future__ import absolute_import -from __future__ import unicode_literals - from docker.utils.json_stream import json_splitter, stream_as_text, json_stream -class TestJsonSplitter(object): +class TestJsonSplitter: def test_json_splitter_no_object(self): data = '{"foo": "bar' @@ -20,7 +16,7 @@ def test_json_splitter_leading_whitespace(self): assert json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}') -class TestStreamAsText(object): +class TestStreamAsText: def test_stream_with_non_utf_unicode_character(self): stream = [b'\xed\xf3\xf3'] @@ -28,12 +24,12 @@ def test_stream_with_non_utf_unicode_character(self): assert output == '���' def test_stream_with_utf_character(self): - stream = ['ěĝ'.encode('utf-8')] + stream = ['ěĝ'.encode()] output, = stream_as_text(stream) assert output == 'ěĝ' -class TestJsonStream(object): +class TestJsonStream: def test_with_falsy_entries(self): stream = [ diff --git a/tests/unit/utils_proxy_test.py b/tests/unit/utils_proxy_test.py index ff0e14ba7..2da60401d 100644 --- a/tests/unit/utils_proxy_test.py +++ b/tests/unit/utils_proxy_test.py @@ -1,7 +1,4 @@ -# -*- coding: utf-8 -*- - import unittest -import six from docker.utils.proxy import ProxyConfig @@ -65,7 +62,7 @@ def test_inject_proxy_environment(self): # Proxy config is non null, env is None. self.assertSetEqual( set(CONFIG.inject_proxy_environment(None)), - set(['{}={}'.format(k, v) for k, v in six.iteritems(ENV)])) + {f'{k}={v}' for k, v in ENV.items()}) # Proxy config is null, env is None. self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None) @@ -74,7 +71,7 @@ def test_inject_proxy_environment(self): # Proxy config is non null, env is non null actual = CONFIG.inject_proxy_environment(env) - expected = ['{}={}'.format(k, v) for k, v in six.iteritems(ENV)] + env + expected = [f'{k}={v}' for k, v in ENV.items()] + env # It's important that the first 8 variables are the ones from the proxy # config, and the last 2 are the ones from the input environment self.assertSetEqual(set(actual[:8]), set(expected[:8])) diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 0d6ff22d7..802d91962 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import base64 import json import os @@ -9,7 +7,6 @@ import unittest import pytest -import six from docker.api.client import APIClient from docker.constants import IS_WINDOWS_PLATFORM, DEFAULT_DOCKER_API_VERSION from docker.errors import DockerException @@ -195,22 +192,22 @@ def test_convert_volume_binds_no_mode(self): assert convert_volume_binds(data) == ['/mnt/vol1:/data:rw'] def test_convert_volume_binds_unicode_bytes_input(self): - expected = [u'/mnt/지연:/unicode/박:rw'] + expected = ['/mnt/지연:/unicode/박:rw'] data = { - u'/mnt/지연'.encode('utf-8'): { - 'bind': u'/unicode/박'.encode('utf-8'), + '/mnt/지연'.encode(): { + 'bind': '/unicode/박'.encode(), 'mode': 'rw' } } assert convert_volume_binds(data) == expected def test_convert_volume_binds_unicode_unicode_input(self): - expected = [u'/mnt/지연:/unicode/박:rw'] + expected = ['/mnt/지연:/unicode/박:rw'] data = { - u'/mnt/지연': { - 'bind': u'/unicode/박', + '/mnt/지연': { + 'bind': '/unicode/박', 'mode': 'rw' } } @@ -359,14 +356,14 @@ def test_private_reg_image_tag(self): ) def test_index_image_sha(self): - assert parse_repository_tag("root@sha256:{0}".format(self.sha)) == ( - "root", 
"sha256:{0}".format(self.sha) + assert parse_repository_tag(f"root@sha256:{self.sha}") == ( + "root", f"sha256:{self.sha}" ) def test_private_reg_image_sha(self): assert parse_repository_tag( - "url:5000/repo@sha256:{0}".format(self.sha) - ) == ("url:5000/repo", "sha256:{0}".format(self.sha)) + f"url:5000/repo@sha256:{self.sha}" + ) == ("url:5000/repo", f"sha256:{self.sha}") class ParseDeviceTest(unittest.TestCase): @@ -463,20 +460,13 @@ def test_convert_filters(self): def test_decode_json_header(self): obj = {'a': 'b', 'c': 1} data = None - if six.PY3: - data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8')) - else: - data = base64.urlsafe_b64encode(json.dumps(obj)) + data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8')) decoded_data = decode_json_header(data) assert obj == decoded_data class SplitCommandTest(unittest.TestCase): def test_split_command_with_unicode(self): - assert split_command(u'echo μμ') == ['echo', 'μμ'] - - @pytest.mark.skipif(six.PY3, reason="shlex doesn't support bytes in py3") - def test_split_command_with_bytes(self): assert split_command('echo μμ') == ['echo', 'μμ'] @@ -626,7 +616,7 @@ def test_format_env_binary_unicode_value(self): env_dict = { 'ARTIST_NAME': b'\xec\x86\xa1\xec\xa7\x80\xec\x9d\x80' } - assert format_environment(env_dict) == [u'ARTIST_NAME=송지은'] + assert format_environment(env_dict) == ['ARTIST_NAME=송지은'] def test_format_env_no_value(self): env_dict = { From 19d6cd8a015f1484e147a0bb9d0b4684c2a6aaac Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Fri, 6 Aug 2021 09:32:42 -0300 Subject: [PATCH 03/12] Bump requests => 2.26.0 Signed-off-by: Ulysses Souza --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1d0be30a1..f6b17fd5f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,6 @@ pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 pywin32==227; sys_platform == 'win32' -requests==2.20.0 +requests==2.26.0 urllib3==1.24.3 websocket-client==0.56.0 From 582f6277ce4dfe67b5be5a52b88bbdef3f349e11 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Aug 2021 12:46:56 +0000 Subject: [PATCH 04/12] Bump urllib3 from 1.24.3 to 1.26.5 Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.24.3...1.26.5) --- updated-dependencies: - dependency-name: urllib3 dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f6b17fd5f..42af699be 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,5 +13,5 @@ pyOpenSSL==18.0.0 pyparsing==2.2.0 pywin32==227; sys_platform == 'win32' requests==2.26.0 -urllib3==1.24.3 +urllib3==1.26.5 websocket-client==0.56.0 From e0d186d754693feb7d27c2352e455c5febb4a5cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Aug 2021 20:57:04 +0000 Subject: [PATCH 05/12] Bump pywin32 from 227 to 301 Bumps [pywin32](https://github.com/mhammond/pywin32) from 227 to 301. 
- [Release notes](https://github.com/mhammond/pywin32/releases) - [Changelog](https://github.com/mhammond/pywin32/blob/master/CHANGES.txt) - [Commits](https://github.com/mhammond/pywin32/commits) --- updated-dependencies: - dependency-name: pywin32 dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 42af699be..26cbc6fb4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,7 @@ paramiko==2.4.2 pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 -pywin32==227; sys_platform == 'win32' +pywin32==301; sys_platform == 'win32' requests==2.26.0 urllib3==1.26.5 websocket-client==0.56.0 From 2fa56879a2f978387d230db087003d79eb2762d0 Mon Sep 17 00:00:00 2001 From: sinarostami Date: Mon, 16 Aug 2021 00:06:45 +0430 Subject: [PATCH 06/12] Improve containers documentation Signed-off-by: sinarostami --- docker/models/containers.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docker/models/containers.py b/docker/models/containers.py index 36cbbc41a..957deed46 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -761,6 +761,14 @@ def run(self, image, command=None, stdout=True, stderr=False, {'/home/user1/': {'bind': '/mnt/vol2', 'mode': 'rw'}, '/var/www': {'bind': '/mnt/vol1', 'mode': 'ro'}} + Or a list of strings which each one of its elements specifies a mount volume. + + For example: + + .. code-block:: python + + ['/home/user1/:/mnt/vol2','/var/www:/mnt/vol1'] + volumes_from (:py:class:`list`): List of container names or IDs to get volumes from. working_dir (str): Path to the working directory. From 8da03e01265f229a91aaffb7af2aa2057e08f1b9 Mon Sep 17 00:00:00 2001 From: Shehzaman Date: Thu, 27 May 2021 00:11:38 +0530 Subject: [PATCH 07/12] Put back identityfile parameter Signed-off-by: Shehzaman --- docker/transport/sshconn.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index fb5c6bbe8..e08e3a868 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -205,7 +205,6 @@ def _create_paramiko_client(self, base_url): with open(ssh_config_file) as f: conf.parse(f) host_config = conf.lookup(base_url.hostname) - self.ssh_conf = host_config if 'proxycommand' in host_config: self.ssh_params["sock"] = paramiko.ProxyCommand( self.ssh_conf['proxycommand'] @@ -213,9 +212,11 @@ def _create_paramiko_client(self, base_url): if 'hostname' in host_config: self.ssh_params['hostname'] = host_config['hostname'] if base_url.port is None and 'port' in host_config: - self.ssh_params['port'] = self.ssh_conf['port'] + self.ssh_params['port'] = host_config['port'] if base_url.username is None and 'user' in host_config: - self.ssh_params['username'] = self.ssh_conf['user'] + self.ssh_params['username'] = host_config['user'] + if 'identityfile' in host_config: + self.ssh_params['key_filename'] = host_config['identityfile'] self.ssh_client.load_system_host_keys() self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) From 4a3cddf4bf926f3aa0d46d5f0318dbb212231377 Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Tue, 31 Aug 2021 15:57:32 +0200 Subject: [PATCH 08/12] Update changelog for v5.0.0 Signed-off-by: Anca Iordache --- docker/version.py | 2 +- docs/change-log.md | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 
355410412..b95a1edef 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.5.0-dev" +version = "5.0.0-dev" version_info = tuple(int(d) for d in version.split("-")[0].split(".")) diff --git a/docs/change-log.md b/docs/change-log.md index 8db3fc582..63a029e13 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,24 @@ Change log ========== +5.0.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/70?closed=1) + +### Breaking changes +- Remove support for Python 2.7 +- Make Python 3.6 the minimum version supported + +### Features +- Add `limit` parameter to image search endpoint + +### Bugfixes +- Fix `KeyError` exception on secret create +- Verify TLS keys loaded from docker contexts +- Update PORT_SPEC regex to allow square brackets for IPv6 addresses +- Fix containers and images documentation examples + 4.4.4 ----- From c5fc19385765b2724285689a94c408cfd486f210 Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Tue, 31 Aug 2021 16:39:50 +0200 Subject: [PATCH 09/12] Update changelog for 5.0.1 release Signed-off-by: Anca Iordache --- docker/version.py | 2 +- docs/change-log.md | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index b95a1edef..5687086f1 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "5.0.0-dev" +version = "5.1.0-dev" version_info = tuple(int(d) for d in version.split("-")[0].split(".")) diff --git a/docs/change-log.md b/docs/change-log.md index 63a029e13..441e91def 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,20 @@ Change log ========== +5.0.1 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/74?closed=1) + +### Bugfixes +- Bring back support for ssh identity file +- Cleanup remaining python-2 dependencies +- Fix image save example in docs + +### Miscellaneous +- Bump urllib3 to 1.26.5 +- Bump requests to 2.26.0 + 5.0.0 ----- From f9b85586ca7244ada8b66a4dab1fd324caccbe24 Mon Sep 17 00:00:00 2001 From: Adam Aposhian Date: Tue, 31 Aug 2021 15:02:04 -0600 Subject: [PATCH 10/12] fix(transport): remove disable_buffering option Signed-off-by: Adam Aposhian --- docker/transport/unixconn.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py index adb6f18a1..1b00762a6 100644 --- a/docker/transport/unixconn.py +++ b/docker/transport/unixconn.py @@ -23,7 +23,6 @@ def __init__(self, base_url, unix_socket, timeout=60): self.base_url = base_url self.unix_socket = unix_socket self.timeout = timeout - self.disable_buffering = False def connect(self): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) @@ -33,13 +32,8 @@ def connect(self): def putheader(self, header, *values): super().putheader(header, *values) - if header == 'Connection' and 'Upgrade' in values: - self.disable_buffering = True def response_class(self, sock, *args, **kwargs): - if self.disable_buffering: - kwargs['disable_buffering'] = True - return httplib.HTTPResponse(sock, *args, **kwargs) From a9265197d262302d34846e26886347f68c83bb5d Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Wed, 1 Sep 2021 19:23:59 +0200 Subject: [PATCH 11/12] Post-release changelog update Signed-off-by: Anca Iordache --- docs/change-log.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index 441e91def..2ff0774f4 100644 --- a/docs/change-log.md +++ 
b/docs/change-log.md @@ -1,6 +1,14 @@ Change log ========== +5.0.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/75?closed=1) + +### Bugfixes +- Fix `disable_buffering` regression + 5.0.1 ----- From 63618b5e11e9326ed6e4cad6a0b012b9dc02593f Mon Sep 17 00:00:00 2001 From: Segev Finer Date: Thu, 15 Mar 2018 21:46:24 +0200 Subject: [PATCH 12/12] Fix getting a read timeout for logs/attach with a tty and slow output Fixes #931 Signed-off-by: Segev Finer --- docker/api/client.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docker/api/client.py b/docker/api/client.py index f0cb39b86..2667922d9 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -397,6 +397,12 @@ def _multiplexed_response_stream_helper(self, response): def _stream_raw_result(self, response, chunk_size=1, decode=True): ''' Stream result for TTY-enabled container and raw binary data''' self._raise_for_status(response) + + # Disable timeout on the underlying socket to prevent + # Read timed out(s) for long running processes + socket = self._get_raw_response_socket(response) + self._disable_socket_timeout(socket) + yield from response.iter_content(chunk_size, decode) def _read_from_socket(self, response, stream, tty=True, demux=False):
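
For reference, a minimal sketch (not part of the patch series) of the user-facing scenario this last change addresses, assuming a local daemon and the stock `alpine` image as placeholders: following raw TTY output from a container that prints only once every couple of minutes. Before this fix, the client's read timeout could abort such a stream between chunks; with the socket timeout disabled in `_stream_raw_result`, the generator below keeps yielding for as long as the container runs.

    import docker

    client = docker.from_env()
    container = client.containers.run(
        "alpine",  # placeholder image
        ["sh", "-c", "while true; do date; sleep 120; done"],
        tty=True,
        detach=True,
    )
    try:
        # Gaps between chunks exceed the default 60s read timeout; the patched
        # _stream_raw_result disables the timeout on the underlying socket so
        # this loop is no longer interrupted by a read timeout.
        for chunk in container.logs(stream=True, follow=True):
            print(chunk.decode("utf-8", errors="replace"), end="")
    finally:
        container.remove(force=True)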