Replace str.format() uses with f-strings.

Felix Fontein 2025-10-05 22:03:53 +02:00
parent 9fd3cedd1a
commit e4d37af9ca
82 changed files with 457 additions and 595 deletions
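The conversion applied across these files is mechanical: each positional str.format() call becomes an f-string, conversion flags such as !r are carried over onto the interpolated expression, and longer expressions are first hoisted into local variables (remote_addr, stdin_part, and similar) so the interpolations stay readable. A minimal, self-contained sketch of that pattern follows; the DockerError class and the sample values are hypothetical stand-ins, not code from the changed files.

class DockerError(Exception):
    """Stand-in for the collection's connection-failure exceptions (hypothetical)."""


def check_env_value(what, val):
    # Before: positional str.format() with an explicit !r conversion:
    #     raise DockerError(
    #         'Non-string {0} found. {1}: {2!r}'.format(what.lower(), what, val)
    #     )
    # After: the same message as an f-string; !r still applies to the value.
    if not isinstance(val, str):
        raise DockerError(f'Non-string {what.lower()} found. {what}: {val!r}')


def describe_exec(command, in_data=None):
    # Longer expressions are hoisted into locals before interpolation,
    # mirroring how repeated get_option() calls were pulled out in the diff.
    stdin_part = f', with stdin ({len(in_data)} bytes)' if in_data is not None else ''
    return f'EXEC {command}{stdin_part}'


if __name__ == '__main__':
    print(describe_exec('/bin/sh -c true', in_data=b'hello'))
    try:
        check_env_value('Key', 42)
    except DockerError as exc:
        print(exc)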


@ -249,9 +249,8 @@ class Connection(ConnectionBase):
for val, what in ((k, 'Key'), (v, 'Value')): for val, what in ((k, 'Key'), (v, 'Value')):
if not isinstance(val, str): if not isinstance(val, str):
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
'Non-string {0} found for extra_env option. Ambiguous env options must be ' f'Non-string {what.lower()} found for extra_env option. Ambiguous env options must be '
'wrapped in quotes to avoid them being interpreted. {1}: {2!r}' f'wrapped in quotes to avoid them being interpreted. {what}: {val!r}'
.format(what.lower(), what, val)
) )
local_cmd += [b'-e', b'%s=%s' % (to_bytes(k, errors='surrogate_or_strict'), to_bytes(v, errors='surrogate_or_strict'))] local_cmd += [b'-e', b'%s=%s' % (to_bytes(k, errors='surrogate_or_strict'), to_bytes(v, errors='surrogate_or_strict'))]
@ -260,8 +259,7 @@ class Connection(ConnectionBase):
if self.docker_version != 'dev' and LooseVersion(self.docker_version) < LooseVersion('18.06'): if self.docker_version != 'dev' and LooseVersion(self.docker_version) < LooseVersion('18.06'):
# https://github.com/docker/cli/pull/732, first appeared in release 18.06.0 # https://github.com/docker/cli/pull/732, first appeared in release 18.06.0
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
'Providing the working directory requires Docker CLI version 18.06 or newer. You have Docker CLI version {0}.' f'Providing the working directory requires Docker CLI version 18.06 or newer. You have Docker CLI version {self.docker_version}.'
.format(self.docker_version)
) )
if self.get_option('privileged'): if self.get_option('privileged'):
@ -318,8 +316,7 @@ class Connection(ConnectionBase):
self.remote_user = None self.remote_user = None
actual_user = self._get_docker_remote_user() actual_user = self._get_docker_remote_user()
if actual_user != self.get_option('remote_user'): if actual_user != self.get_option('remote_user'):
display.warning('docker {0} does not support remote_user, using container default: {1}' display.warning(f'docker {self.docker_version} does not support remote_user, using container default: {self.actual_user or "?"}')
.format(self.docker_version, self.actual_user or '?'))
return actual_user return actual_user
elif self._display.verbosity > 2: elif self._display.verbosity > 2:
# Since we are not setting the actual_user, look it up so we have it for logging later # Since we are not setting the actual_user, look it up so we have it for logging later
@ -335,9 +332,7 @@ class Connection(ConnectionBase):
if not self._connected: if not self._connected:
self._set_conn_data() self._set_conn_data()
actual_user = self._get_actual_user() actual_user = self._get_actual_user()
display.vvv("ESTABLISH DOCKER CONNECTION FOR USER: {0}".format( display.vvv(f"ESTABLISH DOCKER CONNECTION FOR USER: {actual_user or '?'}", host=self.get_option('remote_addr'))
actual_user or '?'), host=self.get_option('remote_addr')
)
self._connected = True self._connected = True
def exec_command(self, cmd, in_data=None, sudoable=False): def exec_command(self, cmd, in_data=None, sudoable=False):
@ -349,7 +344,7 @@ class Connection(ConnectionBase):
local_cmd = self._build_exec_cmd([self._play_context.executable, '-c', cmd]) local_cmd = self._build_exec_cmd([self._play_context.executable, '-c', cmd])
display.vvv("EXEC {0}".format(to_text(local_cmd)), host=self.get_option('remote_addr')) display.vvv(f"EXEC {to_text(local_cmd)}", host=self.get_option('remote_addr'))
display.debug("opening command with Popen()") display.debug("opening command with Popen()")
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]


@ -146,27 +146,27 @@ class Connection(ConnectionBase):
has_pipelining = True has_pipelining = True
def _call_client(self, callable, not_found_can_be_resource=False): def _call_client(self, callable, not_found_can_be_resource=False):
remote_addr = self.get_option('remote_addr')
try: try:
return callable() return callable()
except NotFound as e: except NotFound as e:
if not_found_can_be_resource: if not_found_can_be_resource:
raise AnsibleConnectionFailure('Could not find container "{1}" or resource in it ({0})'.format(e, self.get_option('remote_addr'))) raise AnsibleConnectionFailure(f'Could not find container "{remote_addr}" or resource in it ({e})')
else: else:
raise AnsibleConnectionFailure('Could not find container "{1}" ({0})'.format(e, self.get_option('remote_addr'))) raise AnsibleConnectionFailure(f'Could not find container "{remote_addr}" ({e})')
except APIError as e: except APIError as e:
if e.response is not None and e.response.status_code == 409: if e.response is not None and e.response.status_code == 409:
raise AnsibleConnectionFailure('The container "{1}" has been paused ({0})'.format(e, self.get_option('remote_addr'))) raise AnsibleConnectionFailure(f'The container "{remote_addr}" has been paused ({e})')
self.client.fail( self.client.fail(
'An unexpected Docker error occurred for container "{1}": {0}'.format(e, self.get_option('remote_addr')) f'An unexpected Docker error occurred for container "{remote_addr}": {e}'
) )
except DockerException as e: except DockerException as e:
self.client.fail( self.client.fail(
'An unexpected Docker error occurred for container "{1}": {0}'.format(e, self.get_option('remote_addr')) f'An unexpected Docker error occurred for container "{remote_addr}": {e}'
) )
except RequestException as e: except RequestException as e:
self.client.fail( self.client.fail(
'An unexpected requests error occurred for container "{1}" when trying to talk to the Docker daemon: {0}' f'An unexpected requests error occurred for container "{remote_addr}" when trying to talk to the Docker daemon: {e}'
.format(e, self.get_option('remote_addr'))
) )
def __init__(self, play_context, new_stdin, *args, **kwargs): def __init__(self, play_context, new_stdin, *args, **kwargs):
@ -186,9 +186,7 @@ class Connection(ConnectionBase):
super(Connection, self)._connect() super(Connection, self)._connect()
if not self._connected: if not self._connected:
self.actual_user = self.get_option('remote_user') self.actual_user = self.get_option('remote_user')
display.vvv("ESTABLISH DOCKER CONNECTION FOR USER: {0}".format( display.vvv(f"ESTABLISH DOCKER CONNECTION FOR USER: {self.actual_user or '?'}", host=self.get_option('remote_addr'))
self.actual_user or '?'), host=self.get_option('remote_addr')
)
if self.client is None: if self.client is None:
self.client = AnsibleDockerClient(self, min_docker_api_version=MIN_DOCKER_API) self.client = AnsibleDockerClient(self, min_docker_api_version=MIN_DOCKER_API)
self._connected = True self._connected = True
@ -202,7 +200,7 @@ class Connection(ConnectionBase):
if result.get('Config'): if result.get('Config'):
self.actual_user = result['Config'].get('User') self.actual_user = result['Config'].get('User')
if self.actual_user is not None: if self.actual_user is not None:
display.vvv("Actual user is '{0}'".format(self.actual_user)) display.vvv(f"Actual user is '{self.actual_user}'")
def exec_command(self, cmd, in_data=None, sudoable=False): def exec_command(self, cmd, in_data=None, sudoable=False):
""" Run a command on the docker host """ """ Run a command on the docker host """
@ -213,12 +211,10 @@ class Connection(ConnectionBase):
do_become = self.become and self.become.expect_prompt() and sudoable do_become = self.become and self.become.expect_prompt() and sudoable
stdin_part = f', with stdin ({len(in_data)} bytes)' if in_data is not None else ''
become_part = ', with become prompt' if do_become else ''
display.vvv( display.vvv(
"EXEC {0}{1}{2}".format( f"EXEC {to_text(command)}{stdin_part}{become_part}",
to_text(command),
', with stdin ({0} bytes)'.format(len(in_data)) if in_data is not None else '',
', with become prompt' if do_become else '',
),
host=self.get_option('remote_addr') host=self.get_option('remote_addr')
) )
@ -244,19 +240,19 @@ class Connection(ConnectionBase):
for val, what in ((k, 'Key'), (v, 'Value')): for val, what in ((k, 'Key'), (v, 'Value')):
if not isinstance(val, str): if not isinstance(val, str):
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
'Non-string {0} found for extra_env option. Ambiguous env options must be ' f'Non-string {what.lower()} found for extra_env option. Ambiguous env options must be '
'wrapped in quotes to avoid them being interpreted. {1}: {2!r}' f'wrapped in quotes to avoid them being interpreted. {what}: {val!r}'
.format(what.lower(), what, val)
) )
data['Env'].append('{0}={1}'.format(to_text(k, errors='surrogate_or_strict'), to_text(v, errors='surrogate_or_strict'))) kk = to_text(k, errors='surrogate_or_strict')
vv = to_text(v, errors='surrogate_or_strict')
data['Env'].append(f'{kk}={vv}')
if self.get_option('working_dir') is not None: if self.get_option('working_dir') is not None:
data['WorkingDir'] = self.get_option('working_dir') data['WorkingDir'] = self.get_option('working_dir')
if self.client.docker_api_version < LooseVersion('1.35'): if self.client.docker_api_version < LooseVersion('1.35'):
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
'Providing the working directory requires Docker API version 1.35 or newer.' 'Providing the working directory requires Docker API version 1.35 or newer.'
' The Docker daemon the connection is using has API version {0}.' f' The Docker daemon the connection is using has API version {self.client.docker_api_version_str}.'
.format(self.client.docker_api_version_str)
) )
exec_data = self._call_client(lambda: self.client.post_json_to_json('/containers/{0}/exec', self.get_option('remote_addr'), data=data)) exec_data = self._call_client(lambda: self.client.post_json_to_json('/containers/{0}/exec', self.get_option('remote_addr'), data=data))
@ -331,17 +327,17 @@ class Connection(ConnectionBase):
if self.actual_user not in self.ids: if self.actual_user not in self.ids:
dummy, ids, dummy = self.exec_command(b'id -u && id -g') dummy, ids, dummy = self.exec_command(b'id -u && id -g')
remote_addr = self.get_option('remote_addr')
try: try:
user_id, group_id = ids.splitlines() user_id, group_id = ids.splitlines()
self.ids[self.actual_user] = int(user_id), int(group_id) self.ids[self.actual_user] = int(user_id), int(group_id)
display.vvvv( display.vvvv(
'PUT: Determined uid={0} and gid={1} for user "{2}"'.format(user_id, group_id, self.actual_user), f'PUT: Determined uid={user_id} and gid={group_id} for user "{self.actual_user}"',
host=self.get_option('remote_addr') host=remote_addr
) )
except Exception as e: except Exception as e:
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
'Error while determining user and group ID of current user in container "{1}": {0}\nGot value: {2!r}' f'Error while determining user and group ID of current user in container "{remote_addr}": {e}\nGot value: {ids!r}'
.format(e, self.get_option('remote_addr'), ids)
) )
user_id, group_id = self.ids[self.actual_user] user_id, group_id = self.ids[self.actual_user]


@ -76,9 +76,7 @@ class Connection(ConnectionBase):
if not self._connected: if not self._connected:
display.vvv( display.vvv(
"ESTABLISH NSENTER CONNECTION FOR USER: {0}".format( f"ESTABLISH NSENTER CONNECTION FOR USER: {self._play_context.remote_user}",
self._play_context.remote_user
),
host=self._play_context.remote_addr, host=self._play_context.remote_addr,
) )
self._connected = True self._connected = True
@ -104,7 +102,7 @@ class Connection(ConnectionBase):
"--pid", "--pid",
"--uts", "--uts",
"--preserve-credentials", "--preserve-credentials",
"--target={0}".format(self._nsenter_pid), f"--target={self._nsenter_pid}",
"--", "--",
] ]
@ -115,7 +113,7 @@ class Connection(ConnectionBase):
cmd_parts = nsenter_cmd_parts + cmd cmd_parts = nsenter_cmd_parts + cmd
cmd = [to_bytes(arg) for arg in cmd_parts] cmd = [to_bytes(arg) for arg in cmd_parts]
display.vvv("EXEC {0}".format(to_text(cmd)), host=self._play_context.remote_addr) display.vvv(f"EXEC {to_text(cmd)}", host=self._play_context.remote_addr)
display.debug("opening command with Popen()") display.debug("opening command with Popen()")
master = None master = None
@ -204,15 +202,15 @@ class Connection(ConnectionBase):
in_path = unfrackpath(in_path, basedir=self.cwd) in_path = unfrackpath(in_path, basedir=self.cwd)
out_path = unfrackpath(out_path, basedir=self.cwd) out_path = unfrackpath(out_path, basedir=self.cwd)
display.vvv("PUT {0} to {1}".format(in_path, out_path), host=self._play_context.remote_addr) display.vvv(f"PUT {in_path} to {out_path}", host=self._play_context.remote_addr)
try: try:
with open(to_bytes(in_path, errors="surrogate_or_strict"), "rb") as in_file: with open(to_bytes(in_path, errors="surrogate_or_strict"), "rb") as in_file:
in_data = in_file.read() in_data = in_file.read()
rc, out, err = self.exec_command(cmd=["tee", out_path], in_data=in_data) rc, out, err = self.exec_command(cmd=["tee", out_path], in_data=in_data)
if rc != 0: if rc != 0:
raise AnsibleError("failed to transfer file to {0}: {1}".format(out_path, err)) raise AnsibleError(f"failed to transfer file to {out_path}: {err}")
except IOError as e: except IOError as e:
raise AnsibleError("failed to transfer file to {0}: {1}".format(out_path, to_native(e))) raise AnsibleError(f"failed to transfer file to {out_path}: {e}")
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
super(Connection, self).fetch_file(in_path, out_path) super(Connection, self).fetch_file(in_path, out_path)
@ -222,13 +220,13 @@ class Connection(ConnectionBase):
try: try:
rc, out, err = self.exec_command(cmd=["cat", in_path]) rc, out, err = self.exec_command(cmd=["cat", in_path])
display.vvv("FETCH {0} TO {1}".format(in_path, out_path), host=self._play_context.remote_addr) display.vvv(f"FETCH {in_path} TO {out_path}", host=self._play_context.remote_addr)
if rc != 0: if rc != 0:
raise AnsibleError("failed to transfer file to {0}: {1}".format(in_path, err)) raise AnsibleError(f"failed to transfer file to {in_path}: {err}")
with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb') as out_file: with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb') as out_file:
out_file.write(out) out_file.write(out)
except IOError as e: except IOError as e:
raise AnsibleError("failed to transfer file to {0}: {1}".format(to_native(out_path), to_native(e))) raise AnsibleError(f"failed to transfer file to {to_native(out_path)}: {e}")
def close(self): def close(self):
''' terminate the connection; nothing to do here ''' ''' terminate the connection; nothing to do here '''


@ -268,19 +268,19 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
# Add container to groups # Add container to groups
image_name = config.get('Image') image_name = config.get('Image')
if image_name and add_legacy_groups: if image_name and add_legacy_groups:
groups.append('image_{0}'.format(image_name)) groups.append(f'image_{image_name}')
stack_name = labels.get('com.docker.stack.namespace') stack_name = labels.get('com.docker.stack.namespace')
if stack_name: if stack_name:
full_facts['docker_stack'] = stack_name full_facts['docker_stack'] = stack_name
if add_legacy_groups: if add_legacy_groups:
groups.append('stack_{0}'.format(stack_name)) groups.append(f'stack_{stack_name}')
service_name = labels.get('com.docker.swarm.service.name') service_name = labels.get('com.docker.swarm.service.name')
if service_name: if service_name:
full_facts['docker_service'] = service_name full_facts['docker_service'] = service_name
if add_legacy_groups: if add_legacy_groups:
groups.append('service_{0}'.format(service_name)) groups.append(f'service_{service_name}')
ansible_connection = None ansible_connection = None
if connection_type == 'ssh': if connection_type == 'ssh':
@ -383,9 +383,9 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
self._populate(client) self._populate(client)
except DockerException as e: except DockerException as e:
raise AnsibleError( raise AnsibleError(
'An unexpected Docker error occurred: {0}'.format(e) f'An unexpected Docker error occurred: {e}'
) )
except RequestException as e: except RequestException as e:
raise AnsibleError( raise AnsibleError(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(e) f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}'
) )


@ -131,11 +131,11 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
command = [self.DOCKER_MACHINE_PATH] command = [self.DOCKER_MACHINE_PATH]
command.extend(args) command.extend(args)
display.debug('Executing command {0}'.format(command)) display.debug(f'Executing command {command}')
try: try:
result = subprocess.check_output(command) result = subprocess.check_output(command)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
display.warning('Exception {0} caught while executing command {1}, this was the original exception: {2}'.format(type(e).__name__, command, e)) display.warning(f'Exception {type(e).__name__} caught while executing command {command}, this was the original exception: {e}')
raise e raise e
return to_text(result).strip() return to_text(result).strip()
@ -203,14 +203,14 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
def _should_skip_host(self, machine_name, env_var_tuples, daemon_env): def _should_skip_host(self, machine_name, env_var_tuples, daemon_env):
if not env_var_tuples: if not env_var_tuples:
warning_prefix = 'Unable to fetch Docker daemon env vars from Docker Machine for host {0}'.format(machine_name) warning_prefix = f'Unable to fetch Docker daemon env vars from Docker Machine for host {machine_name}'
if daemon_env in ('require', 'require-silently'): if daemon_env in ('require', 'require-silently'):
if daemon_env == 'require': if daemon_env == 'require':
display.warning('{0}: host will be skipped'.format(warning_prefix)) display.warning(f'{warning_prefix}: host will be skipped')
return True return True
else: # 'optional', 'optional-silently' else: # 'optional', 'optional-silently'
if daemon_env == 'optional': if daemon_env == 'optional':
display.warning('{0}: host will lack dm_DOCKER_xxx variables'.format(warning_prefix)) display.warning(f'{warning_prefix}: host will lack dm_DOCKER_xxx variables')
return False return False
def _populate(self): def _populate(self):
@ -261,7 +261,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
# set variables based on Docker Machine env variables # set variables based on Docker Machine env variables
for kv in env_var_tuples: for kv in env_var_tuples:
self.inventory.set_variable(machine_name, 'dm_{0}'.format(kv[0]), make_unsafe(kv[1])) self.inventory.set_variable(machine_name, f'dm_{kv[0]}', make_unsafe(kv[1]))
if self.get_option('verbose_output'): if self.get_option('verbose_output'):
self.inventory.set_variable(machine_name, 'docker_machine_node_attributes', unsafe_node_attrs) self.inventory.set_variable(machine_name, 'docker_machine_node_attributes', unsafe_node_attrs)


@ -187,14 +187,11 @@ class APIClient(
self._version = version self._version = version
if not isinstance(self._version, str): if not isinstance(self._version, str):
raise DockerException( raise DockerException(
'Version parameter must be a string or None. Found {0}'.format( f'Version parameter must be a string or None. Found {type(version).__name__}'
type(version).__name__
)
) )
if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION): if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
raise InvalidVersion( raise InvalidVersion(
'API versions below {0} are no longer supported by this ' f'API versions below {MINIMUM_DOCKER_API_VERSION} are no longer supported by this library.'
'library.'.format(MINIMUM_DOCKER_API_VERSION)
) )
def _retrieve_server_version(self): def _retrieve_server_version(self):
@ -202,7 +199,7 @@ class APIClient(
version_result = self.version(api_version=False) version_result = self.version(api_version=False)
except Exception as e: except Exception as e:
raise DockerException( raise DockerException(
'Error while fetching server API version: {0}'.format(e) f'Error while fetching server API version: {e}'
) )
try: try:
@ -214,7 +211,7 @@ class APIClient(
) )
except Exception as e: except Exception as e:
raise DockerException( raise DockerException(
'Error while fetching server API version: {0}. Response seems to be broken.'.format(e) f'Error while fetching server API version: {e}. Response seems to be broken.'
) )
def _set_request_timeout(self, kwargs): def _set_request_timeout(self, kwargs):
@ -247,19 +244,16 @@ class APIClient(
for arg in args: for arg in args:
if not isinstance(arg, str): if not isinstance(arg, str):
raise ValueError( raise ValueError(
'Expected a string but found {0} ({1}) ' f'Expected a string but found {arg} ({type(arg)}) instead'
'instead'.format(arg, type(arg))
) )
quote_f = partial(quote, safe="/:") quote_f = partial(quote, safe="/:")
args = map(quote_f, args) args = map(quote_f, args)
if kwargs.get('versioned_api', True): if kwargs.get('versioned_api', True):
return '{0}/v{1}{2}'.format( return f'{self.base_url}/v{self._version}{pathfmt.format(*args)}'
self.base_url, self._version, pathfmt.format(*args)
)
else: else:
return '{0}{1}'.format(self.base_url, pathfmt.format(*args)) return f'{self.base_url}{pathfmt.format(*args)}'
def _raise_for_status(self, response): def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred.""" """Raises stored :class:`APIError`, if one occurred."""


@ -19,7 +19,7 @@ from .credentials.errors import StoreError, CredentialsNotFound
from .utils import config from .utils import config
INDEX_NAME = 'docker.io' INDEX_NAME = 'docker.io'
INDEX_URL = 'https://index.{0}/v1/'.format(INDEX_NAME) INDEX_URL = f'https://index.{INDEX_NAME}/v1/'
TOKEN_USERNAME = '<token>' TOKEN_USERNAME = '<token>'
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -28,14 +28,13 @@ log = logging.getLogger(__name__)
def resolve_repository_name(repo_name): def resolve_repository_name(repo_name):
if '://' in repo_name: if '://' in repo_name:
raise errors.InvalidRepository( raise errors.InvalidRepository(
'Repository name cannot contain a scheme ({0})'.format(repo_name) f'Repository name cannot contain a scheme ({repo_name})'
) )
index_name, remote_name = split_repo_name(repo_name) index_name, remote_name = split_repo_name(repo_name)
if index_name[0] == '-' or index_name[-1] == '-': if index_name[0] == '-' or index_name[-1] == '-':
raise errors.InvalidRepository( raise errors.InvalidRepository(
'Invalid index name ({0}). Cannot begin or end with a' f'Invalid index name ({index_name}). Cannot begin or end with a hyphen.'
' hyphen.'.format(index_name)
) )
return resolve_index_name(index_name), remote_name return resolve_index_name(index_name), remote_name
@ -117,9 +116,7 @@ class AuthConfig(dict):
# keys is not formatted properly. # keys is not formatted properly.
if raise_on_error: if raise_on_error:
raise errors.InvalidConfigFile( raise errors.InvalidConfigFile(
'Invalid configuration for registry {0}'.format( f'Invalid configuration for registry {registry}'
registry
)
) )
return {} return {}
if 'identitytoken' in entry: if 'identitytoken' in entry:
@ -272,7 +269,7 @@ class AuthConfig(dict):
return None return None
except StoreError as e: except StoreError as e:
raise errors.DockerException( raise errors.DockerException(
'Credentials store error: {0}'.format(repr(e)) f'Credentials store error: {e}'
) )
def _get_store_instance(self, name): def _get_store_instance(self, name):


@ -146,14 +146,14 @@ class ContextAPI(object):
names.append(name) names.append(name)
except Exception as e: except Exception as e:
raise errors.ContextException( raise errors.ContextException(
"Failed to load metafile {filepath}: {e}".format(filepath=filepath, e=e), f"Failed to load metafile {filepath}: {e}"
) from e ) from e
contexts = [cls.get_default_context()] contexts = [cls.get_default_context()]
for name in names: for name in names:
context = Context.load_context(name) context = Context.load_context(name)
if not context: if not context:
raise errors.ContextException("Context {context} cannot be found".format(context=name)) raise errors.ContextException(f"Context {name} cannot be found")
contexts.append(context) contexts.append(context)
return contexts return contexts
@ -174,7 +174,7 @@ class ContextAPI(object):
err = write_context_name_to_docker_config(name) err = write_context_name_to_docker_config(name)
if err: if err:
raise errors.ContextException( raise errors.ContextException(
'Failed to set current context: {err}'.format(err=err)) f'Failed to set current context: {err}')
@classmethod @classmethod
def remove_context(cls, name): def remove_context(cls, name):


@ -29,7 +29,7 @@ def get_current_context_name_with_source():
if docker_cfg_path: if docker_cfg_path:
try: try:
with open(docker_cfg_path) as f: with open(docker_cfg_path) as f:
return json.load(f).get("currentContext", "default"), "configuration file {file}".format(file=docker_cfg_path) return json.load(f).get("currentContext", "default"), f"configuration file {docker_cfg_path}"
except Exception: except Exception:
pass pass
return "default", "fallback value" return "default", "fallback value"


@ -62,7 +62,7 @@ class Context(object):
if not isinstance(v, dict): if not isinstance(v, dict):
# unknown format # unknown format
raise ContextException( raise ContextException(
"Unknown endpoint format for context {name}: {v}".format(name=name, v=v), f"Unknown endpoint format for context {name}: {v}",
) )
self.endpoints[k] = v self.endpoints[k] = v
@ -118,7 +118,7 @@ class Context(object):
except (OSError, KeyError, ValueError) as e: except (OSError, KeyError, ValueError) as e:
# unknown format # unknown format
raise Exception( raise Exception(
"Detected corrupted meta file for context {name} : {e}".format(name=name, e=e) f"Detected corrupted meta file for context {name} : {e}"
) from e ) from e
# for docker endpoints, set defaults for # for docker endpoints, set defaults for
@ -193,7 +193,7 @@ class Context(object):
rmtree(self.tls_path) rmtree(self.tls_path)
def __repr__(self): def __repr__(self):
return "<{classname}: '{name}'>".format(classname=self.__class__.__name__, name=self.name) return f"<{self.__class__.__name__}: '{self.name}'>"
def __str__(self): def __str__(self):
return json.dumps(self.__call__(), indent=2) return json.dumps(self.__call__(), indent=2)


@ -26,12 +26,8 @@ def process_store_error(cpe, program):
message = cpe.output.decode('utf-8') message = cpe.output.decode('utf-8')
if 'credentials not found in native keychain' in message: if 'credentials not found in native keychain' in message:
return CredentialsNotFound( return CredentialsNotFound(
'No matching credentials in {0}'.format( f'No matching credentials in {program}'
program
)
) )
return StoreError( return StoreError(
'Credentials store {0} exited with "{1}".'.format( f'Credentials store {program} exited with "{cpe.output.decode("utf-8").strip()}".'
program, cpe.output.decode('utf-8').strip()
)
) )


@ -30,9 +30,7 @@ class Store(object):
self.environment = environment self.environment = environment
if self.exe is None: if self.exe is None:
raise errors.InitializationError( raise errors.InitializationError(
'{0} not installed or not available in PATH'.format( f'{self.program} not installed or not available in PATH'
self.program
)
) )
def get(self, server): def get(self, server):
@ -50,7 +48,7 @@ class Store(object):
# raise CredentialsNotFound # raise CredentialsNotFound
if result['Username'] == '' and result['Secret'] == '': if result['Username'] == '' and result['Secret'] == '':
raise errors.CredentialsNotFound( raise errors.CredentialsNotFound(
'No matching credentials in {0}'.format(self.program) f'No matching credentials in {self.program}'
) )
return result return result
@ -92,14 +90,10 @@ class Store(object):
except OSError as e: except OSError as e:
if e.errno == errno.ENOENT: if e.errno == errno.ENOENT:
raise errors.StoreError( raise errors.StoreError(
'{0} not installed or not available in PATH'.format( f'{self.program} not installed or not available in PATH'
self.program
)
) )
else: else:
raise errors.StoreError( raise errors.StoreError(
'Unexpected OS error "{0}", errno={1}'.format( f'Unexpected OS error "{e.strerror}", errno={e.errno}'
e.strerror, e.errno
)
) )
return output return output


@ -59,17 +59,13 @@ class APIError(_HTTPError, DockerException):
message = super(APIError, self).__str__() message = super(APIError, self).__str__()
if self.is_client_error(): if self.is_client_error():
message = '{0} Client Error for {1}: {2}'.format( message = f'{self.response.status_code} Client Error for {self.response.url}: {self.response.reason}'
self.response.status_code, self.response.url,
self.response.reason)
elif self.is_server_error(): elif self.is_server_error():
message = '{0} Server Error for {1}: {2}'.format( message = f'{self.response.status_code} Server Error for {self.response.url}: {self.response.reason}'
self.response.status_code, self.response.url,
self.response.reason)
if self.explanation: if self.explanation:
message = '{0} ("{1}")'.format(message, self.explanation) message = f'{message} ("{self.explanation}")'
return message return message
@ -146,9 +142,8 @@ class ContainerError(DockerException):
self.image = image self.image = image
self.stderr = stderr self.stderr = stderr
err = ": {0}".format(stderr) if stderr is not None else "" err = f": {stderr}" if stderr is not None else ""
msg = ("Command '{0}' in image '{1}' returned non-zero exit " msg = f"Command '{command}' in image '{image}' returned non-zero exit status {exit_status}{err}"
"status {2}{3}").format(command, image, exit_status, err)
super(ContainerError, self).__init__(msg) super(ContainerError, self).__init__(msg)
@ -170,8 +165,8 @@ class ImageLoadError(DockerException):
def create_unexpected_kwargs_error(name, kwargs): def create_unexpected_kwargs_error(name, kwargs):
quoted_kwargs = ["'{0}'".format(k) for k in sorted(kwargs)] quoted_kwargs = [f"'{k}'" for k in sorted(kwargs)]
text = ["{0}() ".format(name)] text = [f"{name}() "]
if len(quoted_kwargs) == 1: if len(quoted_kwargs) == 1:
text.append("got an unexpected keyword argument ") text.append("got an unexpected keyword argument ")
else: else:
@ -185,7 +180,7 @@ class MissingContextParameter(DockerException):
self.param = param self.param = param
def __str__(self): def __str__(self):
return ("missing parameter: {0}".format(self.param)) return f"missing parameter: {self.param}"
class ContextAlreadyExists(DockerException): class ContextAlreadyExists(DockerException):
@ -193,7 +188,7 @@ class ContextAlreadyExists(DockerException):
self.name = name self.name = name
def __str__(self): def __str__(self):
return ("context {0} already exists".format(self.name)) return f"context {self.name} already exists"
class ContextException(DockerException): class ContextException(DockerException):
@ -209,7 +204,7 @@ class ContextNotFound(DockerException):
self.name = name self.name = name
def __str__(self): def __str__(self):
return ("context '{0}' not found".format(self.name)) return f"context '{self.name}' not found"
class MissingRequirementException(DockerException): class MissingRequirementException(DockerException):


@ -107,7 +107,7 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
t.addfile(i, f) t.addfile(i, f)
except IOError: except IOError:
raise IOError( raise IOError(
'Can not read file in context: {0}'.format(full_path) f'Can not read file in context: {full_path}'
) )
else: else:
# Directories, FIFOs, symlinks... do not need to be read. # Directories, FIFOs, symlinks... do not need to be read.
@ -271,18 +271,13 @@ def process_dockerfile(dockerfile, path):
abs_dockerfile = os.path.join(path, dockerfile) abs_dockerfile = os.path.join(path, dockerfile)
if IS_WINDOWS_PLATFORM and path.startswith( if IS_WINDOWS_PLATFORM and path.startswith(
WINDOWS_LONGPATH_PREFIX): WINDOWS_LONGPATH_PREFIX):
abs_dockerfile = '{0}{1}'.format( abs_dockerfile = f'{WINDOWS_LONGPATH_PREFIX}{os.path.normpath(abs_dockerfile[len(WINDOWS_LONGPATH_PREFIX):])}'
WINDOWS_LONGPATH_PREFIX,
os.path.normpath(
abs_dockerfile[len(WINDOWS_LONGPATH_PREFIX):]
)
)
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
os.path.relpath(abs_dockerfile, path).startswith('..')): os.path.relpath(abs_dockerfile, path).startswith('..')):
# Dockerfile not in context - read data to insert into tar later # Dockerfile not in context - read data to insert into tar later
with open(abs_dockerfile) as df: with open(abs_dockerfile) as df:
return ( return (
'.dockerfile.{random:x}'.format(random=random.getrandbits(160)), f'.dockerfile.{random.getrandbits(160):x}',
df.read() df.read()
) )


@ -38,9 +38,7 @@ def minimum_version(version):
def wrapper(self, *args, **kwargs): def wrapper(self, *args, **kwargs):
if utils.version_lt(self._version, version): if utils.version_lt(self._version, version):
raise errors.InvalidVersion( raise errors.InvalidVersion(
'{0} is not available for version < {1}'.format( f'{f.__name__} is not available for version < {version}'
f.__name__, version
)
) )
return f(self, *args, **kwargs) return f(self, *args, **kwargs)
return wrapper return wrapper


@ -58,10 +58,10 @@ def port_range(start, end, proto, randomly_available_port=False):
if not start: if not start:
return start return start
if not end: if not end:
return [start + proto] return [f'{start}{proto}']
if randomly_available_port: if randomly_available_port:
return ['{0}-{1}'.format(start, end) + proto] return [f'{start}-{end}{proto}']
return [str(port) + proto for port in range(int(start), int(end) + 1)] return [f'{port}{proto}' for port in range(int(start), int(end) + 1)]
def split_port(port): def split_port(port):


@ -80,5 +80,4 @@ class ProxyConfig(dict):
return proxy_env + environment return proxy_env + environment
def __str__(self): def __str__(self):
return 'ProxyConfig(http={0}, https={1}, ftp={2}, no_proxy={3})'.format( return f'ProxyConfig(http={self.http}, https={self.https}, ftp={self.ftp}, no_proxy={self.no_proxy})'
self.http, self.https, self.ftp, self.no_proxy)


@ -193,4 +193,4 @@ def demux_adaptor(stream_id, data):
elif stream_id == STDERR: elif stream_id == STDERR:
return (None, data) return (None, data)
else: else:
raise ValueError('{0} is not a valid stream'.format(stream_id)) raise ValueError(f'{stream_id} is not a valid stream')


@ -136,8 +136,7 @@ def convert_volume_binds(binds):
if isinstance(v, dict): if isinstance(v, dict):
if 'ro' in v and 'mode' in v: if 'ro' in v and 'mode' in v:
raise ValueError( raise ValueError(
'Binding cannot contain both "ro" and "mode": {0}' f'Binding cannot contain both "ro" and "mode": {v!r}'
.format(repr(v))
) )
bind = v['bind'] bind = v['bind']
@ -167,11 +166,11 @@ def convert_volume_binds(binds):
else: else:
mode = v['propagation'] mode = v['propagation']
result.append('{0}:{1}:{2}'.format(k, bind, mode)) result.append(f'{k}:{bind}:{mode}')
else: else:
if isinstance(v, bytes): if isinstance(v, bytes):
v = v.decode('utf-8') v = v.decode('utf-8')
result.append('{0}:{1}:rw'.format(k, v)) result.append(f'{k}:{v}:rw')
return result return result
@ -181,8 +180,7 @@ def convert_tmpfs_mounts(tmpfs):
if not isinstance(tmpfs, list): if not isinstance(tmpfs, list):
raise ValueError( raise ValueError(
'Expected tmpfs value to be either a list or a dict, found: {0}' f'Expected tmpfs value to be either a list or a dict, found: {type(tmpfs).__name__}'
.format(type(tmpfs).__name__)
) )
result = {} result = {}
@ -196,8 +194,7 @@ def convert_tmpfs_mounts(tmpfs):
else: else:
raise ValueError( raise ValueError(
"Expected item in tmpfs list to be a string, found: {0}" f"Expected item in tmpfs list to be a string, found: {type(mount).__name__}"
.format(type(mount).__name__)
) )
result[name] = options result[name] = options
@ -257,14 +254,14 @@ def parse_host(addr, is_win32=False, tls=False):
if proto not in ('tcp', 'unix', 'npipe', 'ssh'): if proto not in ('tcp', 'unix', 'npipe', 'ssh'):
raise errors.DockerException( raise errors.DockerException(
"Invalid bind address protocol: {0}".format(addr) f"Invalid bind address protocol: {addr}"
) )
if proto == 'tcp' and not parsed_url.netloc: if proto == 'tcp' and not parsed_url.netloc:
# "tcp://" is exceptionally disallowed by convention; # "tcp://" is exceptionally disallowed by convention;
# omitting a hostname for other protocols is fine # omitting a hostname for other protocols is fine
raise errors.DockerException( raise errors.DockerException(
'Invalid bind address format: {0}'.format(addr) f'Invalid bind address format: {addr}'
) )
if any([ if any([
@ -272,13 +269,12 @@ def parse_host(addr, is_win32=False, tls=False):
parsed_url.password parsed_url.password
]): ]):
raise errors.DockerException( raise errors.DockerException(
'Invalid bind address format: {0}'.format(addr) f'Invalid bind address format: {addr}'
) )
if parsed_url.path and proto == 'ssh': if parsed_url.path and proto == 'ssh':
raise errors.DockerException( raise errors.DockerException(
'Invalid bind address format: no path allowed for this protocol:' f'Invalid bind address format: no path allowed for this protocol: {addr}'
' {0}'.format(addr)
) )
else: else:
path = parsed_url.path path = parsed_url.path
@ -292,19 +288,19 @@ def parse_host(addr, is_win32=False, tls=False):
port = parsed_url.port or 0 port = parsed_url.port or 0
if port <= 0: if port <= 0:
port = 22 if proto == 'ssh' else (2375 if tls else 2376) port = 22 if proto == 'ssh' else (2375 if tls else 2376)
netloc = '{0}:{1}'.format(parsed_url.netloc, port) netloc = f'{parsed_url.netloc}:{port}'
if not parsed_url.hostname: if not parsed_url.hostname:
netloc = '{0}:{1}'.format(DEFAULT_HTTP_HOST, port) netloc = f'{DEFAULT_HTTP_HOST}:{port}'
# Rewrite schemes to fit library internals (requests adapters) # Rewrite schemes to fit library internals (requests adapters)
if proto == 'tcp': if proto == 'tcp':
proto = 'http{0}'.format('s' if tls else '') proto = f"http{'s' if tls else ''}"
elif proto == 'unix': elif proto == 'unix':
proto = 'http+unix' proto = 'http+unix'
if proto in ('http+unix', 'npipe'): if proto in ('http+unix', 'npipe'):
return "{0}://{1}".format(proto, path).rstrip('/') return f"{proto}://{path}".rstrip('/')
return urlunparse(URLComponents( return urlunparse(URLComponents(
scheme=proto, scheme=proto,
netloc=netloc, netloc=netloc,
@ -323,7 +319,7 @@ def parse_devices(devices):
continue continue
if not isinstance(device, str): if not isinstance(device, str):
raise errors.DockerException( raise errors.DockerException(
'Invalid device type {0}'.format(type(device)) f'Invalid device type {type(device)}'
) )
device_mapping = device.split(':') device_mapping = device.split(':')
if device_mapping: if device_mapping:
@ -428,17 +424,14 @@ def parse_bytes(s):
digits = float(digits_part) digits = float(digits_part)
except ValueError: except ValueError:
raise errors.DockerException( raise errors.DockerException(
'Failed converting the string value for memory ({0}) to' f'Failed converting the string value for memory ({digits_part}) to an integer.'
' an integer.'.format(digits_part)
) )
# Reconvert to long for the final result # Reconvert to long for the final result
s = int(digits * units[suffix]) s = int(digits * units[suffix])
else: else:
raise errors.DockerException( raise errors.DockerException(
'The specified value for memory ({0}) should specify the' f'The specified value for memory ({s}) should specify the units. The postfix should be one of the `b` `k` `m` `g` characters'
' units. The postfix should be one of the `b` `k` `m` `g`'
' characters'.format(s)
) )
return s return s
@ -448,7 +441,7 @@ def normalize_links(links):
if isinstance(links, dict): if isinstance(links, dict):
links = links.items() links = links.items()
return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)] return [f'{k}:{v}' if v else k for k, v in sorted(links)]
def parse_env_file(env_file): def parse_env_file(env_file):
@ -473,9 +466,7 @@ def parse_env_file(env_file):
k, v = parse_line k, v = parse_line
environment[k] = v environment[k] = v
else: else:
raise errors.DockerException( raise errors.DockerException(f'Invalid line in environment file {env_file}:\n{line}')
'Invalid line in environment file {0}:\n{1}'.format(
env_file, line))
return environment return environment
@ -491,7 +482,7 @@ def format_environment(environment):
if isinstance(value, bytes): if isinstance(value, bytes):
value = value.decode('utf-8') value = value.decode('utf-8')
return '{key}={value}'.format(key=key, value=value) return f'{key}={value}'
return [format_env(*var) for var in environment.items()] return [format_env(*var) for var in environment.items()]
@ -499,11 +490,11 @@ def format_extra_hosts(extra_hosts, task=False):
# Use format dictated by Swarm API if container is part of a task # Use format dictated by Swarm API if container is part of a task
if task: if task:
return [ return [
'{0} {1}'.format(v, k) for k, v in sorted(extra_hosts.items()) f'{v} {k}' for k, v in sorted(extra_hosts.items())
] ]
return [ return [
'{0}:{1}'.format(k, v) for k, v in sorted(extra_hosts.items()) f'{k}:{v}' for k, v in sorted(extra_hosts.items())
] ]


@ -98,7 +98,7 @@ class _Parser(object):
try: try:
v += _HEX_DICT[self.line[self.index]] v += _HEX_DICT[self.line[self.index]]
except KeyError: except KeyError:
raise InvalidLogFmt('Invalid unicode escape digit {digit!r}'.format(digit=self.line[self.index])) raise InvalidLogFmt(f'Invalid unicode escape digit {self.line[self.index]!r}')
self.index += 6 self.index += 6
return chr(v) return chr(v)
@ -170,7 +170,8 @@ def parse_line(line, logrus_mode=False):
if cur in _ESCAPE_DICT: if cur in _ESCAPE_DICT:
value.append(_ESCAPE_DICT[cur]) value.append(_ESCAPE_DICT[cur])
elif cur != 'u': elif cur != 'u':
raise InvalidLogFmt('Unknown escape sequence {seq!r}'.format(seq='\\' + cur)) es = f"\\{cur}"
raise InvalidLogFmt(f'Unknown escape sequence {es!r}')
else: else:
parser.prev() parser.prev()
value.append(parser.parse_unicode_sequence()) value.append(parser.parse_unicode_sequence())


@ -18,9 +18,9 @@ _VALID_STR = re.compile('^[A-Za-z0-9_-]+$')
def _validate_part(string, part, part_name): def _validate_part(string, part, part_name):
if not part: if not part:
raise ValueError('Invalid platform string "{string}": {part} is empty'.format(string=string, part=part_name)) raise ValueError(f'Invalid platform string "{string}": {part} is empty')
if not _VALID_STR.match(part): if not _VALID_STR.match(part):
raise ValueError('Invalid platform string "{string}": {part} has invalid characters'.format(string=string, part=part_name)) raise ValueError(f'Invalid platform string "{string}": {part} has invalid characters')
return part return part
@ -123,16 +123,16 @@ class _Platform(object):
arch=arch or None, arch=arch or None,
variant=variant or None, variant=variant or None,
) )
raise ValueError('Invalid platform string "{0}": unknown OS or architecture'.format(string)) raise ValueError(f'Invalid platform string "{string}": unknown OS or architecture')
os = _validate_part(string, parts[0], 'OS') os = _validate_part(string, parts[0], 'OS')
if not os: if not os:
raise ValueError('Invalid platform string "{0}": OS is empty'.format(string)) raise ValueError(f'Invalid platform string "{string}": OS is empty')
arch = _validate_part(string, parts[1], 'architecture') if len(parts) > 1 else None arch = _validate_part(string, parts[1], 'architecture') if len(parts) > 1 else None
if arch is not None and not arch: if arch is not None and not arch:
raise ValueError('Invalid platform string "{0}": architecture is empty'.format(string)) raise ValueError(f'Invalid platform string "{string}": architecture is empty')
variant = _validate_part(string, parts[2], 'variant') if len(parts) > 2 else None variant = _validate_part(string, parts[2], 'variant') if len(parts) > 2 else None
if variant is not None and not variant: if variant is not None and not variant:
raise ValueError('Invalid platform string "{0}": variant is empty'.format(string)) raise ValueError(f'Invalid platform string "{string}": variant is empty')
arch, variant = _normalize_arch(arch, variant or '') arch, variant = _normalize_arch(arch, variant or '')
if len(parts) == 2 and arch == 'arm' and variant == 'v7': if len(parts) == 2 and arch == 'arm' and variant == 'v7':
variant = None variant = None
@ -155,7 +155,7 @@ class _Platform(object):
return '/'.join(parts) return '/'.join(parts)
def __repr__(self): def __repr__(self):
return '_Platform(os={os!r}, arch={arch!r}, variant={variant!r})'.format(os=self.os, arch=self.arch, variant=self.variant) return f'_Platform(os={self.os!r}, arch={self.arch!r}, variant={self.variant!r})'
def __eq__(self, other): def __eq__(self, other):
return self.os == other.os and self.arch == other.arch and self.variant == other.variant return self.os == other.os and self.arch == other.arch and self.variant == other.variant


@ -125,12 +125,9 @@ def _get_tls_config(fail_function, **kwargs):
if assert_hostname is not None: if assert_hostname is not None:
fail_function( fail_function(
"tls_hostname is not compatible with Docker SDK for Python 7.0.0+. You are using" "tls_hostname is not compatible with Docker SDK for Python 7.0.0+. You are using"
" Docker SDK for Python {docker_py_version}. The tls_hostname option (value: {tls_hostname})" f" Docker SDK for Python {docker_version}. The tls_hostname option (value: {assert_hostname})"
" has either been set directly or with the environment variable DOCKER_TLS_HOSTNAME." " has either been set directly or with the environment variable DOCKER_TLS_HOSTNAME."
" Make sure it is not set, or switch to an older version of Docker SDK for Python.".format( " Make sure it is not set, or switch to an older version of Docker SDK for Python."
docker_py_version=docker_version,
tls_hostname=assert_hostname,
)
) )
# Filter out all None parameters # Filter out all None parameters
kwargs = dict((k, v) for k, v in kwargs.items() if v is not None) kwargs = dict((k, v) for k, v in kwargs.items() if v is not None)
@ -691,6 +688,6 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
result = result.get(key) result = result.get(key)
if isinstance(result, Sequence): if isinstance(result, Sequence):
for warning in result: for warning in result:
self.module.warn('Docker warning: {0}'.format(warning)) self.module.warn(f'Docker warning: {warning}')
elif isinstance(result, str) and result: elif isinstance(result, str) and result:
self.module.warn('Docker warning: {0}'.format(result)) self.module.warn(f'Docker warning: {result}')


@ -577,6 +577,6 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
result = result.get(key) result = result.get(key)
if isinstance(result, Sequence): if isinstance(result, Sequence):
for warning in result: for warning in result:
self.module.warn('Docker warning: {0}'.format(warning)) self.module.warn(f'Docker warning: {warning}')
elif isinstance(result, str) and result: elif isinstance(result, str) and result:
self.module.warn('Docker warning: {0}'.format(result)) self.module.warn(f'Docker warning: {result}')


@ -128,11 +128,7 @@ class AnsibleDockerClientBase(object):
try: try:
data = json.loads(stdout) data = json.loads(stdout)
except Exception as exc: except Exception as exc:
self.fail('Error while parsing JSON output of {cmd}: {exc}\nJSON output: {stdout}'.format( self.fail(f'Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_native(stdout)}')
cmd=self._compose_cmd_str(args),
exc=to_native(exc),
stdout=to_native(stdout),
))
return rc, data, stderr return rc, data, stderr
# def call_cli_json_stream(self, *args, check_rc=False, data=None, cwd=None, environ_update=None, warn_on_stderr=False): # def call_cli_json_stream(self, *args, check_rc=False, data=None, cwd=None, environ_update=None, warn_on_stderr=False):
@ -148,11 +144,7 @@ class AnsibleDockerClientBase(object):
if line.startswith(b'{'): if line.startswith(b'{'):
result.append(json.loads(line)) result.append(json.loads(line))
except Exception as exc: except Exception as exc:
self.fail('Error while parsing JSON output of {cmd}: {exc}\nJSON output: {stdout}'.format( self.fail(f'Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_native(stdout)}')
cmd=self._compose_cmd_str(args),
exc=to_native(exc),
stdout=to_native(stdout),
))
return rc, result, stderr return rc, result, stderr
@abc.abstractmethod @abc.abstractmethod
@ -188,7 +180,7 @@ class AnsibleDockerClientBase(object):
if the tag exists. if the tag exists.
''' '''
dummy, images, dummy = self.call_cli_json_stream( dummy, images, dummy = self.call_cli_json_stream(
'image', 'ls', '--format', '{{ json . }}', '--no-trunc', '--filter', 'reference={0}'.format(name), 'image', 'ls', '--format', '{{ json . }}', '--no-trunc', '--filter', f'reference={name}',
check_rc=True, check_rc=True,
) )
if tag: if tag:


@ -271,10 +271,10 @@ def _extract_event(line, warn_function=None):
if match: if match:
if warn_function: if warn_function:
if match.group('msg'): if match.group('msg'):
msg = '{rid}: {msg}' msg = f"{match.group('resource_id')}: {match.group('msg')}"
else: else:
msg = 'Unspecified warning for {rid}' msg = f"Unspecified warning for {match.group('resource_id')}"
warn_function(msg.format(rid=match.group('resource_id'), msg=match.group('msg'))) warn_function(msg)
return None, True return None, True
match = _RE_PULL_PROGRESS.match(line) match = _RE_PULL_PROGRESS.match(line)
if match: if match:
@ -323,9 +323,8 @@ def _warn_missing_dry_run_prefix(line, warn_missing_dry_run_prefix, warn_functio
# This could be a bug, a change of docker compose's output format, ... # This could be a bug, a change of docker compose's output format, ...
# Tell the user to report it to us :-) # Tell the user to report it to us :-)
warn_function( warn_function(
'Event line is missing dry-run mode marker: {0!r}. Please report this at ' f'Event line is missing dry-run mode marker: {line!r}. Please report this at '
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md' 'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
.format(line)
) )
@ -334,9 +333,8 @@ def _warn_unparsable_line(line, warn_function):
# Tell the user to report it to us :-) # Tell the user to report it to us :-)
if warn_function: if warn_function:
warn_function( warn_function(
'Cannot parse event from line: {0!r}. Please report this at ' f'Cannot parse event from line: {line!r}. Please report this at '
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md' 'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
.format(line)
) )
@ -382,9 +380,8 @@ def parse_json_events(stderr, warn_function=None):
continue continue
if warn_function: if warn_function:
warn_function( warn_function(
'Cannot parse event from non-JSON line: {0!r}. Please report this at ' f'Cannot parse event from non-JSON line: {line!r}. Please report this at '
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md' 'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
.format(line)
) )
continue continue
try: try:
@ -392,9 +389,8 @@ def parse_json_events(stderr, warn_function=None):
except Exception as exc: except Exception as exc:
if warn_function: if warn_function:
warn_function( warn_function(
'Cannot parse event from line: {0!r}: {1}. Please report this at ' f'Cannot parse event from line: {line!r}: {exc}. Please report this at '
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md' 'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
.format(line, exc)
) )
continue continue
if line_data.get('tail'): if line_data.get('tail'):
@ -449,9 +445,8 @@ def parse_json_events(stderr, warn_function=None):
except KeyError: except KeyError:
if warn_function: if warn_function:
warn_function( warn_function(
'Unknown resource type {0!r} in line {1!r}. Please report this at ' f'Unknown resource type {resource_type_str!r} in line {line!r}. Please report this at '
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md' 'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
.format(resource_type_str, line)
) )
resource_type = ResourceType.UNKNOWN resource_type = ResourceType.UNKNOWN
elif text in DOCKER_STATUS_PULL: elif text in DOCKER_STATUS_PULL:
@ -589,11 +584,7 @@ def emit_warnings(events, warn_function):
for event in events: for event in events:
# If a message is present, assume it is a warning # If a message is present, assume it is a warning
if (event.status is None and event.msg is not None) or event.status in DOCKER_STATUS_WARNING: if (event.status is None and event.msg is not None) or event.status in DOCKER_STATUS_WARNING:
warn_function('Docker compose: {resource_type} {resource_id}: {msg}'.format( warn_function(f'Docker compose: {event.resource_type} {event.resource_id}: {event.msg}')
resource_type=event.resource_type,
resource_id=event.resource_id,
msg=event.msg,
))
def is_failed(events, rc): def is_failed(events, rc):
@ -610,22 +601,17 @@ def update_failed(result, events, args, stdout, stderr, rc, cli):
if event.status in DOCKER_STATUS_ERROR: if event.status in DOCKER_STATUS_ERROR:
if event.resource_id is None: if event.resource_id is None:
if event.resource_type == 'unknown': if event.resource_type == 'unknown':
msg = 'General error: ' if event.resource_type == 'unknown' else 'Error when processing {resource_type}: ' msg = 'General error: ' if event.resource_type == 'unknown' else f'Error when processing {event.resource_type}: '
else: else:
msg = 'Error when processing {resource_type} {resource_id}: ' msg = f'Error when processing {event.resource_type} {event.resource_id}: '
if event.resource_type == 'unknown': if event.resource_type == 'unknown':
msg = 'Error when processing {resource_id}: ' msg = f'Error when processing {event.resource_id}: '
if event.resource_id == '': if event.resource_id == '':
msg = 'General error: ' msg = 'General error: '
msg += '{status}' if event.msg is None else '{msg}' msg += f'{event.status}' if event.msg is None else f'{event.msg}'
errors.append(msg.format( errors.append(msg)
resource_type=event.resource_type,
resource_id=event.resource_id,
status=event.status,
msg=event.msg,
))
if not errors: if not errors:
errors.append('Return code {code} is non-zero'.format(code=rc)) errors.append(f'Return code {rc} is non-zero')
result['failed'] = True result['failed'] = True
result['msg'] = '\n'.join(errors) result['msg'] = '\n'.join(errors)
result['cmd'] = ' '.join(quote(arg) for arg in [cli] + args) result['cmd'] = ' '.join(quote(arg) for arg in [cli] + args)
@ -706,24 +692,20 @@ class BaseComposeManager(DockerBaseClass):
compose_version = self.get_compose_version() compose_version = self.get_compose_version()
self.compose_version = LooseVersion(compose_version) self.compose_version = LooseVersion(compose_version)
if self.compose_version < LooseVersion(min_version): if self.compose_version < LooseVersion(min_version):
self.fail('Docker CLI {cli} has the compose plugin with version {version}; need version {min_version} or later'.format( self.fail(f'Docker CLI {self.client.get_cli()} has the compose plugin with version {compose_version}; need version {min_version} or later')
cli=self.client.get_cli(),
version=compose_version,
min_version=min_version,
))
if not os.path.isdir(self.project_src): if not os.path.isdir(self.project_src):
self.fail('"{0}" is not a directory'.format(self.project_src)) self.fail(f'"{self.project_src}" is not a directory')
self.check_files_existing = parameters['check_files_existing'] self.check_files_existing = parameters['check_files_existing']
if self.files: if self.files:
for file in self.files: for file in self.files:
path = os.path.join(self.project_src, file) path = os.path.join(self.project_src, file)
if not os.path.exists(path): if not os.path.exists(path):
self.fail('Cannot find Compose file "{0}" relative to project directory "{1}"'.format(file, self.project_src)) self.fail(f'Cannot find Compose file "{file}" relative to project directory "{self.project_src}"')
elif self.check_files_existing and all(not os.path.exists(os.path.join(self.project_src, f)) for f in DOCKER_COMPOSE_FILES): elif self.check_files_existing and all(not os.path.exists(os.path.join(self.project_src, f)) for f in DOCKER_COMPOSE_FILES):
filenames = ', '.join(DOCKER_COMPOSE_FILES[:-1]) filenames = ', '.join(DOCKER_COMPOSE_FILES[:-1])
self.fail('"{0}" does not contain {1}, or {2}'.format(self.project_src, filenames, DOCKER_COMPOSE_FILES[-1])) self.fail(f'"{self.project_src}" does not contain {filenames}, or {DOCKER_COMPOSE_FILES[-1]}')
# Support for JSON output was added in Compose 2.29.0 (https://github.com/docker/compose/releases/tag/v2.29.0); # Support for JSON output was added in Compose 2.29.0 (https://github.com/docker/compose/releases/tag/v2.29.0);
# more precisely in https://github.com/docker/compose/pull/11478 # more precisely in https://github.com/docker/compose/pull/11478
@ -747,12 +729,11 @@ class BaseComposeManager(DockerBaseClass):
def get_compose_version_from_api(self): def get_compose_version_from_api(self):
compose = self.client.get_client_plugin_info('compose') compose = self.client.get_client_plugin_info('compose')
if compose is None: if compose is None:
self.fail('Docker CLI {0} does not have the compose plugin installed'.format(self.client.get_cli())) self.fail(f'Docker CLI {self.client.get_cli()} does not have the compose plugin installed')
if compose['Version'] == 'dev': if compose['Version'] == 'dev':
self.fail( self.fail(
'Docker CLI {0} has a compose plugin installed, but it reports version "dev".' f'Docker CLI {self.client.get_cli()} has a compose plugin installed, but it reports version "dev".'
' Please use a version of the plugin that returns a proper version.' ' Please use a version of the plugin that returns a proper version.'
.format(self.client.get_cli())
) )
return compose['Version'].lstrip('v') return compose['Version'].lstrip('v')
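
A standalone sketch of the minimum-version gate shown in the hunks above, assuming the LooseVersion shim shipped with ansible-core; the function name and the example versions are illustrative, not taken from the collection:

from ansible.module_utils.compat.version import LooseVersion

def require_plugin_version(reported, minimum):
    # Docker CLI plugins report versions such as "v2.29.0"; drop the prefix before comparing.
    version = reported.lstrip('v')
    if version == 'dev':
        raise ValueError('plugin reports version "dev", which cannot be compared')
    if LooseVersion(version) < LooseVersion(minimum):
        raise ValueError(f'plugin version {version} is older than required {minimum}')
    return version

require_plugin_version('v2.29.0', '2.17.0')  # returns '2.29.0'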

View File

@ -172,13 +172,13 @@ def put_file(client, container, in_path, out_path, user_id, group_id, mode=None,
elif stat.S_ISLNK(file_stat.st_mode): elif stat.S_ISLNK(file_stat.st_mode):
stream = _symlink_tar_generator(b_in_path, file_stat, out_file, user_id, group_id, mode=mode, user_name=user_name) stream = _symlink_tar_generator(b_in_path, file_stat, out_file, user_id, group_id, mode=mode, user_name=user_name)
else: else:
file_part = ' referenced by' if follow_links else ''
raise DockerFileCopyError( raise DockerFileCopyError(
'File{0} {1} is neither a regular file nor a symlink (stat mode {2}).'.format( f'File{file_part} {in_path} is neither a regular file nor a symlink (stat mode {oct(file_stat.st_mode)}).')
' referenced by' if follow_links else '', in_path, oct(file_stat.st_mode)))
ok = _put_archive(client, container, out_dir, stream) ok = _put_archive(client, container, out_dir, stream)
if not ok: if not ok:
raise DockerUnexpectedError('Unknown error while creating file "{0}" in container "{1}".'.format(out_path, container)) raise DockerUnexpectedError(f'Unknown error while creating file "{out_path}" in container "{container}".')
def put_file_content(client, container, content, out_path, user_id, group_id, mode, user_name=None): def put_file_content(client, container, content, out_path, user_id, group_id, mode, user_name=None):
@ -189,7 +189,7 @@ def put_file_content(client, container, content, out_path, user_id, group_id, mo
ok = _put_archive(client, container, out_dir, stream) ok = _put_archive(client, container, out_dir, stream)
if not ok: if not ok:
raise DockerUnexpectedError('Unknown error while creating file "{0}" in container "{1}".'.format(out_path, container)) raise DockerUnexpectedError(f'Unknown error while creating file "{out_path}" in container "{container}".')
def stat_file(client, container, in_path, follow_links=False, log=None): def stat_file(client, container, in_path, follow_links=False, log=None):
@ -208,7 +208,7 @@ def stat_file(client, container, in_path, follow_links=False, log=None):
while True: while True:
if in_path in considered_in_paths: if in_path in considered_in_paths:
raise DockerFileCopyError('Found infinite symbolic link loop when trying to stating "{0}"'.format(in_path)) raise DockerFileCopyError(f'Found infinite symbolic link loop when trying to stat "{in_path}"')
considered_in_paths.add(in_path) considered_in_paths.add(in_path)
if log: if log:
@ -226,8 +226,7 @@ def stat_file(client, container, in_path, follow_links=False, log=None):
stat_data = json.loads(base64.b64decode(header)) stat_data = json.loads(base64.b64decode(header))
except Exception as exc: except Exception as exc:
raise DockerUnexpectedError( raise DockerUnexpectedError(
'When retrieving information for {in_path} from {container}, obtained header {header!r} that cannot be loaded as JSON: {exc}' f'When retrieving information for {in_path} from {container}, obtained header {header!r} that cannot be loaded as JSON: {exc}'
.format(in_path=in_path, container=container, header=header, exc=exc)
) )
# https://pkg.go.dev/io/fs#FileMode: bit 32 - 5 means ModeSymlink # https://pkg.go.dev/io/fs#FileMode: bit 32 - 5 means ModeSymlink
@ -285,7 +284,7 @@ def fetch_file_ex(client, container, in_path, process_none, process_regular, pro
while True: while True:
if in_path in considered_in_paths: if in_path in considered_in_paths:
raise DockerFileCopyError('Found infinite symbolic link loop when trying to fetch "{0}"'.format(in_path)) raise DockerFileCopyError(f'Found infinite symbolic link loop when trying to fetch "{in_path}"')
considered_in_paths.add(in_path) considered_in_paths.add(in_path)
if log: if log:
@ -331,8 +330,7 @@ def fetch_file(client, container, in_path, out_path, follow_links=False, log=Non
def process_none(in_path): def process_none(in_path):
raise DockerFileNotFound( raise DockerFileNotFound(
'File {in_path} does not exist in container {container}' f'File {in_path} does not exist in container {container}'
.format(in_path=in_path, container=container)
) )
def process_regular(in_path, tar, member): def process_regular(in_path, tar, member):
@ -359,7 +357,7 @@ def fetch_file(client, container, in_path, out_path, follow_links=False, log=Non
def _execute_command(client, container, command, log=None, check_rc=False): def _execute_command(client, container, command, log=None, check_rc=False):
if log: if log:
log('Executing {command} in {container}'.format(command=command, container=container)) log(f'Executing {command} in {container}')
data = { data = {
'Container': container, 'Container': container,
@ -378,10 +376,10 @@ def _execute_command(client, container, command, log=None, check_rc=False):
try: try:
exec_data = client.post_json_to_json('/containers/{0}/exec', container, data=data) exec_data = client.post_json_to_json('/containers/{0}/exec', container, data=data)
except NotFound as e: except NotFound as e:
raise DockerFileCopyError('Could not find container "{container}"'.format(container=container)) from e raise DockerFileCopyError(f'Could not find container "{container}"') from e
except APIError as e: except APIError as e:
if e.response is not None and e.response.status_code == 409: if e.response is not None and e.response.status_code == 409:
raise DockerFileCopyError('Cannot execute command in paused container "{container}"'.format(container=container)) from e raise DockerFileCopyError(f'Cannot execute command in paused container "{container}"') from e
raise raise
exec_id = exec_data['Id'] exec_id = exec_data['Id']
@ -398,12 +396,12 @@ def _execute_command(client, container, command, log=None, check_rc=False):
stderr = stderr or b'' stderr = stderr or b''
if log: if log:
log('Exit code {rc}, stdout {stdout!r}, stderr {stderr!r}'.format(rc=rc, stdout=stdout, stderr=stderr)) log(f'Exit code {rc}, stdout {stdout!r}, stderr {stderr!r}')
if check_rc and rc != 0: if check_rc and rc != 0:
command_str = ' '.join(command)
raise DockerUnexpectedError( raise DockerUnexpectedError(
'Obtained unexpected exit code {rc} when running "{command}" in {container}.\nSTDOUT: {stdout}\nSTDERR: {stderr}' f'Obtained unexpected exit code {rc} when running "{command_str}" in {container}.\nSTDOUT: {stdout}\nSTDERR: {stderr}'
.format(command=' '.join(command), container=container, rc=rc, stdout=stdout, stderr=stderr)
) )
return rc, stdout, stderr return rc, stdout, stderr
@ -415,8 +413,7 @@ def determine_user_group(client, container, log=None):
stdout_lines = stdout.splitlines() stdout_lines = stdout.splitlines()
if len(stdout_lines) != 2: if len(stdout_lines) != 2:
raise DockerUnexpectedError( raise DockerUnexpectedError(
'Expected two-line output to obtain user and group ID for container {container}, but got {lc} lines:\n{stdout}' f'Expected two-line output to obtain user and group ID for container {container}, but got {len(stdout_lines)} lines:\n{stdout}'
.format(container=container, lc=len(stdout_lines), stdout=stdout)
) )
user_id, group_id = stdout_lines user_id, group_id = stdout_lines
@ -424,6 +421,5 @@ def determine_user_group(client, container, log=None):
return int(user_id), int(group_id) return int(user_id), int(group_id)
except ValueError: except ValueError:
raise DockerUnexpectedError( raise DockerUnexpectedError(
'Expected two-line output with numeric IDs to obtain user and group ID for container {container}, but got "{l1}" and "{l2}" instead' f'Expected two-line output with numeric IDs to obtain user and group ID for container {container}, but got "{user_id}" and "{group_id}" instead'
.format(container=container, l1=user_id, l2=group_id)
) )
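
A small illustration of the helper-variable pattern introduced above (file_part, command_str): computing the expression first keeps the f-string readable and avoids the pre-3.12 restriction that an f-string may not reuse its own quote character inside a replacement field. The function and message below are made up for illustration:

def describe_failure(in_path, command, follow_links):
    # Pre-compute the pieces, then interpolate plain names only.
    file_part = ' referenced by a symlink' if follow_links else ''
    command_str = ' '.join(command)
    return f'File{file_part} {in_path} could not be inspected with "{command_str}".'

print(describe_failure('/etc/hosts', ['stat', '-c', '%u %g', '/etc/hosts'], False))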

View File

@ -93,7 +93,7 @@ def load_archived_image_manifest(archive_path):
config_file = meta['Config'] config_file = meta['Config']
except KeyError as exc: except KeyError as exc:
raise ImageArchiveInvalidException( raise ImageArchiveInvalidException(
"Failed to get Config entry from {0}th manifest in manifest.json: {1}".format(index + 1, to_native(exc)) f"Failed to get Config entry from {index + 1}th manifest in manifest.json: {exc}"
) from exc ) from exc
# Extracts hash without 'sha256:' prefix # Extracts hash without 'sha256:' prefix
@ -102,7 +102,7 @@ def load_archived_image_manifest(archive_path):
image_id = os.path.splitext(config_file)[0] image_id = os.path.splitext(config_file)[0]
except Exception as exc: except Exception as exc:
raise ImageArchiveInvalidException( raise ImageArchiveInvalidException(
"Failed to extract image id from config file name %s: %s" % (config_file, to_native(exc)) f"Failed to extract image id from config file name {config_file}: {exc}"
) from exc ) from exc
for prefix in ( for prefix in (
@ -115,7 +115,7 @@ def load_archived_image_manifest(archive_path):
repo_tags = meta['RepoTags'] repo_tags = meta['RepoTags']
except KeyError as exc: except KeyError as exc:
raise ImageArchiveInvalidException( raise ImageArchiveInvalidException(
"Failed to get RepoTags entry from {0}th manifest in manifest.json: {1}".format(index + 1, to_native(exc)) f"Failed to get RepoTags entry from {index + 1}th manifest in manifest.json: {exc}"
) from exc ) from exc
result.append(ImageArchiveManifestSummary( result.append(ImageArchiveManifestSummary(

View File

@ -365,15 +365,15 @@ def _parse_port_range(range_or_port, module):
try: try:
start, end = [int(port) for port in range_or_port.split('-')] start, end = [int(port) for port in range_or_port.split('-')]
except Exception: except Exception:
module.fail_json(msg='Invalid port range: "{0}"'.format(range_or_port)) module.fail_json(msg=f'Invalid port range: "{range_or_port}"')
if end < start: if end < start:
module.fail_json(msg='Invalid port range: "{0}"'.format(range_or_port)) module.fail_json(msg=f'Invalid port range: "{range_or_port}"')
return list(range(start, end + 1)) return list(range(start, end + 1))
else: else:
try: try:
return [int(range_or_port)] return [int(range_or_port)]
except Exception: except Exception:
module.fail_json(msg='Invalid port: "{0}"'.format(range_or_port)) module.fail_json(msg=f'Invalid port: "{range_or_port}"')
def _split_colon_ipv6(text, module): def _split_colon_ipv6(text, module):
@ -391,7 +391,7 @@ def _split_colon_ipv6(text, module):
break break
j = text.find(']', i) j = text.find(']', i)
if j < 0: if j < 0:
module.fail_json(msg='Cannot find closing "]" in input "{0}" for opening "[" at index {1}!'.format(text, i + 1)) module.fail_json(msg=f'Cannot find closing "]" in input "{text}" for opening "[" at index {i + 1}!')
result.extend(text[start:i].split(':')) result.extend(text[start:i].split(':'))
k = text.find(':', j) k = text.find(':', j)
if k < 0: if k < 0:
@ -571,9 +571,9 @@ def _preprocess_mounts(module, values):
def check_collision(t, name): def check_collision(t, name):
if t in last: if t in last:
if name == last[t]: if name == last[t]:
module.fail_json(msg='The mount point "{0}" appears twice in the {1} option'.format(t, name)) module.fail_json(msg=f'The mount point "{t}" appears twice in the {name} option')
else: else:
module.fail_json(msg='The mount point "{0}" appears both in the {1} and {2} option'.format(t, name, last[t])) module.fail_json(msg=f'The mount point "{t}" appears both in the {name} and {last[t]} option')
last[t] = name last[t] = name
if 'mounts' in values: if 'mounts' in values:
@ -588,17 +588,13 @@ def _preprocess_mounts(module, values):
# Sanity checks # Sanity checks
if mount['source'] is None and mount_type not in ('tmpfs', 'volume', 'image', 'cluster'): if mount['source'] is None and mount_type not in ('tmpfs', 'volume', 'image', 'cluster'):
module.fail_json(msg='source must be specified for mount "{0}" of type "{1}"'.format(target, mount_type)) module.fail_json(msg=f'source must be specified for mount "{target}" of type "{mount_type}"')
for option, req_mount_types in _MOUNT_OPTION_TYPES.items(): for option, req_mount_types in _MOUNT_OPTION_TYPES.items():
if mount[option] is not None and mount_type not in req_mount_types: if mount[option] is not None and mount_type not in req_mount_types:
type_plural = "" if len(req_mount_types) == 1 else "s"
type_list = '", "'.join(req_mount_types)
module.fail_json( module.fail_json(
msg='{0} cannot be specified for mount "{1}" of type "{2}" (needs type{3} "{4}")'.format( msg=f'{option} cannot be specified for mount "{target}" of type "{mount_type}" (needs type{type_plural} "{type_list}")'
option,
target,
mount_type,
"" if len(req_mount_types) == 1 else "s",
'", "'.join(req_mount_types),
)
) )
# Streamline options # Streamline options
@ -611,22 +607,22 @@ def _preprocess_mounts(module, values):
try: try:
mount_dict['tmpfs_size'] = human_to_bytes(mount_dict['tmpfs_size']) mount_dict['tmpfs_size'] = human_to_bytes(mount_dict['tmpfs_size'])
except ValueError as exc: except ValueError as exc:
module.fail_json(msg='Failed to convert tmpfs_size of mount "{0}" to bytes: {1}'.format(target, to_native(exc))) module.fail_json(msg=f'Failed to convert tmpfs_size of mount "{target}" to bytes: {exc}')
if mount_dict['tmpfs_mode'] is not None: if mount_dict['tmpfs_mode'] is not None:
try: try:
mount_dict['tmpfs_mode'] = int(mount_dict['tmpfs_mode'], 8) mount_dict['tmpfs_mode'] = int(mount_dict['tmpfs_mode'], 8)
except Exception as dummy: except Exception as dummy:
module.fail_json(msg='tmp_fs mode of mount "{0}" is not an octal string!'.format(target)) module.fail_json(msg=f'tmp_fs mode of mount "{target}" is not an octal string!')
if mount_dict['tmpfs_options']: if mount_dict['tmpfs_options']:
opts = [] opts = []
for idx, opt in enumerate(mount_dict['tmpfs_options']): for idx, opt in enumerate(mount_dict['tmpfs_options']):
if len(opt) != 1: if len(opt) != 1:
module.fail_json(msg='tmpfs_options[{1}] of mount "{0}" must be a one-element dictionary!'.format(target, idx + 1)) module.fail_json(msg=f'tmpfs_options[{idx + 1}] of mount "{target}" must be a one-element dictionary!')
k, v = list(opt.items())[0] k, v = list(opt.items())[0]
if not isinstance(k, str): if not isinstance(k, str):
module.fail_json(msg='key {2!r} in tmpfs_options[{1}] of mount "{0}" must be a string!'.format(target, idx + 1, k)) module.fail_json(msg=f'key {k!r} in tmpfs_options[{idx + 1}] of mount "{target}" must be a string!')
if v is not None and not isinstance(v, str): if v is not None and not isinstance(v, str):
module.fail_json(msg='value {2!r} in tmpfs_options[{1}] of mount "{0}" must be a string or null/none!'.format(target, idx + 1, v)) module.fail_json(msg=f'value {v!r} in tmpfs_options[{idx + 1}] of mount "{target}" must be a string or null/none!')
opts.append([k, v] if v is not None else [k]) opts.append([k, v] if v is not None else [k])
mount_dict['tmpfs_options'] = opts mount_dict['tmpfs_options'] = opts
@ -641,7 +637,7 @@ def _preprocess_mounts(module, values):
if len(parts) == 3: if len(parts) == 3:
host, container, mode = parts host, container, mode = parts
if not _is_volume_permissions(mode): if not _is_volume_permissions(mode):
module.fail_json(msg='Found invalid volumes mode: {0}'.format(mode)) module.fail_json(msg=f'Found invalid volumes mode: {mode}')
if re.match(r'[.~]', host): if re.match(r'[.~]', host):
host = os.path.abspath(os.path.expanduser(host)) host = os.path.abspath(os.path.expanduser(host))
check_collision(container, 'volumes') check_collision(container, 'volumes')
@ -664,7 +660,7 @@ def _preprocess_mounts(module, values):
if len(parts) == 3: if len(parts) == 3:
host, container, mode = parts host, container, mode = parts
if not _is_volume_permissions(mode): if not _is_volume_permissions(mode):
module.fail_json(msg='Found invalid volumes mode: {0}'.format(mode)) module.fail_json(msg=f'Found invalid volumes mode: {mode}')
elif len(parts) == 2: elif len(parts) == 2:
if not _is_volume_permissions(parts[1]): if not _is_volume_permissions(parts[1]):
host, container, mode = (parts + ['rw']) host, container, mode = (parts + ['rw'])
@ -735,7 +731,7 @@ def _preprocess_ports(module, values):
if not re.match(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$', parts[0]) and not re.match(r'^\[[0-9a-fA-F:]+(?:|%[^\]/]+)\]$', ipaddr): if not re.match(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$', parts[0]) and not re.match(r'^\[[0-9a-fA-F:]+(?:|%[^\]/]+)\]$', ipaddr):
module.fail_json( module.fail_json(
msg='Bind addresses for published ports must be IPv4 or IPv6 addresses, not hostnames. ' msg='Bind addresses for published ports must be IPv4 or IPv6 addresses, not hostnames. '
'Use the dig lookup to resolve hostnames. (Found hostname: {0})'.format(ipaddr) f'Use the dig lookup to resolve hostnames. (Found hostname: {ipaddr})'
) )
if re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr): if re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr):
ipaddr = ipaddr[1:-1] ipaddr = ipaddr[1:-1]
@ -753,7 +749,7 @@ def _preprocess_ports(module, values):
) )
for bind, container_port in zip(port_binds, container_ports): for bind, container_port in zip(port_binds, container_ports):
idx = '{0}/{1}'.format(container_port, protocol) if protocol else container_port idx = f'{container_port}/{protocol}' if protocol else container_port
if idx in binds: if idx in binds:
old_bind = binds[idx] old_bind = binds[idx]
if isinstance(old_bind, list): if isinstance(old_bind, list):
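
For reference, a rough standalone version of the port-range parsing reformatted earlier in this file: a value such as "8000-8002" expands to the individual ports and a bare port becomes a one-element list. Error handling is reduced to plain exceptions instead of module.fail_json:

def parse_port_range(range_or_port):
    if '-' in range_or_port:
        start, end = [int(port) for port in range_or_port.split('-')]
        if end < start:
            raise ValueError(f'Invalid port range: "{range_or_port}"')
        return list(range(start, end + 1))
    return [int(range_or_port)]

assert parse_port_range('8000-8002') == [8000, 8001, 8002]
assert parse_port_range('443') == [443]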

View File

@ -389,10 +389,10 @@ class DockerAPIEngineDriver(EngineDriver):
try: try:
runner() runner()
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -611,7 +611,7 @@ def _get_default_host_ip(module, client):
network = client.get_network(network_data['name']) network = client.get_network(network_data['name'])
if network is None: if network is None:
client.fail( client.fail(
"Cannot inspect the network '{0}' to determine the default IP".format(network_data['name']), f"Cannot inspect the network '{network_data['name']}' to determine the default IP",
) )
if network.get('Driver') == 'bridge' and network.get('Options', {}).get('com.docker.network.bridge.host_binding_ipv4'): if network.get('Driver') == 'bridge' and network.get('Options', {}).get('com.docker.network.bridge.host_binding_ipv4'):
ip = network['Options']['com.docker.network.bridge.host_binding_ipv4'] ip = network['Options']['com.docker.network.bridge.host_binding_ipv4']
@ -832,9 +832,10 @@ def _ignore_mismatching_label_result(module, client, api_version, option, image,
# Format label for error message # Format label for error message
would_remove_labels.append('"%s"' % (label, )) would_remove_labels.append('"%s"' % (label, ))
if would_remove_labels: if would_remove_labels:
labels = ', '.join(would_remove_labels)
msg = ("Some labels should be removed but are present in the base image. You can set image_label_mismatch to 'ignore' to ignore" msg = ("Some labels should be removed but are present in the base image. You can set image_label_mismatch to 'ignore' to ignore"
" this error. Labels: {0}") f" this error. Labels: {labels}")
client.fail(msg.format(', '.join(would_remove_labels))) client.fail(msg)
return False return False
@ -1282,9 +1283,9 @@ def _preprocess_container_names(module, client, api_version, value):
if container is None: if container is None:
# If we cannot find the container, issue a warning and continue with # If we cannot find the container, issue a warning and continue with
# what the user specified. # what the user specified.
module.warn('Cannot find a container with name or ID "{0}"'.format(container_name)) module.warn(f'Cannot find a container with name or ID "{container_name}"')
return value return value
return 'container:{0}'.format(container['Id']) return f"container:{container['Id']}"
def _get_value_command(module, container, api_version, options, image, host_info): def _get_value_command(module, container, api_version, options, image, host_info):

View File

@ -102,11 +102,11 @@ class ContainerManager(DockerBaseClass):
if re.match(r'^\[[0-9a-fA-F:]+\]$', self.param_default_host_ip): if re.match(r'^\[[0-9a-fA-F:]+\]$', self.param_default_host_ip):
valid_ip = True valid_ip = True
if re.match(r'^[0-9a-fA-F:]+$', self.param_default_host_ip): if re.match(r'^[0-9a-fA-F:]+$', self.param_default_host_ip):
self.param_default_host_ip = '[{0}]'.format(self.param_default_host_ip) self.param_default_host_ip = f'[{self.param_default_host_ip}]'
valid_ip = True valid_ip = True
if not valid_ip: if not valid_ip:
self.fail('The value of default_host_ip must be an empty string, an IPv4 address, ' self.fail('The value of default_host_ip must be an empty string, an IPv4 address, '
'or an IPv6 address. Got "{0}" instead.'.format(self.param_default_host_ip)) f'or an IPv6 address. Got "{self.param_default_host_ip}" instead.')
def _collect_all_options(self, active_options): def _collect_all_options(self, active_options):
all_options = {} all_options = {}
@ -228,8 +228,8 @@ class ContainerManager(DockerBaseClass):
if result is None: if result is None:
if accept_removal: if accept_removal:
return result return result
msg = 'Encontered vanished container while waiting for container "{0}"' msg = f'Encountered vanished container while waiting for container "{container_id}"'
self.fail(msg.format(container_id)) self.fail(msg)
# Check container state # Check container state
state_info = result.get('State') or {} state_info = result.get('State') or {}
if health_state: if health_state:
@ -238,13 +238,13 @@ class ContainerManager(DockerBaseClass):
if complete_states is not None and state in complete_states: if complete_states is not None and state in complete_states:
return result return result
if wait_states is not None and state not in wait_states: if wait_states is not None and state not in wait_states:
msg = 'Encontered unexpected state "{1}" while waiting for container "{0}"' msg = f'Encountered unexpected state "{state}" while waiting for container "{container_id}"'
self.fail(msg.format(container_id, state), container=result) self.fail(msg, container=result)
# Wait # Wait
if max_wait is not None: if max_wait is not None:
if total_wait > max_wait or delay < 1E-4: if total_wait > max_wait or delay < 1E-4:
msg = 'Timeout of {1} seconds exceeded while waiting for container "{0}"' msg = f'Timeout of {max_wait} seconds exceeded while waiting for container "{container_id}"'
self.fail(msg.format(container_id, max_wait), container=result) self.fail(msg, container=result)
if total_wait + delay > max_wait: if total_wait + delay > max_wait:
delay = max_wait - total_wait delay = max_wait - total_wait
sleep(delay) sleep(delay)
@ -674,7 +674,7 @@ class ContainerManager(DockerBaseClass):
self.diff['differences'] = [dict(network_differences=network_differences)] self.diff['differences'] = [dict(network_differences=network_differences)]
for netdiff in network_differences: for netdiff in network_differences:
self.diff_tracker.add( self.diff_tracker.add(
'network.{0}'.format(netdiff['parameter']['name']), f"network.{netdiff['parameter']['name']}",
parameter=netdiff['parameter'], parameter=netdiff['parameter'],
active=netdiff['container'] active=netdiff['container']
) )
@ -691,7 +691,7 @@ class ContainerManager(DockerBaseClass):
self.diff['differences'] = [dict(purge_networks=extra_networks)] self.diff['differences'] = [dict(purge_networks=extra_networks)]
for extra_network in extra_networks: for extra_network in extra_networks:
self.diff_tracker.add( self.diff_tracker.add(
'network.{0}'.format(extra_network['name']), f"network.{extra_network['name']}",
active=extra_network active=extra_network
) )
self.results['changed'] = True self.results['changed'] = True

View File

@ -89,7 +89,7 @@ class DockerSocketHandlerBase(object):
if data is None: if data is None:
# no data available # no data available
return return
self._log('read {0} bytes'.format(len(data))) self._log(f'read {len(data)} bytes')
if len(data) == 0: if len(data) == 0:
# Stream EOF # Stream EOF
self._eof = True self._eof = True
@ -123,7 +123,7 @@ class DockerSocketHandlerBase(object):
if len(self._write_buffer) > 0: if len(self._write_buffer) > 0:
written = write_to_socket(self._sock, self._write_buffer) written = write_to_socket(self._sock, self._write_buffer)
self._write_buffer = self._write_buffer[written:] self._write_buffer = self._write_buffer[written:]
self._log('wrote {0} bytes, {1} are left'.format(written, len(self._write_buffer))) self._log(f'wrote {written} bytes, {len(self._write_buffer)} are left')
if len(self._write_buffer) > 0: if len(self._write_buffer) > 0:
self._selector.modify(self._sock, self._selectors.EVENT_READ | self._selectors.EVENT_WRITE) self._selector.modify(self._sock, self._selectors.EVENT_READ | self._selectors.EVENT_WRITE)
else: else:
@ -147,14 +147,13 @@ class DockerSocketHandlerBase(object):
return True return True
if timeout is not None: if timeout is not None:
timeout -= PARAMIKO_POLL_TIMEOUT timeout -= PARAMIKO_POLL_TIMEOUT
self._log('select... ({0})'.format(timeout)) self._log(f'select... ({timeout})')
events = self._selector.select(timeout) events = self._selector.select(timeout)
for key, event in events: for key, event in events:
if key.fileobj == self._sock: if key.fileobj == self._sock:
self._log( ev_read = event & self._selectors.EVENT_READ != 0
'select event read:{0} write:{1}'.format( ev_write = event & self._selectors.EVENT_WRITE != 0
event & self._selectors.EVENT_READ != 0, self._log(f'select event read:{ev_read} write:{ev_write}')
event & self._selectors.EVENT_WRITE != 0))
if event & self._selectors.EVENT_READ != 0: if event & self._selectors.EVENT_READ != 0:
self._read() self._read()
if event & self._selectors.EVENT_WRITE != 0: if event & self._selectors.EVENT_WRITE != 0:
@ -183,7 +182,7 @@ class DockerSocketHandlerBase(object):
elif stream_id == docker_socket.STDERR: elif stream_id == docker_socket.STDERR:
stderr.append(data) stderr.append(data)
else: else:
raise ValueError('{0} is not a valid stream ID'.format(stream_id)) raise ValueError(f'{stream_id} is not a valid stream ID')
self.end_of_writing() self.end_of_writing()

View File

@ -44,7 +44,7 @@ def shutdown_writing(sock, log=_empty_writer):
sock.shutdown(pysocket.SHUT_WR) sock.shutdown(pysocket.SHUT_WR)
except TypeError as e: except TypeError as e:
# probably: "TypeError: shutdown() takes 1 positional argument but 2 were given" # probably: "TypeError: shutdown() takes 1 positional argument but 2 were given"
log('Shutting down for writing not possible; trying shutdown instead: {0}'.format(e)) log(f'Shutting down for writing not possible; trying shutdown instead: {e}')
sock.shutdown() sock.shutdown()
elif isinstance(sock, getattr(pysocket, 'SocketIO')): elif isinstance(sock, getattr(pysocket, 'SocketIO')):
sock._sock.shutdown(pysocket.SHUT_WR) sock._sock.shutdown(pysocket.SHUT_WR)

View File

@ -289,13 +289,9 @@ def sanitize_labels(labels, labels_field, client=None, module=None):
return return
for k, v in list(labels.items()): for k, v in list(labels.items()):
if not isinstance(k, str): if not isinstance(k, str):
fail( fail(f"The key {k!r} of {labels_field} is not a string!")
"The key {key!r} of {field} is not a string!".format(
field=labels_field, key=k))
if isinstance(v, (bool, float)): if isinstance(v, (bool, float)):
fail( fail(f"The value {v!r} for {k!r} of {labels_field} is not a string or something than can be safely converted to a string!")
"The value {value!r} for {key!r} of {field} is not a string or something than can be safely converted to a string!".format(
field=labels_field, key=k, value=v))
labels[k] = to_text(v) labels[k] = to_text(v)
@ -389,8 +385,7 @@ def normalize_healthcheck(healthcheck, normalize_test=False):
value = int(value) value = int(value)
except ValueError: except ValueError:
raise ValueError( raise ValueError(
'Cannot parse number of retries for healthcheck. ' f'Cannot parse number of retries for healthcheck. Expected an integer, got "{value}".'
'Expected an integer, got "{0}".'.format(value)
) )
if key == 'test' and value and normalize_test: if key == 'test' and value and normalize_test:
value = normalize_healthcheck_test(value) value = normalize_healthcheck_test(value)
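
A self-contained sketch of the label sanitizing shown above: keys must already be strings, bool and float values are rejected because they cannot be converted to strings unambiguously, and everything else is coerced to text (the module uses to_text; plain str stands in for it here):

def sanitize_labels(labels, labels_field='labels'):
    for k, v in list(labels.items()):
        if not isinstance(k, str):
            raise ValueError(f"The key {k!r} of {labels_field} is not a string!")
        if isinstance(v, (bool, float)):
            raise ValueError(f"The value {v!r} for {k!r} of {labels_field} cannot be safely converted to a string!")
        labels[k] = str(v)
    return labels

print(sanitize_labels({'app': 'web', 'replicas': 3}))  # {'app': 'web', 'replicas': '3'}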

View File

@ -439,7 +439,6 @@ actions:
import traceback import traceback
from ansible.module_utils.common.validation import check_type_int from ansible.module_utils.common.validation import check_type_int
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.docker.plugins.module_utils.common_cli import ( from ansible_collections.community.docker.plugins.module_utils.common_cli import (
AnsibleModuleDockerClient, AnsibleModuleDockerClient,
@ -691,7 +690,7 @@ def main():
manager.cleanup() manager.cleanup()
client.module.exit_json(**result) client.module.exit_json(**result)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -167,7 +167,7 @@ rc:
import shlex import shlex
import traceback import traceback
from ansible.module_utils.common.text.converters import to_text, to_native from ansible.module_utils.common.text.converters import to_text
from ansible_collections.community.docker.plugins.module_utils.common_cli import ( from ansible_collections.community.docker.plugins.module_utils.common_cli import (
AnsibleModuleDockerClient, AnsibleModuleDockerClient,
@ -232,7 +232,7 @@ class ExecManager(BaseComposeManager):
if self.env: if self.env:
for name, value in list(self.env.items()): for name, value in list(self.env.items()):
args.append('--env') args.append('--env')
args.append('{0}={1}'.format(name, value)) args.append(f'{name}={value}')
args.append('--') args.append('--')
args.append(self.service) args.append(self.service)
args.extend(self.argv) args.extend(self.argv)
@ -295,7 +295,7 @@ def main():
manager.cleanup() manager.cleanup()
client.module.exit_json(**result) client.module.exit_json(**result)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -112,8 +112,6 @@ actions:
import traceback import traceback
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.docker.plugins.module_utils.common_cli import ( from ansible_collections.community.docker.plugins.module_utils.common_cli import (
AnsibleModuleDockerClient, AnsibleModuleDockerClient,
DockerException, DockerException,
@ -139,12 +137,12 @@ class PullManager(BaseComposeManager):
if self.policy != 'always' and self.compose_version < LooseVersion('2.22.0'): if self.policy != 'always' and self.compose_version < LooseVersion('2.22.0'):
# https://github.com/docker/compose/pull/10981 - 2.22.0 # https://github.com/docker/compose/pull/10981 - 2.22.0
self.fail('A pull policy other than always is only supported since Docker Compose 2.22.0. {0} has version {1}'.format( self.fail(
self.client.get_cli(), self.compose_version)) f'A pull policy other than always is only supported since Docker Compose 2.22.0. {self.client.get_cli()} has version {self.compose_version}')
if self.ignore_buildable and self.compose_version < LooseVersion('2.15.0'): if self.ignore_buildable and self.compose_version < LooseVersion('2.15.0'):
# https://github.com/docker/compose/pull/10134 - 2.15.0 # https://github.com/docker/compose/pull/10134 - 2.15.0
self.fail('--ignore-buildable is only supported since Docker Compose 2.15.0. {0} has version {1}'.format( self.fail(
self.client.get_cli(), self.compose_version)) f'--ignore-buildable is only supported since Docker Compose 2.15.0. {self.client.get_cli()} has version {self.compose_version}')
def get_pull_cmd(self, dry_run, no_start=False): def get_pull_cmd(self, dry_run, no_start=False):
args = self.get_base_args() + ['pull'] args = self.get_base_args() + ['pull']
@ -196,7 +194,7 @@ def main():
manager.cleanup() manager.cleanup()
client.module.exit_json(**result) client.module.exit_json(**result)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -240,7 +240,7 @@ rc:
import shlex import shlex
import traceback import traceback
from ansible.module_utils.common.text.converters import to_text, to_native from ansible.module_utils.common.text.converters import to_text
from ansible_collections.community.docker.plugins.module_utils.common_cli import ( from ansible_collections.community.docker.plugins.module_utils.common_cli import (
AnsibleModuleDockerClient, AnsibleModuleDockerClient,
@ -349,7 +349,7 @@ class ExecManager(BaseComposeManager):
if self.env: if self.env:
for name, value in list(self.env.items()): for name, value in list(self.env.items()):
args.append('--env') args.append('--env')
args.append('{0}={1}'.format(name, value)) args.append(f'{name}={value}')
args.append('--') args.append('--')
args.append(self.service) args.append(self.service)
if self.argv: if self.argv:
@ -428,7 +428,7 @@ def main():
manager.cleanup() manager.cleanup()
client.module.exit_json(**result) client.module.exit_json(**result)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -242,7 +242,7 @@ class ConfigManager(DockerBaseClass):
with open(data_src, 'rb') as f: with open(data_src, 'rb') as f:
self.data = f.read() self.data = f.read()
except Exception as exc: except Exception as exc:
self.client.fail('Error while reading {src}: {error}'.format(src=data_src, error=to_native(exc))) self.client.fail(f'Error while reading {data_src}: {exc}')
self.labels = parameters.get('labels') self.labels = parameters.get('labels')
self.force = parameters.get('force') self.force = parameters.get('force')
self.rolling_versions = parameters.get('rolling_versions') self.rolling_versions = parameters.get('rolling_versions')
@ -287,7 +287,7 @@ class ConfigManager(DockerBaseClass):
self.configs = [ self.configs = [
config config
for config in configs for config in configs
if config['Spec']['Name'].startswith('{name}_v'.format(name=self.name)) if config['Spec']['Name'].startswith(f'{self.name}_v')
] ]
self.configs.sort(key=self.get_version) self.configs.sort(key=self.get_version)
else: else:
@ -305,7 +305,7 @@ class ConfigManager(DockerBaseClass):
if self.rolling_versions: if self.rolling_versions:
self.version += 1 self.version += 1
labels['ansible_version'] = str(self.version) labels['ansible_version'] = str(self.version)
self.name = '{name}_v{version}'.format(name=self.name, version=self.version) self.name = f'{self.name}_v{self.version}'
if self.labels: if self.labels:
labels.update(self.labels) labels.update(self.labels)
@ -425,10 +425,10 @@ def main():
ConfigManager(client, results)() ConfigManager(client, results)()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
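
A rough sketch of the rolling-versions naming scheme used above: existing configs named "<name>_v<N>" are collected and the next one becomes "<name>_v<N+1>". The real module reads the version from an ansible_version label via get_version; the stand-in below derives it from the name instead:

def next_rolling_name(name, existing_names):
    prefix = f'{name}_v'
    versions = [int(n[len(prefix):]) for n in existing_names if n.startswith(prefix)]
    version = max(versions, default=0) + 1
    return f'{name}_v{version}', version

print(next_rolling_name('app_config', ['app_config_v1', 'app_config_v2']))  # ('app_config_v3', 3)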

View File

@ -451,7 +451,7 @@ def is_file_idempotent(client, container, managed_path, container_path, follow_l
file_stat = os.stat(managed_path) if local_follow_links else os.lstat(managed_path) file_stat = os.stat(managed_path) if local_follow_links else os.lstat(managed_path)
except OSError as exc: except OSError as exc:
if exc.errno == 2: if exc.errno == 2:
raise DockerFileNotFound('Cannot find local file {managed_path}'.format(managed_path=managed_path)) raise DockerFileNotFound(f'Cannot find local file {managed_path}')
raise raise
if mode is None: if mode is None:
mode = stat.S_IMODE(file_stat.st_mode) mode = stat.S_IMODE(file_stat.st_mode)
@ -786,13 +786,13 @@ def parse_modern(mode):
return int(to_native(mode), 8) return int(to_native(mode), 8)
if isinstance(mode, int): if isinstance(mode, int):
return mode return mode
raise TypeError('must be an octal string or an integer, got {mode!r}'.format(mode=mode)) raise TypeError(f'must be an octal string or an integer, got {mode!r}')
def parse_octal_string_only(mode): def parse_octal_string_only(mode):
if isinstance(mode, str): if isinstance(mode, str):
return int(to_native(mode), 8) return int(to_native(mode), 8)
raise TypeError('must be an octal string, got {mode!r}'.format(mode=mode)) raise TypeError(f'must be an octal string, got {mode!r}')
def main(): def main():
@ -847,16 +847,16 @@ def main():
elif mode_parse == 'octal_string_only': elif mode_parse == 'octal_string_only':
mode = parse_octal_string_only(mode) mode = parse_octal_string_only(mode)
except (TypeError, ValueError) as e: except (TypeError, ValueError) as e:
client.fail("Error while parsing 'mode': {error}".format(error=e)) client.fail(f"Error while parsing 'mode': {e}")
if mode < 0: if mode < 0:
client.fail("'mode' must not be negative; got {mode}".format(mode=mode)) client.fail(f"'mode' must not be negative; got {mode}")
if content is not None: if content is not None:
if client.module.params['content_is_b64']: if client.module.params['content_is_b64']:
try: try:
content = base64.b64decode(content) content = base64.b64decode(content)
except Exception as e: # depending on Python version and error, multiple different exceptions can be raised except Exception as e: # depending on Python version and error, multiple different exceptions can be raised
client.fail('Cannot Base64 decode the content option: {0}'.format(e)) client.fail(f'Cannot Base64 decode the content option: {e}')
else: else:
content = to_bytes(content) content = to_bytes(content)
@ -901,21 +901,21 @@ def main():
# Can happen if a user explicitly passes `content: null` or `path: null`... # Can happen if a user explicitly passes `content: null` or `path: null`...
client.fail('One of path and content must be supplied') client.fail('One of path and content must be supplied')
except NotFound as exc: except NotFound as exc:
client.fail('Could not find container "{1}" or resource in it ({0})'.format(exc, container)) client.fail(f'Could not find container "{container}" or resource in it ({exc})')
except APIError as exc: except APIError as exc:
client.fail('An unexpected Docker error occurred for container "{1}": {0}'.format(exc, container), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred for container "{container}": {exc}', exception=traceback.format_exc())
except DockerException as exc: except DockerException as exc:
client.fail('An unexpected Docker error occurred for container "{1}": {0}'.format(exc, container), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred for container "{container}": {exc}', exception=traceback.format_exc())
except RequestException as exc: except RequestException as exc:
client.fail( client.fail(
'An unexpected requests error occurred for container "{1}" when trying to talk to the Docker daemon: {0}'.format(exc, container), f'An unexpected requests error occurred for container "{container}" when trying to talk to the Docker daemon: {exc}',
exception=traceback.format_exc()) exception=traceback.format_exc())
except DockerUnexpectedError as exc: except DockerUnexpectedError as exc:
client.fail('Unexpected error: {exc}'.format(exc=to_native(exc)), exception=traceback.format_exc()) client.fail(f'Unexpected error: {exc}', exception=traceback.format_exc())
except DockerFileCopyError as exc: except DockerFileCopyError as exc:
client.fail(to_native(exc)) client.fail(to_native(exc))
except OSError as exc: except OSError as exc:
client.fail('Unexpected error: {exc}'.format(exc=to_native(exc)), exception=traceback.format_exc()) client.fail(f'Unexpected error: {exc}', exception=traceback.format_exc())
if __name__ == '__main__': if __name__ == '__main__':
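
The two mode parsers touched above, reproduced standalone for clarity; the behavior mirrors the hunk ("modern" accepts an octal string or an int, the strict variant accepts octal strings only), while the example values are made up:

def parse_modern(mode):
    if isinstance(mode, str):
        return int(mode, 8)
    if isinstance(mode, int):
        return mode
    raise TypeError(f'must be an octal string or an integer, got {mode!r}')

def parse_octal_string_only(mode):
    if isinstance(mode, str):
        return int(mode, 8)
    raise TypeError(f'must be an octal string, got {mode!r}')

assert parse_modern('0644') == 0o644
assert parse_modern(420) == 420                # 420 == 0o644
assert parse_octal_string_only('0755') == 0o755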

View File

@ -167,7 +167,7 @@ import selectors
import shlex import shlex
import traceback import traceback
from ansible.module_utils.common.text.converters import to_text, to_bytes, to_native from ansible.module_utils.common.text.converters import to_text, to_bytes
from ansible_collections.community.docker.plugins.module_utils.common_api import ( from ansible_collections.community.docker.plugins.module_utils.common_api import (
AnsibleDockerClient, AnsibleDockerClient,
@ -295,16 +295,16 @@ def main():
rc=result.get('ExitCode') or 0, rc=result.get('ExitCode') or 0,
) )
except NotFound: except NotFound:
client.fail('Could not find container "{0}"'.format(container)) client.fail(f'Could not find container "{container}"')
except APIError as e: except APIError as e:
if e.response is not None and e.response.status_code == 409: if e.response is not None and e.response.status_code == 409:
client.fail('The container "{0}" has been paused ({1})'.format(container, to_native(e))) client.fail(f'The container "{container}" has been paused ({e})')
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())

View File

@ -77,8 +77,6 @@ container:
import traceback import traceback
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.docker.plugins.module_utils.common_api import ( from ansible_collections.community.docker.plugins.module_utils.common_api import (
AnsibleDockerClient, AnsibleDockerClient,
RequestException, RequestException,
@ -105,10 +103,10 @@ def main():
container=container, container=container,
) )
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())

View File

@ -175,7 +175,7 @@ current_context_name:
import traceback import traceback
from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.converters import to_native, to_text from ansible.module_utils.common.text.converters import to_text
from ansible_collections.community.docker.plugins.module_utils._api.context.api import ( from ansible_collections.community.docker.plugins.module_utils._api.context.api import (
ContextAPI, ContextAPI,
@ -226,7 +226,7 @@ def context_to_json(context, current):
if proto == 'http+unix': if proto == 'http+unix':
proto = 'unix' proto = 'unix'
if proto: if proto:
host_str = "{0}://{1}".format(proto, host_str) host_str = f"{proto}://{host_str}"
# Create config for the modules # Create config for the modules
module_config['docker_host'] = host_str module_config['docker_host'] = host_str
@ -274,15 +274,12 @@ def main():
if module.params['name']: if module.params['name']:
contexts = [ContextAPI.get_context(module.params['name'])] contexts = [ContextAPI.get_context(module.params['name'])]
if not contexts[0]: if not contexts[0]:
module.fail_json(msg="There is no context of name {name!r}".format(name=module.params['name'])) module.fail_json(msg=f"There is no context of name {module.params['name']!r}")
elif module.params['only_current']: elif module.params['only_current']:
contexts = [ContextAPI.get_context(current_context_name)] contexts = [ContextAPI.get_context(current_context_name)]
if not contexts[0]: if not contexts[0]:
module.fail_json( module.fail_json(
msg="There is no context of name {name!r}, which is configured as the default context ({source})".format( msg=f"There is no context of name {current_context_name!r}, which is configured as the default context ({current_context_source})",
name=current_context_name,
source=current_context_source,
),
) )
else: else:
contexts = ContextAPI.contexts() contexts = ContextAPI.contexts()
@ -298,9 +295,9 @@ def main():
current_context_name=current_context_name, current_context_name=current_context_name,
) )
except ContextException as e: except ContextException as e:
module.fail_json(msg='Error when handling Docker contexts: {0}'.format(to_native(e)), exception=traceback.format_exc()) module.fail_json(msg=f'Error when handling Docker contexts: {e}', exception=traceback.format_exc())
except DockerException as e: except DockerException as e:
module.fail_json(msg='An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) module.fail_json(msg=f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -370,10 +370,10 @@ def main():
DockerHostManager(client, results) DockerHostManager(client, results)
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())

View File

@ -826,7 +826,7 @@ class ImageManager(DockerBaseClass):
container_limits = self.container_limits or {} container_limits = self.container_limits or {}
for key in container_limits.keys(): for key in container_limits.keys():
if key not in CONTAINER_LIMITS_KEYS: if key not in CONTAINER_LIMITS_KEYS:
raise DockerException('Invalid container_limits key {key}'.format(key=key)) raise DockerException(f'Invalid container_limits key {key}')
dockerfile = self.dockerfile dockerfile = self.dockerfile
if self.build_path.startswith(('http://', 'https://', 'git://', 'github.com/', 'git@')): if self.build_path.startswith(('http://', 'https://', 'git://', 'github.com/', 'git@')):
@ -1068,7 +1068,7 @@ def main():
) )
if not is_valid_tag(client.module.params['tag'], allow_empty=True): if not is_valid_tag(client.module.params['tag'], allow_empty=True):
client.fail('"{0}" is not a valid docker tag!'.format(client.module.params['tag'])) client.fail(f'"{client.module.params["tag"]}" is not a valid docker tag!')
if client.module.params['source'] == 'build': if client.module.params['source'] == 'build':
if not client.module.params['build'] or not client.module.params['build'].get('path'): if not client.module.params['build'] or not client.module.params['build'].get('path'):
@ -1084,10 +1084,10 @@ def main():
ImageManager(client, results) ImageManager(client, results)
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())

View File

@ -323,7 +323,8 @@ def dict_to_list(dictionary, concat='='):
def _quote_csv(input): def _quote_csv(input):
if input.strip() == input and all(i not in input for i in '",\r\n'): if input.strip() == input and all(i not in input for i in '",\r\n'):
return input return input
return '"{0}"'.format(input.replace('"', '""')) input = input.replace('"', '""')
return f'"{input}"'
class ImageBuilder(DockerBaseClass): class ImageBuilder(DockerBaseClass):
@ -349,33 +350,29 @@ class ImageBuilder(DockerBaseClass):
buildx = self.client.get_client_plugin_info('buildx') buildx = self.client.get_client_plugin_info('buildx')
if buildx is None: if buildx is None:
self.fail('Docker CLI {0} does not have the buildx plugin installed'.format(self.client.get_cli())) self.fail(f'Docker CLI {self.client.get_cli()} does not have the buildx plugin installed')
buildx_version = buildx['Version'].lstrip('v') buildx_version = buildx['Version'].lstrip('v')
if self.secrets: if self.secrets:
for secret in self.secrets: for secret in self.secrets:
if secret['type'] in ('env', 'value'): if secret['type'] in ('env', 'value'):
if LooseVersion(buildx_version) < LooseVersion('0.6.0'): if LooseVersion(buildx_version) < LooseVersion('0.6.0'):
self.fail('The Docker buildx plugin has version {version}, but 0.6.0 is needed for secrets of type=env and type=value'.format( self.fail(f'The Docker buildx plugin has version {buildx_version}, but 0.6.0 is needed for secrets of type=env and type=value')
version=buildx_version,
))
if self.outputs and len(self.outputs) > 1: if self.outputs and len(self.outputs) > 1:
if LooseVersion(buildx_version) < LooseVersion('0.13.0'): if LooseVersion(buildx_version) < LooseVersion('0.13.0'):
self.fail('The Docker buildx plugin has version {version}, but 0.13.0 is needed to specify more than one output'.format( self.fail(f'The Docker buildx plugin has version {buildx_version}, but 0.13.0 is needed to specify more than one output')
version=buildx_version,
))
self.path = parameters['path'] self.path = parameters['path']
if not os.path.isdir(self.path): if not os.path.isdir(self.path):
self.fail('"{0}" is not an existing directory'.format(self.path)) self.fail(f'"{self.path}" is not an existing directory')
self.dockerfile = parameters['dockerfile'] self.dockerfile = parameters['dockerfile']
if self.dockerfile and not os.path.isfile(os.path.join(self.path, self.dockerfile)): if self.dockerfile and not os.path.isfile(os.path.join(self.path, self.dockerfile)):
self.fail('"{0}" is not an existing file'.format(os.path.join(self.path, self.dockerfile))) self.fail(f'"{os.path.join(self.path, self.dockerfile)}" is not an existing file')
self.name = parameters['name'] self.name = parameters['name']
self.tag = parameters['tag'] self.tag = parameters['tag']
if not is_valid_tag(self.tag, allow_empty=True): if not is_valid_tag(self.tag, allow_empty=True):
self.fail('"{0}" is not a valid docker tag'.format(self.tag)) self.fail(f'"{self.tag}" is not a valid docker tag')
if is_image_name_id(self.name): if is_image_name_id(self.name):
self.fail('Image name must not be a digest') self.fail('Image name must not be a digest')
@ -406,11 +403,8 @@ class ImageBuilder(DockerBaseClass):
}) })
if LooseVersion(buildx_version) < LooseVersion('0.13.0'): if LooseVersion(buildx_version) < LooseVersion('0.13.0'):
self.fail( self.fail(
"The output does not include an image with name {name_tag}, and the Docker" f"The output does not include an image with name {name_tag}, and the Docker"
" buildx plugin has version {version} which only supports one output.".format( f" buildx plugin has version {buildx_version} which only supports one output."
name_tag=name_tag,
version=buildx_version,
),
) )
def fail(self, msg, **kwargs): def fail(self, msg, **kwargs):
@ -450,41 +444,47 @@ class ImageBuilder(DockerBaseClass):
if self.secrets: if self.secrets:
random_prefix = None random_prefix = None
for index, secret in enumerate(self.secrets): for index, secret in enumerate(self.secrets):
sid = secret['id']
if secret['type'] == 'file': if secret['type'] == 'file':
args.extend(['--secret', 'id={id},type=file,src={src}'.format(id=secret['id'], src=secret['src'])]) src = secret['src']
args.extend(['--secret', f'id={sid},type=file,src={src}'])
if secret['type'] == 'env': if secret['type'] == 'env':
args.extend(['--secret', 'id={id},type=env,env={env}'.format(id=secret['id'], env=secret['src'])]) env = secret['src']
args.extend(['--secret', f'id={sid},type=env,env={env}'])
if secret['type'] == 'value': if secret['type'] == 'value':
# We pass values on using environment variables. The user has been warned in the documentation # We pass values on using environment variables. The user has been warned in the documentation
# that they should only use this mechanism when being comfortable with it. # that they should only use this mechanism when being comfortable with it.
if random_prefix is None: if random_prefix is None:
# Use /dev/urandom to generate some entropy to make the environment variable's name unguessable # Use /dev/urandom to generate some entropy to make the environment variable's name unguessable
random_prefix = base64.b64encode(os.urandom(16)).decode('utf-8').replace('=', '') random_prefix = base64.b64encode(os.urandom(16)).decode('utf-8').replace('=', '')
env_name = 'ANSIBLE_DOCKER_COMPOSE_ENV_SECRET_{random}_{id}'.format( env_name = f'ANSIBLE_DOCKER_COMPOSE_ENV_SECRET_{random_prefix}_{index}'
random=random_prefix,
id=index,
)
environ_update[env_name] = secret['value'] environ_update[env_name] = secret['value']
args.extend(['--secret', 'id={id},type=env,env={env}'.format(id=secret['id'], env=env_name)]) args.extend(['--secret', f'id={sid},type=env,env={env_name}'])
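As a side note on the value-type secrets handled above: the secret value itself never appears on the command line. It is exported through a one-off environment variable whose name carries a random, unguessable prefix, and only that variable name is referenced via --secret. A minimal self-contained sketch of the idea, using hypothetical secret data rather than the module's real inputs:

    import base64
    import os

    secrets = [{'id': 'db_password', 'type': 'value', 'value': 's3cr3t'}]  # hypothetical input
    args = []
    environ_update = {}
    random_prefix = None
    for index, secret in enumerate(secrets):
        if secret['type'] == 'value':
            if random_prefix is None:
                # entropy from os.urandom() makes the variable name unguessable
                random_prefix = base64.b64encode(os.urandom(16)).decode('utf-8').replace('=', '')
            env_name = f'ANSIBLE_DOCKER_COMPOSE_ENV_SECRET_{random_prefix}_{index}'
            environ_update[env_name] = secret['value']
            args.extend(['--secret', f"id={secret['id']},type=env,env={env_name}"])

    # args now ends with something like:
    #   ['--secret', 'id=db_password,type=env,env=ANSIBLE_DOCKER_COMPOSE_ENV_SECRET_<prefix>_0']
    # environ_update is presumably merged into the environment of the docker CLI call.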
if self.outputs: if self.outputs:
for output in self.outputs: for output in self.outputs:
subargs = [] subargs = []
if output['type'] == 'local': if output['type'] == 'local':
subargs.extend(['type=local', 'dest={dest}'.format(dest=output['dest'])]) dest = output['dest']
subargs.extend(['type=local', f'dest={dest}'])
if output['type'] == 'tar': if output['type'] == 'tar':
subargs.extend(['type=tar', 'dest={dest}'.format(dest=output['dest'])]) dest = output['dest']
subargs.extend(['type=tar', f'dest={dest}'])
if output['type'] == 'oci': if output['type'] == 'oci':
subargs.extend(['type=oci', 'dest={dest}'.format(dest=output['dest'])]) dest = output['dest']
subargs.extend(['type=oci', f'dest={dest}'])
if output['type'] == 'docker': if output['type'] == 'docker':
subargs.append('type=docker') subargs.append('type=docker')
dest = output['dest']
if output['dest'] is not None: if output['dest'] is not None:
subargs.append('dest={dest}'.format(dest=output['dest'])) subargs.append(f'dest={dest}')
if output['context'] is not None: if output['context'] is not None:
subargs.append('context={context}'.format(context=output['context'])) context = output['context']
subargs.append(f'context={context}')
if output['type'] == 'image': if output['type'] == 'image':
subargs.append('type=image') subargs.append('type=image')
if output['name'] is not None: if output['name'] is not None:
subargs.append('name={name}'.format(name=','.join(output['name']))) name = ','.join(output['name'])
subargs.append(f'name={name}')
if output['push']: if output['push']:
subargs.append('push=true') subargs.append('push=true')
if subargs: if subargs:
@ -590,7 +590,7 @@ def main():
results = ImageBuilder(client).build_image() results = ImageBuilder(client).build_image()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == '__main__': if __name__ == '__main__':
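All of the message conversions in this commit rely on the same equivalence: an f-string evaluates the embedded expressions in place and honours the same conversion flags (such as !r) as str.format(). A tiny sketch with hypothetical values, not taken from any of the modules:

    tag = 'not a tag'
    assert '"{0}" is not a valid docker tag'.format(tag) == f'"{tag}" is not a valid docker tag'

    # Conversion specifiers carry over unchanged:
    err = ValueError('boom')
    assert 'failed: {0!r}'.format(err) == f'failed: {err!r}'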
@ -135,7 +135,7 @@ class ImageExportManager(DockerBaseClass):
self.tag = parameters['tag'] self.tag = parameters['tag']
if not is_valid_tag(self.tag, allow_empty=True): if not is_valid_tag(self.tag, allow_empty=True):
self.fail('"{0}" is not a valid docker tag'.format(self.tag)) self.fail(f'"{self.tag}" is not a valid docker tag')
# If name contains a tag, it takes precedence over tag parameter. # If name contains a tag, it takes precedence over tag parameter.
self.names = [] self.names = []
@ -272,10 +272,10 @@ def main():
results = ImageExportManager(client).run() results = ImageExportManager(client).run()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -234,10 +234,10 @@ def main():
ImageManager(client, results) ImageManager(client, results)
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -82,8 +82,6 @@ images:
import errno import errno
import traceback import traceback
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.docker.plugins.module_utils.common_api import ( from ansible_collections.community.docker.plugins.module_utils.common_api import (
AnsibleDockerClient, AnsibleDockerClient,
RequestException, RequestException,
@ -127,19 +125,19 @@ class ImageManager(DockerBaseClass):
# Load image(s) from file # Load image(s) from file
load_output = [] load_output = []
try: try:
self.log("Opening image {0}".format(self.path)) self.log(f"Opening image {self.path}")
with open(self.path, 'rb') as image_tar: with open(self.path, 'rb') as image_tar:
self.log("Loading images from {0}".format(self.path)) self.log(f"Loading images from {self.path}")
res = self.client._post(self.client._url("/images/load"), data=image_tar, stream=True) res = self.client._post(self.client._url("/images/load"), data=image_tar, stream=True)
for line in self.client._stream_helper(res, decode=True): for line in self.client._stream_helper(res, decode=True):
self.log(line, pretty_print=True) self.log(line, pretty_print=True)
self._extract_output_line(line, load_output) self._extract_output_line(line, load_output)
except EnvironmentError as exc: except EnvironmentError as exc:
if exc.errno == errno.ENOENT: if exc.errno == errno.ENOENT:
self.client.fail("Error opening archive {0} - {1}".format(self.path, to_native(exc))) self.client.fail(f"Error opening archive {self.path} - {exc}")
self.client.fail("Error loading archive {0} - {1}".format(self.path, to_native(exc)), stdout='\n'.join(load_output)) self.client.fail(f"Error loading archive {self.path} - {exc}", stdout='\n'.join(load_output))
except Exception as exc: except Exception as exc:
self.client.fail("Error loading archive {0} - {1}".format(self.path, to_native(exc)), stdout='\n'.join(load_output)) self.client.fail(f"Error loading archive {self.path} - {exc}", stdout='\n'.join(load_output))
# Collect loaded images # Collect loaded images
loaded_images = [] loaded_images = []
@ -160,7 +158,7 @@ class ImageManager(DockerBaseClass):
image_name, tag = image_name.rsplit(':', 1) image_name, tag = image_name.rsplit(':', 1)
images.append(self.client.find_image(image_name, tag)) images.append(self.client.find_image(image_name, tag))
else: else:
self.client.module.warn('Image name "{0}" is neither ID nor has a tag'.format(image_name)) self.client.module.warn(f'Image name "{image_name}" is neither ID nor has a tag')
self.results['image_names'] = loaded_images self.results['image_names'] = loaded_images
self.results['images'] = images self.results['images'] = images
@ -185,10 +183,10 @@ def main():
ImageManager(client, results) ImageManager(client, results)
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -92,8 +92,6 @@ image:
import traceback import traceback
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.docker.plugins.module_utils.common_api import ( from ansible_collections.community.docker.plugins.module_utils.common_api import (
AnsibleDockerClient, AnsibleDockerClient,
RequestException, RequestException,
@ -142,7 +140,7 @@ class ImagePuller(DockerBaseClass):
if is_image_name_id(self.name): if is_image_name_id(self.name):
self.client.fail("Cannot pull an image by ID") self.client.fail("Cannot pull an image by ID")
if not is_valid_tag(self.tag, allow_empty=True): if not is_valid_tag(self.tag, allow_empty=True):
self.client.fail('"{0}" is not a valid docker tag!'.format(self.tag)) self.client.fail(f'"{self.tag}" is not a valid docker tag!')
# If name contains a tag, it takes precedence over tag parameter. # If name contains a tag, it takes precedence over tag parameter.
repo, repo_tag = parse_repository_tag(self.name) repo, repo_tag = parse_repository_tag(self.name)
@ -212,10 +210,10 @@ def main():
results = ImagePuller(client).pull() results = ImagePuller(client).pull()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -112,7 +112,7 @@ class ImagePusher(DockerBaseClass):
if is_image_name_id(self.name): if is_image_name_id(self.name):
self.client.fail("Cannot push an image by ID") self.client.fail("Cannot push an image by ID")
if not is_valid_tag(self.tag, allow_empty=True): if not is_valid_tag(self.tag, allow_empty=True):
self.client.fail('"{0}" is not a valid docker tag!'.format(self.tag)) self.client.fail(f'"{self.tag}" is not a valid docker tag!')
# If name contains a tag, it takes precedence over tag parameter. # If name contains a tag, it takes precedence over tag parameter.
repo, repo_tag = parse_repository_tag(self.name) repo, repo_tag = parse_repository_tag(self.name)
@ -123,7 +123,7 @@ class ImagePusher(DockerBaseClass):
if is_image_name_id(self.tag): if is_image_name_id(self.tag):
self.client.fail("Cannot push an image by digest") self.client.fail("Cannot push an image by digest")
if not is_valid_tag(self.tag, allow_empty=False): if not is_valid_tag(self.tag, allow_empty=False):
self.client.fail('"{0}" is not a valid docker tag!'.format(self.tag)) self.client.fail(f'"{self.tag}" is not a valid docker tag!')
def push(self): def push(self):
image = self.client.find_image(name=self.name, tag=self.tag) image = self.client.find_image(name=self.name, tag=self.tag)
@ -190,10 +190,10 @@ def main():
results = ImagePusher(client).push() results = ImagePusher(client).push()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -134,7 +134,7 @@ class ImageRemover(DockerBaseClass):
self.prune = parameters['prune'] self.prune = parameters['prune']
if not is_valid_tag(self.tag, allow_empty=True): if not is_valid_tag(self.tag, allow_empty=True):
self.fail('"{0}" is not a valid docker tag'.format(self.tag)) self.fail(f'"{self.tag}" is not a valid docker tag')
# If name contains a tag, it takes precedence over tag parameter. # If name contains a tag, it takes precedence over tag parameter.
if not is_image_name_id(self.name): if not is_image_name_id(self.name):
@ -257,10 +257,10 @@ def main():
results = ImageRemover(client).absent() results = ImageRemover(client).absent()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -153,7 +153,7 @@ class ImageTagger(DockerBaseClass):
self.name = parameters['name'] self.name = parameters['name']
self.tag = parameters['tag'] self.tag = parameters['tag']
if not is_valid_tag(self.tag, allow_empty=True): if not is_valid_tag(self.tag, allow_empty=True):
self.fail('"{0}" is not a valid docker tag'.format(self.tag)) self.fail(f'"{self.tag}" is not a valid docker tag')
# If name contains a tag, it takes precedence over tag parameter. # If name contains a tag, it takes precedence over tag parameter.
if not is_image_name_id(self.name): if not is_image_name_id(self.name):
@ -264,10 +264,10 @@ def main():
results = ImageTagger(client).tag_images() results = ImageTagger(client).tag_images()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -435,10 +435,10 @@ def main():
del results['actions'] del results['actions']
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -355,7 +355,7 @@ def validate_cidr(cidr):
return 'ipv4' return 'ipv4'
elif CIDR_IPV6.match(cidr): elif CIDR_IPV6.match(cidr):
return 'ipv6' return 'ipv6'
raise ValueError('"{0}" is not a valid CIDR'.format(cidr)) raise ValueError(f'"{cidr}" is not a valid CIDR')
def normalize_ipam_config_key(key): def normalize_ipam_config_key(key):
@ -621,10 +621,10 @@ class DockerNetworkManager(object):
return bool(container) return bool(container)
except DockerException as e: except DockerException as e:
self.client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) self.client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
self.client.fail( self.client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
def connect_containers(self): def connect_containers(self):
@ -638,7 +638,7 @@ class DockerNetworkManager(object):
self.client.post_json('/networks/{0}/connect', self.parameters.name, data=data) self.client.post_json('/networks/{0}/connect', self.parameters.name, data=data)
self.results['actions'].append("Connected container %s" % (name,)) self.results['actions'].append("Connected container %s" % (name,))
self.results['changed'] = True self.results['changed'] = True
self.diff_tracker.add('connected.{0}'.format(name), parameter=True, active=False) self.diff_tracker.add(f'connected.{name}', parameter=True, active=False)
def disconnect_missing(self): def disconnect_missing(self):
if not self.existing_network: if not self.existing_network:
@ -664,7 +664,7 @@ class DockerNetworkManager(object):
self.client.post_json('/networks/{0}/disconnect', self.parameters.name, data=data) self.client.post_json('/networks/{0}/disconnect', self.parameters.name, data=data)
self.results['actions'].append("Disconnected container %s" % (container_name,)) self.results['actions'].append("Disconnected container %s" % (container_name,))
self.results['changed'] = True self.results['changed'] = True
self.diff_tracker.add('connected.{0}'.format(container_name), self.diff_tracker.add(f'connected.{container_name}',
parameter=False, parameter=False,
active=True) active=True)
@ -747,10 +747,10 @@ def main():
cm = DockerNetworkManager(client) cm = DockerNetworkManager(client)
client.module.exit_json(**cm.results) client.module.exit_json(**cm.results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -98,8 +98,6 @@ network:
import traceback import traceback
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.docker.plugins.module_utils.common_api import ( from ansible_collections.community.docker.plugins.module_utils.common_api import (
AnsibleDockerClient, AnsibleDockerClient,
RequestException, RequestException,
@ -126,10 +124,10 @@ def main():
network=network, network=network,
) )
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -293,10 +293,10 @@ def main():
SwarmNodeManager(client, results) SwarmNodeManager(client, results)
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -88,8 +88,6 @@ nodes:
import traceback import traceback
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.docker.plugins.module_utils.common import ( from ansible_collections.community.docker.plugins.module_utils.common import (
RequestException, RequestException,
) )
@ -149,10 +147,10 @@ def main():
nodes=nodes, nodes=nodes,
) )
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -384,10 +384,10 @@ def main():
cm = DockerPluginManager(client) cm = DockerPluginManager(client)
client.module.exit_json(**cm.result) client.module.exit_json(**cm.result)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -230,7 +230,6 @@ builder_cache_caches_deleted:
import traceback import traceback
from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.formatters import human_to_bytes from ansible.module_utils.common.text.formatters import human_to_bytes
from ansible_collections.community.docker.plugins.module_utils.common_api import ( from ansible_collections.community.docker.plugins.module_utils.common_api import (
@ -276,7 +275,7 @@ def main():
try: try:
builder_cache_keep_storage = human_to_bytes(client.module.params.get('builder_cache_keep_storage')) builder_cache_keep_storage = human_to_bytes(client.module.params.get('builder_cache_keep_storage'))
except ValueError as exc: except ValueError as exc:
client.module.fail_json(msg='Error while parsing value of builder_cache_keep_storage: {0}'.format(exc)) client.module.fail_json(msg=f'Error while parsing value of builder_cache_keep_storage: {exc}')
try: try:
result = dict() result = dict()
@ -337,10 +336,10 @@ def main():
result['changed'] = changed result['changed'] = changed
client.module.exit_json(**result) client.module.exit_json(**result)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -234,7 +234,7 @@ class SecretManager(DockerBaseClass):
with open(data_src, 'rb') as f: with open(data_src, 'rb') as f:
self.data = f.read() self.data = f.read()
except Exception as exc: except Exception as exc:
self.client.fail('Error while reading {src}: {error}'.format(src=data_src, error=to_native(exc))) self.client.fail(f'Error while reading {data_src}: {exc}')
self.labels = parameters.get('labels') self.labels = parameters.get('labels')
self.force = parameters.get('force') self.force = parameters.get('force')
self.rolling_versions = parameters.get('rolling_versions') self.rolling_versions = parameters.get('rolling_versions')
@ -278,7 +278,7 @@ class SecretManager(DockerBaseClass):
self.secrets = [ self.secrets = [
secret secret
for secret in secrets for secret in secrets
if secret['Spec']['Name'].startswith('{name}_v'.format(name=self.name)) if secret['Spec']['Name'].startswith(f'{self.name}_v')
] ]
self.secrets.sort(key=self.get_version) self.secrets.sort(key=self.get_version)
else: else:
@ -296,7 +296,7 @@ class SecretManager(DockerBaseClass):
if self.rolling_versions: if self.rolling_versions:
self.version += 1 self.version += 1
labels['ansible_version'] = str(self.version) labels['ansible_version'] = str(self.version)
self.name = '{name}_v{version}'.format(name=self.name, version=self.version) self.name = f'{self.name}_v{self.version}'
if self.labels: if self.labels:
labels.update(self.labels) labels.update(self.labels)
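To make the rolling-version naming above concrete, a short sketch with hypothetical values (in the module the current version is derived from the existing secret's labels):

    name, version = 'db_password', 2   # hypothetical current state
    version += 1
    versioned_name = f'{name}_v{version}'   # -> 'db_password_v3'
    # existing versions are found by prefix, matching the startswith() filter shown earlier
    assert versioned_name.startswith(f'{name}_v')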
@ -397,10 +397,10 @@ def main():
SecretManager(client, results)() SecretManager(client, results)()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -340,7 +340,7 @@ def main():
) )
client.module.exit_json(changed=False) client.module.exit_json(changed=False)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == "__main__": if __name__ == "__main__":
@ -110,7 +110,7 @@ def main():
results=ret, results=ret,
) )
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == "__main__": if __name__ == "__main__":
@ -120,7 +120,7 @@ def main():
results=ret, results=ret,
) )
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
if __name__ == "__main__": if __name__ == "__main__":
@ -707,10 +707,10 @@ def main():
SwarmManager(client, results)() SwarmManager(client, results)()
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -367,10 +367,10 @@ def main():
results.update(client.fail_results) results.update(client.fail_results)
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -2792,10 +2792,10 @@ def main():
client.module.exit_json(**results) client.module.exit_json(**results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -64,8 +64,6 @@ service:
import traceback import traceback
from ansible.module_utils.common.text.converters import to_native
try: try:
from docker.errors import DockerException from docker.errors import DockerException
except ImportError: except ImportError:
@ -109,10 +107,10 @@ def main():
exists=bool(service) exists=bool(service)
) )
except DockerException as e: except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -304,10 +304,10 @@ def main():
cm = DockerVolumeManager(client) cm = DockerVolumeManager(client)
client.module.exit_json(**cm.results) client.module.exit_json(**cm.results)
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -109,10 +109,10 @@ def main():
volume=volume, volume=volume,
) )
except DockerException as e: except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc()) client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e: except RequestException as e:
client.fail( client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)), f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc()) exception=traceback.format_exc())
@ -28,7 +28,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
def fail(self, msg, **kwargs): def fail(self, msg, **kwargs):
if kwargs: if kwargs:
msg += '\nContext:\n' + '\n'.join(' {0} = {1!r}'.format(k, v) for (k, v) in kwargs.items()) msg += '\nContext:\n' + '\n'.join(f' {k} = {v!r}' for (k, v) in kwargs.items())
raise AnsibleConnectionFailure(msg) raise AnsibleConnectionFailure(msg)
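For illustration, the context block built by fail() puts each keyword argument on its own line using its repr(); a hypothetical example (indentation of the context lines is illustrative):

    kwargs = {'command': ['docker', 'exec'], 'rc': 126}
    msg = 'docker command failed'
    msg += '\nContext:\n' + '\n'.join(f'   {k} = {v!r}' for (k, v) in kwargs.items())
    print(msg)
    # docker command failed
    # Context:
    #    command = ['docker', 'exec']
    #    rc = 126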
def deprecate(self, msg, version=None, date=None, collection_name=None): def deprecate(self, msg, version=None, date=None, collection_name=None):
@ -26,7 +26,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
def fail(self, msg, **kwargs): def fail(self, msg, **kwargs):
if kwargs: if kwargs:
msg += '\nContext:\n' + '\n'.join(' {0} = {1!r}'.format(k, v) for (k, v) in kwargs.items()) msg += '\nContext:\n' + '\n'.join(f' {k} = {v!r}' for (k, v) in kwargs.items())
raise AnsibleConnectionFailure(msg) raise AnsibleConnectionFailure(msg)
def deprecate(self, msg, version=None, date=None, collection_name=None): def deprecate(self, msg, version=None, date=None, collection_name=None):
@ -107,8 +107,8 @@ class FakeClient(object):
'Image': host['Config']['Image'], 'Image': host['Config']['Image'],
'ImageId': host['Image'], 'ImageId': host['Image'],
}) })
self.get_results['/containers/{0}/json'.format(host['Name'])] = host self.get_results[f"/containers/{host['Name']}/json"] = host
self.get_results['/containers/{0}/json'.format(host['Id'])] = host self.get_results[f"/containers/{host['Id']}/json"] = host
self.get_results['/containers/json'] = list_reply self.get_results['/containers/json'] = list_reply
def get_json(self, url, *param, **kwargs): def get_json(self, url, *param, **kwargs):
@ -74,7 +74,7 @@ def fake_resp(method, url, *args, **kwargs):
elif (url, method) in fake_api.fake_responses: elif (url, method) in fake_api.fake_responses:
key = (url, method) key = (url, method)
if not key: if not key:
raise Exception('{method} {url}'.format(method=method, url=url)) raise Exception(f'{method} {url}')
status_code, content = fake_api.fake_responses[key]() status_code, content = fake_api.fake_responses[key]()
return response(status_code=status_code, content=content) return response(status_code=status_code, content=content)
@ -102,10 +102,8 @@ def fake_read_from_socket(self, response, stream, tty=False, demux=False):
return b"" return b""
url_base = '{prefix}/'.format(prefix=fake_api.prefix) url_base = f'{fake_api.prefix}/'
url_prefix = '{0}v{1}/'.format( url_prefix = f'{url_base}v{DEFAULT_DOCKER_API_VERSION}/'
url_base,
DEFAULT_DOCKER_API_VERSION)
class BaseAPIClientTest(unittest.TestCase): class BaseAPIClientTest(unittest.TestCase):
@ -147,22 +145,18 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_valid_resource(self): def test_url_valid_resource(self):
url = self.client._url('/hello/{0}/world', 'somename') url = self.client._url('/hello/{0}/world', 'somename')
assert url == '{0}{1}'.format(url_prefix, 'hello/somename/world') assert url == f'{url_prefix}hello/somename/world'
url = self.client._url( url = self.client._url(
'/hello/{0}/world/{1}', 'somename', 'someothername' '/hello/{0}/world/{1}', 'somename', 'someothername'
) )
assert url == '{0}{1}'.format( assert url == f'{url_prefix}hello/somename/world/someothername'
url_prefix, 'hello/somename/world/someothername'
)
url = self.client._url('/hello/{0}/world', 'some?name') url = self.client._url('/hello/{0}/world', 'some?name')
assert url == '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world') assert url == f'{url_prefix}hello/some%3Fname/world'
url = self.client._url("/images/{0}/push", "localhost:5000/image") url = self.client._url("/images/{0}/push", "localhost:5000/image")
assert url == '{0}{1}'.format( assert url == f'{url_prefix}images/localhost:5000/image/push'
url_prefix, 'images/localhost:5000/image/push'
)
def test_url_invalid_resource(self): def test_url_invalid_resource(self):
with pytest.raises(ValueError): with pytest.raises(ValueError):
@ -170,13 +164,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_no_resource(self): def test_url_no_resource(self):
url = self.client._url('/simple') url = self.client._url('/simple')
assert url == '{0}{1}'.format(url_prefix, 'simple') assert url == f'{url_prefix}simple'
def test_url_unversioned_api(self): def test_url_unversioned_api(self):
url = self.client._url( url = self.client._url(
'/hello/{0}/world', 'somename', versioned_api=False '/hello/{0}/world', 'somename', versioned_api=False
) )
assert url == '{0}{1}'.format(url_base, 'hello/somename/world') assert url == f'{url_base}hello/somename/world'
def test_version(self): def test_version(self):
self.client.version() self.client.version()
@ -463,8 +457,7 @@ class TCPSocketStreamTest(unittest.TestCase):
cls.thread = threading.Thread(target=cls.server.serve_forever) cls.thread = threading.Thread(target=cls.server.serve_forever)
cls.thread.daemon = True cls.thread.daemon = True
cls.thread.start() cls.thread.start()
cls.address = 'http://{0}:{1}'.format( cls.address = f'http://{socket.gethostname()}:{cls.server.server_address[1]}'
socket.gethostname(), cls.server.server_address[1])
@classmethod @classmethod
def teardown_class(cls): def teardown_class(cls):
@ -503,7 +496,7 @@ class TCPSocketStreamTest(unittest.TestCase):
data += stderr_data data += stderr_data
return data return data
else: else:
raise Exception('Unknown path {path}'.format(path=path)) raise Exception(f'Unknown path {path}')
@staticmethod @staticmethod
def frame_header(stream, data): def frame_header(stream, data):
@ -15,7 +15,7 @@ from ansible_collections.community.docker.tests.unit.plugins.module_utils._api.c
from . import fake_stat from . import fake_stat
CURRENT_VERSION = 'v{api_version}'.format(api_version=DEFAULT_DOCKER_API_VERSION) CURRENT_VERSION = f'v{DEFAULT_DOCKER_API_VERSION}'
FAKE_CONTAINER_ID = '3cc2351ab11b' FAKE_CONTAINER_ID = '3cc2351ab11b'
FAKE_IMAGE_ID = 'e9aa60c60128' FAKE_IMAGE_ID = 'e9aa60c60128'
@ -539,131 +539,117 @@ if constants.IS_WINDOWS_PLATFORM:
prefix = 'http+docker://localnpipe' prefix = 'http+docker://localnpipe'
fake_responses = { fake_responses = {
'{prefix}/version'.format(prefix=prefix): f'{prefix}/version':
get_fake_version, get_fake_version,
'{prefix}/{CURRENT_VERSION}/version'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/version':
get_fake_version, get_fake_version,
'{prefix}/{CURRENT_VERSION}/info'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/info':
get_fake_info, get_fake_info,
'{prefix}/{CURRENT_VERSION}/auth'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/auth':
post_fake_auth, post_fake_auth,
'{prefix}/{CURRENT_VERSION}/_ping'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/_ping':
get_fake_ping, get_fake_ping,
'{prefix}/{CURRENT_VERSION}/images/search'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/search':
get_fake_search, get_fake_search,
'{prefix}/{CURRENT_VERSION}/images/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/json':
get_fake_images, get_fake_images,
'{prefix}/{CURRENT_VERSION}/images/test_image/history'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/test_image/history':
get_fake_image_history, get_fake_image_history,
'{prefix}/{CURRENT_VERSION}/images/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_import_image, post_fake_import_image,
'{prefix}/{CURRENT_VERSION}/containers/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/json':
get_fake_containers, get_fake_containers,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start':
post_fake_start_container, post_fake_start_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize':
post_fake_resize_container, post_fake_resize_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json':
get_fake_inspect_container, get_fake_inspect_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename':
post_fake_rename_container, post_fake_rename_container,
'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag':
post_fake_tag_image, post_fake_tag_image,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait':
get_fake_wait, get_fake_wait,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs':
get_fake_logs, get_fake_logs,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes':
get_fake_diff, get_fake_diff,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export':
get_fake_export, get_fake_export,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update':
post_fake_update_container, post_fake_update_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec':
post_fake_exec_create, post_fake_exec_create,
'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start':
post_fake_exec_start, post_fake_exec_start,
'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json':
get_fake_exec_inspect, get_fake_exec_inspect,
'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize':
post_fake_exec_resize, post_fake_exec_resize,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats':
get_fake_stats, get_fake_stats,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top':
get_fake_top, get_fake_top,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop':
post_fake_stop_container, post_fake_stop_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill':
post_fake_kill_container, post_fake_kill_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause':
post_fake_pause_container, post_fake_pause_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause':
post_fake_unpause_container, post_fake_unpause_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart':
post_fake_restart_container, post_fake_restart_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b':
delete_fake_remove_container, delete_fake_remove_container,
'{prefix}/{CURRENT_VERSION}/images/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_image_create, post_fake_image_create,
'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128':
delete_fake_remove_image, delete_fake_remove_image,
'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get':
get_fake_get_image, get_fake_get_image,
'{prefix}/{CURRENT_VERSION}/images/load'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/load':
post_fake_load_image, post_fake_load_image,
'{prefix}/{CURRENT_VERSION}/images/test_image/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/test_image/json':
get_fake_inspect_image, get_fake_inspect_image,
'{prefix}/{CURRENT_VERSION}/images/test_image/insert'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/test_image/insert':
get_fake_insert_image, get_fake_insert_image,
'{prefix}/{CURRENT_VERSION}/images/test_image/push'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/images/test_image/push':
post_fake_push, post_fake_push,
'{prefix}/{CURRENT_VERSION}/commit'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/commit':
post_fake_commit, post_fake_commit,
'{prefix}/{CURRENT_VERSION}/containers/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/containers/create':
post_fake_create_container, post_fake_create_container,
'{prefix}/{CURRENT_VERSION}/build'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/build':
post_fake_build_container, post_fake_build_container,
'{prefix}/{CURRENT_VERSION}/events'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/events':
get_fake_events, get_fake_events,
('{prefix}/{CURRENT_VERSION}/volumes'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'GET'): (f'{prefix}/{CURRENT_VERSION}/volumes', 'GET'):
get_fake_volume_list, get_fake_volume_list,
('{prefix}/{CURRENT_VERSION}/volumes/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'POST'): (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'):
get_fake_volume, get_fake_volume,
('{1}/{0}/volumes/{2}'.format( (f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'GET'):
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
), 'GET'):
get_fake_volume, get_fake_volume,
('{1}/{0}/volumes/{2}'.format( (f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'DELETE'):
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
), 'DELETE'):
fake_remove_volume, fake_remove_volume,
('{1}/{0}/nodes/{2}/update?version=1'.format( (f'{prefix}/{CURRENT_VERSION}/nodes/{FAKE_NODE_ID}/update?version=1', 'POST'):
CURRENT_VERSION, prefix, FAKE_NODE_ID
), 'POST'):
post_fake_update_node, post_fake_update_node,
('{prefix}/{CURRENT_VERSION}/swarm/join'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'POST'): (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'):
post_fake_join_swarm, post_fake_join_swarm,
('{prefix}/{CURRENT_VERSION}/networks'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'GET'): (f'{prefix}/{CURRENT_VERSION}/networks', 'GET'):
get_fake_network_list, get_fake_network_list,
('{prefix}/{CURRENT_VERSION}/networks/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'POST'): (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'):
post_fake_network, post_fake_network,
('{1}/{0}/networks/{2}'.format( (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'GET'):
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'GET'):
get_fake_network, get_fake_network,
('{1}/{0}/networks/{2}'.format( (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'DELETE'):
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'DELETE'):
delete_fake_network, delete_fake_network,
('{1}/{0}/networks/{2}/connect'.format( (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/connect', 'POST'):
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
post_fake_network_connect, post_fake_network_connect,
('{1}/{0}/networks/{2}/disconnect'.format( (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/disconnect', 'POST'):
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
post_fake_network_disconnect, post_fake_network_disconnect,
'{prefix}/{CURRENT_VERSION}/secrets/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION): f'{prefix}/{CURRENT_VERSION}/secrets/create':
post_fake_secret, post_fake_secret,
} }
@ -252,7 +252,7 @@ class LoadConfigTest(unittest.TestCase):
cfg_path = os.path.join(folder, '.dockercfg') cfg_path = os.path.join(folder, '.dockercfg')
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
with open(cfg_path, 'w') as f: with open(cfg_path, 'w') as f:
f.write('auth = {auth}\n'.format(auth=auth_)) f.write(f'auth = {auth_}\n')
f.write('email = sakuya@scarlet.net') f.write('email = sakuya@scarlet.net')
cfg = auth.load_config(cfg_path) cfg = auth.load_config(cfg_path)
@ -309,14 +309,12 @@ class LoadConfigTest(unittest.TestCase):
folder = tempfile.mkdtemp() folder = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, folder) self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder, dockercfg_path = os.path.join(folder, f'.{random.randrange(100000)}.dockercfg')
'.{0}.dockercfg'.format(
random.randrange(100000)))
registry = 'https://your.private.registry.io' registry = 'https://your.private.registry.io'
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = { config = {
registry: { registry: {
'auth': '{auth}'.format(auth=auth_), 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net' 'email': 'sakuya@scarlet.net'
} }
} }
@ -342,7 +340,7 @@ class LoadConfigTest(unittest.TestCase):
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = { config = {
registry: { registry: {
'auth': '{auth}'.format(auth=auth_), 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net' 'email': 'sakuya@scarlet.net'
} }
} }
@ -370,7 +368,7 @@ class LoadConfigTest(unittest.TestCase):
config = { config = {
'auths': { 'auths': {
registry: { registry: {
'auth': '{auth}'.format(auth=auth_), 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net' 'email': 'sakuya@scarlet.net'
} }
} }
@ -399,7 +397,7 @@ class LoadConfigTest(unittest.TestCase):
config = { config = {
'auths': { 'auths': {
registry: { registry: {
'auth': '{auth}'.format(auth=auth_), 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net' 'email': 'sakuya@scarlet.net'
} }
} }
@ -431,9 +431,7 @@ class TarTest(unittest.TestCase):
with pytest.raises(IOError) as ei: with pytest.raises(IOError) as ei:
tar(base) tar(base)
assert 'Can not read file in context: {full_path}'.format(full_path=full_path) in ( assert f'Can not read file in context: {full_path}' in ei.exconly()
ei.exconly()
)
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows') @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
def test_tar_with_file_symlinks(self): def test_tar_with_file_symlinks(self):
@ -75,7 +75,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is None. # Proxy config is non null, env is None.
self.assertSetEqual( self.assertSetEqual(
set(CONFIG.inject_proxy_environment(None)), set(CONFIG.inject_proxy_environment(None)),
set('{k}={v}'.format(k=k, v=v) for k, v in ENV.items())) set(f'{k}={v}' for k, v in ENV.items()))
# Proxy config is null, env is None. # Proxy config is null, env is None.
self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None) self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None)
@ -84,7 +84,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is non null # Proxy config is non null, env is non null
actual = CONFIG.inject_proxy_environment(env) actual = CONFIG.inject_proxy_environment(env)
expected = ['{k}={v}'.format(k=k, v=v) for k, v in ENV.items()] + env expected = [f'{k}={v}' for k, v in ENV.items()] + env
# It's important that the first 8 variables are the ones from the proxy # It's important that the first 8 variables are the ones from the proxy
# config, and the last 2 are the ones from the input environment # config, and the last 2 are the ones from the input environment
self.assertSetEqual(set(actual[:8]), set(expected[:8])) self.assertSetEqual(set(actual[:8]), set(expected[:8]))
@ -288,7 +288,7 @@ class ParseHostTest(unittest.TestCase):
} }
for host in invalid_hosts: for host in invalid_hosts:
msg = 'Should have failed to parse invalid host: {0}'.format(host) msg = f'Should have failed to parse invalid host: {host}'
with self.assertRaises(DockerException, msg=msg): with self.assertRaises(DockerException, msg=msg):
parse_host(host, None) parse_host(host, None)
@ -296,7 +296,7 @@ class ParseHostTest(unittest.TestCase):
self.assertEqual( self.assertEqual(
parse_host(host, None), parse_host(host, None),
expected, expected,
msg='Failed to parse valid host: {0}'.format(host), msg=f'Failed to parse valid host: {host}',
) )
def test_parse_host_empty_value(self): def test_parse_host_empty_value(self):
@ -347,14 +347,14 @@ class ParseRepositoryTagTest(unittest.TestCase):
) )
def test_index_image_sha(self): def test_index_image_sha(self):
assert parse_repository_tag("root@sha256:{sha}".format(sha=self.sha)) == ( assert parse_repository_tag(f"root@sha256:{self.sha}") == (
"root", "sha256:{sha}".format(sha=self.sha) "root", f"sha256:{self.sha}"
) )
def test_private_reg_image_sha(self): def test_private_reg_image_sha(self):
assert parse_repository_tag( assert parse_repository_tag(
"url:5000/repo@sha256:{sha}".format(sha=self.sha) f"url:5000/repo@sha256:{self.sha}"
) == ("url:5000/repo", "sha256:{sha}".format(sha=self.sha)) ) == ("url:5000/repo", f"sha256:{self.sha}")
class ParseDeviceTest(unittest.TestCase): class ParseDeviceTest(unittest.TestCase):
@ -20,9 +20,9 @@ from ansible_collections.community.docker.plugins.module_utils._scramble import
]) ])
def test_scramble_unscramble(plaintext, key, scrambled): def test_scramble_unscramble(plaintext, key, scrambled):
scrambled_ = scramble(plaintext, key) scrambled_ = scramble(plaintext, key)
print('{0!r} == {1!r}'.format(scrambled_, scrambled)) print(f'{scrambled_!r} == {scrambled!r}')
assert scrambled_ == scrambled assert scrambled_ == scrambled
plaintext_ = unscramble(scrambled, key) plaintext_ = unscramble(scrambled, key)
print('{0!r} == {1!r}'.format(plaintext_, plaintext)) print(f'{plaintext_!r} == {plaintext!r}')
assert plaintext_ == plaintext assert plaintext_ == plaintext
@ -39,7 +39,7 @@ def test_parse_string(input, expected):
]) ])
def test_parse_int(input): def test_parse_int(input):
assert parse_modern(input) == input assert parse_modern(input) == input
with pytest.raises(TypeError, match="^must be an octal string, got {value}L?$".format(value=input)): with pytest.raises(TypeError, match=f"^must be an octal string, got {input}L?$"):
parse_octal_string_only(input) parse_octal_string_only(input)
@ -32,4 +32,4 @@ def test_validate_cidr_positives(cidr, expected):
def test_validate_cidr_negatives(cidr): def test_validate_cidr_negatives(cidr):
with pytest.raises(ValueError) as e: with pytest.raises(ValueError) as e:
validate_cidr(cidr) validate_cidr(cidr)
assert '"{0}" is not a valid CIDR'.format(cidr) == str(e.value) assert f'"{cidr}" is not a valid CIDR' == str(e.value)
@ -75,7 +75,7 @@ def test_get_docker_environment(mocker, docker_swarm_service):
mocker.patch.object( mocker.patch.object(
docker_swarm_service, docker_swarm_service,
'format_environment', 'format_environment',
side_effect=lambda d: ['{0}={1}'.format(key, value) for key, value in d.items()], side_effect=lambda d: [f'{key}={value}' for key, value in d.items()],
) )
# Test with env dict and file # Test with env dict and file
result = docker_swarm_service.get_docker_environment( result = docker_swarm_service.get_docker_environment(