Mirror of https://github.com/ansible-collections/community.docker.git (synced 2025-12-18 21:02:36 +00:00)

Commit e4d37af9ca (parent 9fd3cedd1a)
Replace str.format() uses with f-strings.
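Every hunk below applies the same mechanical rewrite: a str.format() call with positional placeholders becomes an f-string with the expressions inlined. The sketch below only illustrates that pattern; it is not taken from the repository, and the function names are made up.

# Illustrative sketch of the rewrite applied throughout this commit (hypothetical names).
def old_style(container, err):
    # Positional placeholders filled in by .format(); {1!r} applies repr() to err.
    return 'Could not find container "{0}" ({1!r})'.format(container, err)

def new_style(container, err):
    # Equivalent f-string: the expressions are inlined and the !r conversion still works.
    return f'Could not find container "{container}" ({err!r})'

assert old_style('web-1', 'gone') == new_style('web-1', 'gone')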
@@ -249,9 +249,8 @@ class Connection(ConnectionBase):
for val, what in ((k, 'Key'), (v, 'Value')):
if not isinstance(val, str):
raise AnsibleConnectionFailure(
'Non-string {0} found for extra_env option. Ambiguous env options must be '
'wrapped in quotes to avoid them being interpreted. {1}: {2!r}'
.format(what.lower(), what, val)
f'Non-string {what.lower()} found for extra_env option. Ambiguous env options must be '
f'wrapped in quotes to avoid them being interpreted. {what}: {val!r}'
)
local_cmd += [b'-e', b'%s=%s' % (to_bytes(k, errors='surrogate_or_strict'), to_bytes(v, errors='surrogate_or_strict'))]

@@ -260,8 +259,7 @@ class Connection(ConnectionBase):
if self.docker_version != 'dev' and LooseVersion(self.docker_version) < LooseVersion('18.06'):
# https://github.com/docker/cli/pull/732, first appeared in release 18.06.0
raise AnsibleConnectionFailure(
'Providing the working directory requires Docker CLI version 18.06 or newer. You have Docker CLI version {0}.'
.format(self.docker_version)
f'Providing the working directory requires Docker CLI version 18.06 or newer. You have Docker CLI version {self.docker_version}.'
)

if self.get_option('privileged'):
@@ -318,8 +316,7 @@ class Connection(ConnectionBase):
self.remote_user = None
actual_user = self._get_docker_remote_user()
if actual_user != self.get_option('remote_user'):
display.warning('docker {0} does not support remote_user, using container default: {1}'
.format(self.docker_version, self.actual_user or '?'))
display.warning(f'docker {self.docker_version} does not support remote_user, using container default: {self.actual_user or "?"}')
return actual_user
elif self._display.verbosity > 2:
# Since we are not setting the actual_user, look it up so we have it for logging later
@@ -335,9 +332,7 @@ class Connection(ConnectionBase):
if not self._connected:
self._set_conn_data()
actual_user = self._get_actual_user()
display.vvv("ESTABLISH DOCKER CONNECTION FOR USER: {0}".format(
actual_user or '?'), host=self.get_option('remote_addr')
)
display.vvv(f"ESTABLISH DOCKER CONNECTION FOR USER: {actual_user or '?'}", host=self.get_option('remote_addr'))
self._connected = True

def exec_command(self, cmd, in_data=None, sudoable=False):
@@ -349,7 +344,7 @@ class Connection(ConnectionBase):

local_cmd = self._build_exec_cmd([self._play_context.executable, '-c', cmd])

display.vvv("EXEC {0}".format(to_text(local_cmd)), host=self.get_option('remote_addr'))
display.vvv(f"EXEC {to_text(local_cmd)}", host=self.get_option('remote_addr'))
display.debug("opening command with Popen()")

local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
@@ -146,27 +146,27 @@ class Connection(ConnectionBase):
has_pipelining = True

def _call_client(self, callable, not_found_can_be_resource=False):
remote_addr = self.get_option('remote_addr')
try:
return callable()
except NotFound as e:
if not_found_can_be_resource:
raise AnsibleConnectionFailure('Could not find container "{1}" or resource in it ({0})'.format(e, self.get_option('remote_addr')))
raise AnsibleConnectionFailure(f'Could not find container "{remote_addr}" or resource in it ({e})')
else:
raise AnsibleConnectionFailure('Could not find container "{1}" ({0})'.format(e, self.get_option('remote_addr')))
raise AnsibleConnectionFailure(f'Could not find container "{remote_addr}" ({e})')
except APIError as e:
if e.response is not None and e.response.status_code == 409:
raise AnsibleConnectionFailure('The container "{1}" has been paused ({0})'.format(e, self.get_option('remote_addr')))
raise AnsibleConnectionFailure(f'The container "{remote_addr}" has been paused ({e})')
self.client.fail(
'An unexpected Docker error occurred for container "{1}": {0}'.format(e, self.get_option('remote_addr'))
f'An unexpected Docker error occurred for container "{remote_addr}": {e}'
)
except DockerException as e:
self.client.fail(
'An unexpected Docker error occurred for container "{1}": {0}'.format(e, self.get_option('remote_addr'))
f'An unexpected Docker error occurred for container "{remote_addr}": {e}'
)
except RequestException as e:
self.client.fail(
'An unexpected requests error occurred for container "{1}" when trying to talk to the Docker daemon: {0}'
.format(e, self.get_option('remote_addr'))
f'An unexpected requests error occurred for container "{remote_addr}" when trying to talk to the Docker daemon: {e}'
)

def __init__(self, play_context, new_stdin, *args, **kwargs):
@@ -186,9 +186,7 @@ class Connection(ConnectionBase):
super(Connection, self)._connect()
if not self._connected:
self.actual_user = self.get_option('remote_user')
display.vvv("ESTABLISH DOCKER CONNECTION FOR USER: {0}".format(
self.actual_user or '?'), host=self.get_option('remote_addr')
)
display.vvv(f"ESTABLISH DOCKER CONNECTION FOR USER: {self.actual_user or '?'}", host=self.get_option('remote_addr'))
if self.client is None:
self.client = AnsibleDockerClient(self, min_docker_api_version=MIN_DOCKER_API)
self._connected = True
@@ -202,7 +200,7 @@ class Connection(ConnectionBase):
if result.get('Config'):
self.actual_user = result['Config'].get('User')
if self.actual_user is not None:
display.vvv("Actual user is '{0}'".format(self.actual_user))
display.vvv(f"Actual user is '{self.actual_user}'")

def exec_command(self, cmd, in_data=None, sudoable=False):
""" Run a command on the docker host """
@@ -213,12 +211,10 @@ class Connection(ConnectionBase):

do_become = self.become and self.become.expect_prompt() and sudoable

stdin_part = f', with stdin ({len(in_data)} bytes)' if in_data is not None else ''
become_part = ', with become prompt' if do_become else ''
display.vvv(
"EXEC {0}{1}{2}".format(
to_text(command),
', with stdin ({0} bytes)'.format(len(in_data)) if in_data is not None else '',
', with become prompt' if do_become else '',
),
f"EXEC {to_text(command)}{stdin_part}{become_part}",
host=self.get_option('remote_addr')
)
@@ -244,19 +240,19 @@ class Connection(ConnectionBase):
for val, what in ((k, 'Key'), (v, 'Value')):
if not isinstance(val, str):
raise AnsibleConnectionFailure(
'Non-string {0} found for extra_env option. Ambiguous env options must be '
'wrapped in quotes to avoid them being interpreted. {1}: {2!r}'
.format(what.lower(), what, val)
f'Non-string {what.lower()} found for extra_env option. Ambiguous env options must be '
f'wrapped in quotes to avoid them being interpreted. {what}: {val!r}'
)
data['Env'].append('{0}={1}'.format(to_text(k, errors='surrogate_or_strict'), to_text(v, errors='surrogate_or_strict')))
kk = to_text(k, errors='surrogate_or_strict')
vv = to_text(v, errors='surrogate_or_strict')
data['Env'].append(f'{kk}={vv}')

if self.get_option('working_dir') is not None:
data['WorkingDir'] = self.get_option('working_dir')
if self.client.docker_api_version < LooseVersion('1.35'):
raise AnsibleConnectionFailure(
'Providing the working directory requires Docker API version 1.35 or newer.'
' The Docker daemon the connection is using has API version {0}.'
.format(self.client.docker_api_version_str)
f' The Docker daemon the connection is using has API version {self.client.docker_api_version_str}.'
)

exec_data = self._call_client(lambda: self.client.post_json_to_json('/containers/{0}/exec', self.get_option('remote_addr'), data=data))
@@ -331,17 +327,17 @@ class Connection(ConnectionBase):

if self.actual_user not in self.ids:
dummy, ids, dummy = self.exec_command(b'id -u && id -g')
remote_addr = self.get_option('remote_addr')
try:
user_id, group_id = ids.splitlines()
self.ids[self.actual_user] = int(user_id), int(group_id)
display.vvvv(
'PUT: Determined uid={0} and gid={1} for user "{2}"'.format(user_id, group_id, self.actual_user),
host=self.get_option('remote_addr')
f'PUT: Determined uid={user_id} and gid={group_id} for user "{self.actual_user}"',
host=remote_addr
)
except Exception as e:
raise AnsibleConnectionFailure(
'Error while determining user and group ID of current user in container "{1}": {0}\nGot value: {2!r}'
.format(e, self.get_option('remote_addr'), ids)
f'Error while determining user and group ID of current user in container "{remote_addr}": {e}\nGot value: {ids!r}'
)

user_id, group_id = self.ids[self.actual_user]
@@ -76,9 +76,7 @@ class Connection(ConnectionBase):

if not self._connected:
display.vvv(
"ESTABLISH NSENTER CONNECTION FOR USER: {0}".format(
self._play_context.remote_user
),
f"ESTABLISH NSENTER CONNECTION FOR USER: {self._play_context.remote_user}",
host=self._play_context.remote_addr,
)
self._connected = True
@@ -104,7 +102,7 @@ class Connection(ConnectionBase):
"--pid",
"--uts",
"--preserve-credentials",
"--target={0}".format(self._nsenter_pid),
f"--target={self._nsenter_pid}",
"--",
]

@@ -115,7 +113,7 @@ class Connection(ConnectionBase):
cmd_parts = nsenter_cmd_parts + cmd
cmd = [to_bytes(arg) for arg in cmd_parts]

display.vvv("EXEC {0}".format(to_text(cmd)), host=self._play_context.remote_addr)
display.vvv(f"EXEC {to_text(cmd)}", host=self._play_context.remote_addr)
display.debug("opening command with Popen()")

master = None
@@ -204,15 +202,15 @@ class Connection(ConnectionBase):
in_path = unfrackpath(in_path, basedir=self.cwd)
out_path = unfrackpath(out_path, basedir=self.cwd)

display.vvv("PUT {0} to {1}".format(in_path, out_path), host=self._play_context.remote_addr)
display.vvv(f"PUT {in_path} to {out_path}", host=self._play_context.remote_addr)
try:
with open(to_bytes(in_path, errors="surrogate_or_strict"), "rb") as in_file:
in_data = in_file.read()
rc, out, err = self.exec_command(cmd=["tee", out_path], in_data=in_data)
if rc != 0:
raise AnsibleError("failed to transfer file to {0}: {1}".format(out_path, err))
raise AnsibleError(f"failed to transfer file to {out_path}: {err}")
except IOError as e:
raise AnsibleError("failed to transfer file to {0}: {1}".format(out_path, to_native(e)))
raise AnsibleError(f"failed to transfer file to {out_path}: {e}")

def fetch_file(self, in_path, out_path):
super(Connection, self).fetch_file(in_path, out_path)
@@ -222,13 +220,13 @@ class Connection(ConnectionBase):

try:
rc, out, err = self.exec_command(cmd=["cat", in_path])
display.vvv("FETCH {0} TO {1}".format(in_path, out_path), host=self._play_context.remote_addr)
display.vvv(f"FETCH {in_path} TO {out_path}", host=self._play_context.remote_addr)
if rc != 0:
raise AnsibleError("failed to transfer file to {0}: {1}".format(in_path, err))
raise AnsibleError(f"failed to transfer file to {in_path}: {err}")
with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb') as out_file:
out_file.write(out)
except IOError as e:
raise AnsibleError("failed to transfer file to {0}: {1}".format(to_native(out_path), to_native(e)))
raise AnsibleError(f"failed to transfer file to {to_native(out_path)}: {e}")

def close(self):
''' terminate the connection; nothing to do here '''
@@ -268,19 +268,19 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
# Add container to groups
image_name = config.get('Image')
if image_name and add_legacy_groups:
groups.append('image_{0}'.format(image_name))
groups.append(f'image_{image_name}')

stack_name = labels.get('com.docker.stack.namespace')
if stack_name:
full_facts['docker_stack'] = stack_name
if add_legacy_groups:
groups.append('stack_{0}'.format(stack_name))
groups.append(f'stack_{stack_name}')

service_name = labels.get('com.docker.swarm.service.name')
if service_name:
full_facts['docker_service'] = service_name
if add_legacy_groups:
groups.append('service_{0}'.format(service_name))
groups.append(f'service_{service_name}')

ansible_connection = None
if connection_type == 'ssh':
@@ -383,9 +383,9 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
self._populate(client)
except DockerException as e:
raise AnsibleError(
'An unexpected Docker error occurred: {0}'.format(e)
f'An unexpected Docker error occurred: {e}'
)
except RequestException as e:
raise AnsibleError(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(e)
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}'
)
@@ -131,11 +131,11 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):

command = [self.DOCKER_MACHINE_PATH]
command.extend(args)
display.debug('Executing command {0}'.format(command))
display.debug(f'Executing command {command}')
try:
result = subprocess.check_output(command)
except subprocess.CalledProcessError as e:
display.warning('Exception {0} caught while executing command {1}, this was the original exception: {2}'.format(type(e).__name__, command, e))
display.warning(f'Exception {type(e).__name__} caught while executing command {command}, this was the original exception: {e}')
raise e

return to_text(result).strip()
@@ -203,14 +203,14 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):

def _should_skip_host(self, machine_name, env_var_tuples, daemon_env):
if not env_var_tuples:
warning_prefix = 'Unable to fetch Docker daemon env vars from Docker Machine for host {0}'.format(machine_name)
warning_prefix = f'Unable to fetch Docker daemon env vars from Docker Machine for host {machine_name}'
if daemon_env in ('require', 'require-silently'):
if daemon_env == 'require':
display.warning('{0}: host will be skipped'.format(warning_prefix))
display.warning(f'{warning_prefix}: host will be skipped')
return True
else: # 'optional', 'optional-silently'
if daemon_env == 'optional':
display.warning('{0}: host will lack dm_DOCKER_xxx variables'.format(warning_prefix))
display.warning(f'{warning_prefix}: host will lack dm_DOCKER_xxx variables')
return False

def _populate(self):
@@ -261,7 +261,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):

# set variables based on Docker Machine env variables
for kv in env_var_tuples:
self.inventory.set_variable(machine_name, 'dm_{0}'.format(kv[0]), make_unsafe(kv[1]))
self.inventory.set_variable(machine_name, f'dm_{kv[0]}', make_unsafe(kv[1]))

if self.get_option('verbose_output'):
self.inventory.set_variable(machine_name, 'docker_machine_node_attributes', unsafe_node_attrs)
@@ -187,14 +187,11 @@ class APIClient(
self._version = version
if not isinstance(self._version, str):
raise DockerException(
'Version parameter must be a string or None. Found {0}'.format(
type(version).__name__
)
f'Version parameter must be a string or None. Found {type(version).__name__}'
)
if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
raise InvalidVersion(
'API versions below {0} are no longer supported by this '
'library.'.format(MINIMUM_DOCKER_API_VERSION)
f'API versions below {MINIMUM_DOCKER_API_VERSION} are no longer supported by this library.'
)

def _retrieve_server_version(self):
@@ -202,7 +199,7 @@ class APIClient(
version_result = self.version(api_version=False)
except Exception as e:
raise DockerException(
'Error while fetching server API version: {0}'.format(e)
f'Error while fetching server API version: {e}'
)

try:
@@ -214,7 +211,7 @@ class APIClient(
)
except Exception as e:
raise DockerException(
'Error while fetching server API version: {0}. Response seems to be broken.'.format(e)
f'Error while fetching server API version: {e}. Response seems to be broken.'
)

def _set_request_timeout(self, kwargs):
@@ -247,19 +244,16 @@ class APIClient(
for arg in args:
if not isinstance(arg, str):
raise ValueError(
'Expected a string but found {0} ({1}) '
'instead'.format(arg, type(arg))
f'Expected a string but found {arg} ({type(arg)}) instead'
)

quote_f = partial(quote, safe="/:")
args = map(quote_f, args)

if kwargs.get('versioned_api', True):
return '{0}/v{1}{2}'.format(
self.base_url, self._version, pathfmt.format(*args)
)
return f'{self.base_url}/v{self._version}{pathfmt.format(*args)}'
else:
return '{0}{1}'.format(self.base_url, pathfmt.format(*args))
return f'{self.base_url}{pathfmt.format(*args)}'

def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred."""
@ -19,7 +19,7 @@ from .credentials.errors import StoreError, CredentialsNotFound
|
||||
from .utils import config
|
||||
|
||||
INDEX_NAME = 'docker.io'
|
||||
INDEX_URL = 'https://index.{0}/v1/'.format(INDEX_NAME)
|
||||
INDEX_URL = f'https://index.{INDEX_NAME}/v1/'
|
||||
TOKEN_USERNAME = '<token>'
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@ -28,14 +28,13 @@ log = logging.getLogger(__name__)
|
||||
def resolve_repository_name(repo_name):
|
||||
if '://' in repo_name:
|
||||
raise errors.InvalidRepository(
|
||||
'Repository name cannot contain a scheme ({0})'.format(repo_name)
|
||||
f'Repository name cannot contain a scheme ({repo_name})'
|
||||
)
|
||||
|
||||
index_name, remote_name = split_repo_name(repo_name)
|
||||
if index_name[0] == '-' or index_name[-1] == '-':
|
||||
raise errors.InvalidRepository(
|
||||
'Invalid index name ({0}). Cannot begin or end with a'
|
||||
' hyphen.'.format(index_name)
|
||||
f'Invalid index name ({index_name}). Cannot begin or end with a hyphen.'
|
||||
)
|
||||
return resolve_index_name(index_name), remote_name
|
||||
|
||||
@ -117,9 +116,7 @@ class AuthConfig(dict):
|
||||
# keys is not formatted properly.
|
||||
if raise_on_error:
|
||||
raise errors.InvalidConfigFile(
|
||||
'Invalid configuration for registry {0}'.format(
|
||||
registry
|
||||
)
|
||||
f'Invalid configuration for registry {registry}'
|
||||
)
|
||||
return {}
|
||||
if 'identitytoken' in entry:
|
||||
@ -272,7 +269,7 @@ class AuthConfig(dict):
|
||||
return None
|
||||
except StoreError as e:
|
||||
raise errors.DockerException(
|
||||
'Credentials store error: {0}'.format(repr(e))
|
||||
f'Credentials store error: {e}'
|
||||
)
|
||||
|
||||
def _get_store_instance(self, name):
|
||||
|
||||
@ -146,14 +146,14 @@ class ContextAPI(object):
|
||||
names.append(name)
|
||||
except Exception as e:
|
||||
raise errors.ContextException(
|
||||
"Failed to load metafile {filepath}: {e}".format(filepath=filepath, e=e),
|
||||
f"Failed to load metafile {filepath}: {e}"
|
||||
) from e
|
||||
|
||||
contexts = [cls.get_default_context()]
|
||||
for name in names:
|
||||
context = Context.load_context(name)
|
||||
if not context:
|
||||
raise errors.ContextException("Context {context} cannot be found".format(context=name))
|
||||
raise errors.ContextException(f"Context {name} cannot be found")
|
||||
contexts.append(context)
|
||||
return contexts
|
||||
|
||||
@ -174,7 +174,7 @@ class ContextAPI(object):
|
||||
err = write_context_name_to_docker_config(name)
|
||||
if err:
|
||||
raise errors.ContextException(
|
||||
'Failed to set current context: {err}'.format(err=err))
|
||||
f'Failed to set current context: {err}')
|
||||
|
||||
@classmethod
|
||||
def remove_context(cls, name):
|
||||
|
||||
@ -29,7 +29,7 @@ def get_current_context_name_with_source():
|
||||
if docker_cfg_path:
|
||||
try:
|
||||
with open(docker_cfg_path) as f:
|
||||
return json.load(f).get("currentContext", "default"), "configuration file {file}".format(file=docker_cfg_path)
|
||||
return json.load(f).get("currentContext", "default"), f"configuration file {docker_cfg_path}"
|
||||
except Exception:
|
||||
pass
|
||||
return "default", "fallback value"
|
||||
|
||||
@ -62,7 +62,7 @@ class Context(object):
|
||||
if not isinstance(v, dict):
|
||||
# unknown format
|
||||
raise ContextException(
|
||||
"Unknown endpoint format for context {name}: {v}".format(name=name, v=v),
|
||||
f"Unknown endpoint format for context {name}: {v}",
|
||||
)
|
||||
|
||||
self.endpoints[k] = v
|
||||
@ -118,7 +118,7 @@ class Context(object):
|
||||
except (OSError, KeyError, ValueError) as e:
|
||||
# unknown format
|
||||
raise Exception(
|
||||
"Detected corrupted meta file for context {name} : {e}".format(name=name, e=e)
|
||||
f"Detected corrupted meta file for context {name} : {e}"
|
||||
) from e
|
||||
|
||||
# for docker endpoints, set defaults for
|
||||
@ -193,7 +193,7 @@ class Context(object):
|
||||
rmtree(self.tls_path)
|
||||
|
||||
def __repr__(self):
|
||||
return "<{classname}: '{name}'>".format(classname=self.__class__.__name__, name=self.name)
|
||||
return f"<{self.__class__.__name__}: '{self.name}'>"
|
||||
|
||||
def __str__(self):
|
||||
return json.dumps(self.__call__(), indent=2)
|
||||
|
||||
@ -26,12 +26,8 @@ def process_store_error(cpe, program):
|
||||
message = cpe.output.decode('utf-8')
|
||||
if 'credentials not found in native keychain' in message:
|
||||
return CredentialsNotFound(
|
||||
'No matching credentials in {0}'.format(
|
||||
program
|
||||
)
|
||||
f'No matching credentials in {program}'
|
||||
)
|
||||
return StoreError(
|
||||
'Credentials store {0} exited with "{1}".'.format(
|
||||
program, cpe.output.decode('utf-8').strip()
|
||||
)
|
||||
f'Credentials store {program} exited with "{cpe.output.decode("utf-8").strip()}".'
|
||||
)
|
||||
|
||||
@ -30,9 +30,7 @@ class Store(object):
|
||||
self.environment = environment
|
||||
if self.exe is None:
|
||||
raise errors.InitializationError(
|
||||
'{0} not installed or not available in PATH'.format(
|
||||
self.program
|
||||
)
|
||||
f'{self.program} not installed or not available in PATH'
|
||||
)
|
||||
|
||||
def get(self, server):
|
||||
@ -50,7 +48,7 @@ class Store(object):
|
||||
# raise CredentialsNotFound
|
||||
if result['Username'] == '' and result['Secret'] == '':
|
||||
raise errors.CredentialsNotFound(
|
||||
'No matching credentials in {0}'.format(self.program)
|
||||
f'No matching credentials in {self.program}'
|
||||
)
|
||||
|
||||
return result
|
||||
@ -92,14 +90,10 @@ class Store(object):
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
raise errors.StoreError(
|
||||
'{0} not installed or not available in PATH'.format(
|
||||
self.program
|
||||
)
|
||||
f'{self.program} not installed or not available in PATH'
|
||||
)
|
||||
else:
|
||||
raise errors.StoreError(
|
||||
'Unexpected OS error "{0}", errno={1}'.format(
|
||||
e.strerror, e.errno
|
||||
)
|
||||
f'Unexpected OS error "{e.strerror}", errno={e.errno}'
|
||||
)
|
||||
return output
|
||||
|
||||
@ -59,17 +59,13 @@ class APIError(_HTTPError, DockerException):
|
||||
message = super(APIError, self).__str__()
|
||||
|
||||
if self.is_client_error():
|
||||
message = '{0} Client Error for {1}: {2}'.format(
|
||||
self.response.status_code, self.response.url,
|
||||
self.response.reason)
|
||||
message = f'{self.response.status_code} Client Error for {self.response.url}: {self.response.reason}'
|
||||
|
||||
elif self.is_server_error():
|
||||
message = '{0} Server Error for {1}: {2}'.format(
|
||||
self.response.status_code, self.response.url,
|
||||
self.response.reason)
|
||||
message = f'{self.response.status_code} Server Error for {self.response.url}: {self.response.reason}'
|
||||
|
||||
if self.explanation:
|
||||
message = '{0} ("{1}")'.format(message, self.explanation)
|
||||
message = f'{message} ("{self.explanation}")'
|
||||
|
||||
return message
|
||||
|
||||
@ -146,9 +142,8 @@ class ContainerError(DockerException):
|
||||
self.image = image
|
||||
self.stderr = stderr
|
||||
|
||||
err = ": {0}".format(stderr) if stderr is not None else ""
|
||||
msg = ("Command '{0}' in image '{1}' returned non-zero exit "
|
||||
"status {2}{3}").format(command, image, exit_status, err)
|
||||
err = f": {stderr}" if stderr is not None else ""
|
||||
msg = f"Command '{command}' in image '{image}' returned non-zero exit status {exit_status}{err}"
|
||||
|
||||
super(ContainerError, self).__init__(msg)
|
||||
|
||||
@ -170,8 +165,8 @@ class ImageLoadError(DockerException):
|
||||
|
||||
|
||||
def create_unexpected_kwargs_error(name, kwargs):
|
||||
quoted_kwargs = ["'{0}'".format(k) for k in sorted(kwargs)]
|
||||
text = ["{0}() ".format(name)]
|
||||
quoted_kwargs = [f"'{k}'" for k in sorted(kwargs)]
|
||||
text = [f"{name}() "]
|
||||
if len(quoted_kwargs) == 1:
|
||||
text.append("got an unexpected keyword argument ")
|
||||
else:
|
||||
@ -185,7 +180,7 @@ class MissingContextParameter(DockerException):
|
||||
self.param = param
|
||||
|
||||
def __str__(self):
|
||||
return ("missing parameter: {0}".format(self.param))
|
||||
return f"missing parameter: {self.param}"
|
||||
|
||||
|
||||
class ContextAlreadyExists(DockerException):
|
||||
@ -193,7 +188,7 @@ class ContextAlreadyExists(DockerException):
|
||||
self.name = name
|
||||
|
||||
def __str__(self):
|
||||
return ("context {0} already exists".format(self.name))
|
||||
return f"context {self.name} already exists"
|
||||
|
||||
|
||||
class ContextException(DockerException):
|
||||
@ -209,7 +204,7 @@ class ContextNotFound(DockerException):
|
||||
self.name = name
|
||||
|
||||
def __str__(self):
|
||||
return ("context '{0}' not found".format(self.name))
|
||||
return f"context '{self.name}' not found"
|
||||
|
||||
|
||||
class MissingRequirementException(DockerException):
|
||||
|
||||
@ -107,7 +107,7 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
|
||||
t.addfile(i, f)
|
||||
except IOError:
|
||||
raise IOError(
|
||||
'Can not read file in context: {0}'.format(full_path)
|
||||
f'Can not read file in context: {full_path}'
|
||||
)
|
||||
else:
|
||||
# Directories, FIFOs, symlinks... do not need to be read.
|
||||
@ -271,18 +271,13 @@ def process_dockerfile(dockerfile, path):
|
||||
abs_dockerfile = os.path.join(path, dockerfile)
|
||||
if IS_WINDOWS_PLATFORM and path.startswith(
|
||||
WINDOWS_LONGPATH_PREFIX):
|
||||
abs_dockerfile = '{0}{1}'.format(
|
||||
WINDOWS_LONGPATH_PREFIX,
|
||||
os.path.normpath(
|
||||
abs_dockerfile[len(WINDOWS_LONGPATH_PREFIX):]
|
||||
)
|
||||
)
|
||||
abs_dockerfile = f'{WINDOWS_LONGPATH_PREFIX}{os.path.normpath(abs_dockerfile[len(WINDOWS_LONGPATH_PREFIX):])}'
|
||||
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
|
||||
os.path.relpath(abs_dockerfile, path).startswith('..')):
|
||||
# Dockerfile not in context - read data to insert into tar later
|
||||
with open(abs_dockerfile) as df:
|
||||
return (
|
||||
'.dockerfile.{random:x}'.format(random=random.getrandbits(160)),
|
||||
f'.dockerfile.{random.getrandbits(160):x}',
|
||||
df.read()
|
||||
)
|
||||
|
||||
|
||||
@ -38,9 +38,7 @@ def minimum_version(version):
|
||||
def wrapper(self, *args, **kwargs):
|
||||
if utils.version_lt(self._version, version):
|
||||
raise errors.InvalidVersion(
|
||||
'{0} is not available for version < {1}'.format(
|
||||
f.__name__, version
|
||||
)
|
||||
f'{f.__name__} is not available for version < {version}'
|
||||
)
|
||||
return f(self, *args, **kwargs)
|
||||
return wrapper
|
||||
|
||||
@ -58,10 +58,10 @@ def port_range(start, end, proto, randomly_available_port=False):
|
||||
if not start:
|
||||
return start
|
||||
if not end:
|
||||
return [start + proto]
|
||||
return [f'{start}{proto}']
|
||||
if randomly_available_port:
|
||||
return ['{0}-{1}'.format(start, end) + proto]
|
||||
return [str(port) + proto for port in range(int(start), int(end) + 1)]
|
||||
return [f'{start}-{end}{proto}']
|
||||
return [f'{port}{proto}' for port in range(int(start), int(end) + 1)]
|
||||
|
||||
|
||||
def split_port(port):
|
||||
|
||||
@ -80,5 +80,4 @@ class ProxyConfig(dict):
|
||||
return proxy_env + environment
|
||||
|
||||
def __str__(self):
|
||||
return 'ProxyConfig(http={0}, https={1}, ftp={2}, no_proxy={3})'.format(
|
||||
self.http, self.https, self.ftp, self.no_proxy)
|
||||
return f'ProxyConfig(http={self.http}, https={self.https}, ftp={self.ftp}, no_proxy={self.no_proxy})'
|
||||
|
||||
@ -193,4 +193,4 @@ def demux_adaptor(stream_id, data):
|
||||
elif stream_id == STDERR:
|
||||
return (None, data)
|
||||
else:
|
||||
raise ValueError('{0} is not a valid stream'.format(stream_id))
|
||||
raise ValueError(f'{stream_id} is not a valid stream')
|
||||
|
||||
@ -136,8 +136,7 @@ def convert_volume_binds(binds):
|
||||
if isinstance(v, dict):
|
||||
if 'ro' in v and 'mode' in v:
|
||||
raise ValueError(
|
||||
'Binding cannot contain both "ro" and "mode": {0}'
|
||||
.format(repr(v))
|
||||
f'Binding cannot contain both "ro" and "mode": {v!r}'
|
||||
)
|
||||
|
||||
bind = v['bind']
|
||||
@ -167,11 +166,11 @@ def convert_volume_binds(binds):
|
||||
else:
|
||||
mode = v['propagation']
|
||||
|
||||
result.append('{0}:{1}:{2}'.format(k, bind, mode))
|
||||
result.append(f'{k}:{bind}:{mode}')
|
||||
else:
|
||||
if isinstance(v, bytes):
|
||||
v = v.decode('utf-8')
|
||||
result.append('{0}:{1}:rw'.format(k, v))
|
||||
result.append(f'{k}:{v}:rw')
|
||||
return result
|
||||
|
||||
|
||||
@ -181,8 +180,7 @@ def convert_tmpfs_mounts(tmpfs):
|
||||
|
||||
if not isinstance(tmpfs, list):
|
||||
raise ValueError(
|
||||
'Expected tmpfs value to be either a list or a dict, found: {0}'
|
||||
.format(type(tmpfs).__name__)
|
||||
f'Expected tmpfs value to be either a list or a dict, found: {type(tmpfs).__name__}'
|
||||
)
|
||||
|
||||
result = {}
|
||||
@ -196,8 +194,7 @@ def convert_tmpfs_mounts(tmpfs):
|
||||
|
||||
else:
|
||||
raise ValueError(
|
||||
"Expected item in tmpfs list to be a string, found: {0}"
|
||||
.format(type(mount).__name__)
|
||||
f"Expected item in tmpfs list to be a string, found: {type(mount).__name__}"
|
||||
)
|
||||
|
||||
result[name] = options
|
||||
@ -257,14 +254,14 @@ def parse_host(addr, is_win32=False, tls=False):
|
||||
|
||||
if proto not in ('tcp', 'unix', 'npipe', 'ssh'):
|
||||
raise errors.DockerException(
|
||||
"Invalid bind address protocol: {0}".format(addr)
|
||||
f"Invalid bind address protocol: {addr}"
|
||||
)
|
||||
|
||||
if proto == 'tcp' and not parsed_url.netloc:
|
||||
# "tcp://" is exceptionally disallowed by convention;
|
||||
# omitting a hostname for other protocols is fine
|
||||
raise errors.DockerException(
|
||||
'Invalid bind address format: {0}'.format(addr)
|
||||
f'Invalid bind address format: {addr}'
|
||||
)
|
||||
|
||||
if any([
|
||||
@ -272,13 +269,12 @@ def parse_host(addr, is_win32=False, tls=False):
|
||||
parsed_url.password
|
||||
]):
|
||||
raise errors.DockerException(
|
||||
'Invalid bind address format: {0}'.format(addr)
|
||||
f'Invalid bind address format: {addr}'
|
||||
)
|
||||
|
||||
if parsed_url.path and proto == 'ssh':
|
||||
raise errors.DockerException(
|
||||
'Invalid bind address format: no path allowed for this protocol:'
|
||||
' {0}'.format(addr)
|
||||
f'Invalid bind address format: no path allowed for this protocol: {addr}'
|
||||
)
|
||||
else:
|
||||
path = parsed_url.path
|
||||
@ -292,19 +288,19 @@ def parse_host(addr, is_win32=False, tls=False):
|
||||
port = parsed_url.port or 0
|
||||
if port <= 0:
|
||||
port = 22 if proto == 'ssh' else (2375 if tls else 2376)
|
||||
netloc = '{0}:{1}'.format(parsed_url.netloc, port)
|
||||
netloc = f'{parsed_url.netloc}:{port}'
|
||||
|
||||
if not parsed_url.hostname:
|
||||
netloc = '{0}:{1}'.format(DEFAULT_HTTP_HOST, port)
|
||||
netloc = f'{DEFAULT_HTTP_HOST}:{port}'
|
||||
|
||||
# Rewrite schemes to fit library internals (requests adapters)
|
||||
if proto == 'tcp':
|
||||
proto = 'http{0}'.format('s' if tls else '')
|
||||
proto = f"http{'s' if tls else ''}"
|
||||
elif proto == 'unix':
|
||||
proto = 'http+unix'
|
||||
|
||||
if proto in ('http+unix', 'npipe'):
|
||||
return "{0}://{1}".format(proto, path).rstrip('/')
|
||||
return f"{proto}://{path}".rstrip('/')
|
||||
return urlunparse(URLComponents(
|
||||
scheme=proto,
|
||||
netloc=netloc,
|
||||
@ -323,7 +319,7 @@ def parse_devices(devices):
|
||||
continue
|
||||
if not isinstance(device, str):
|
||||
raise errors.DockerException(
|
||||
'Invalid device type {0}'.format(type(device))
|
||||
f'Invalid device type {type(device)}'
|
||||
)
|
||||
device_mapping = device.split(':')
|
||||
if device_mapping:
|
||||
@ -428,17 +424,14 @@ def parse_bytes(s):
|
||||
digits = float(digits_part)
|
||||
except ValueError:
|
||||
raise errors.DockerException(
|
||||
'Failed converting the string value for memory ({0}) to'
|
||||
' an integer.'.format(digits_part)
|
||||
f'Failed converting the string value for memory ({digits_part}) to an integer.'
|
||||
)
|
||||
|
||||
# Reconvert to long for the final result
|
||||
s = int(digits * units[suffix])
|
||||
else:
|
||||
raise errors.DockerException(
|
||||
'The specified value for memory ({0}) should specify the'
|
||||
' units. The postfix should be one of the `b` `k` `m` `g`'
|
||||
' characters'.format(s)
|
||||
f'The specified value for memory ({s}) should specify the units. The postfix should be one of the `b` `k` `m` `g` characters'
|
||||
)
|
||||
|
||||
return s
|
||||
@ -448,7 +441,7 @@ def normalize_links(links):
|
||||
if isinstance(links, dict):
|
||||
links = links.items()
|
||||
|
||||
return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)]
|
||||
return [f'{k}:{v}' if v else k for k, v in sorted(links)]
|
||||
|
||||
|
||||
def parse_env_file(env_file):
|
||||
@ -473,9 +466,7 @@ def parse_env_file(env_file):
|
||||
k, v = parse_line
|
||||
environment[k] = v
|
||||
else:
|
||||
raise errors.DockerException(
|
||||
'Invalid line in environment file {0}:\n{1}'.format(
|
||||
env_file, line))
|
||||
raise errors.DockerException(f'Invalid line in environment file {env_file}:\n{line}')
|
||||
|
||||
return environment
|
||||
|
||||
@ -491,7 +482,7 @@ def format_environment(environment):
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode('utf-8')
|
||||
|
||||
return '{key}={value}'.format(key=key, value=value)
|
||||
return f'{key}={value}'
|
||||
return [format_env(*var) for var in environment.items()]
|
||||
|
||||
|
||||
@ -499,11 +490,11 @@ def format_extra_hosts(extra_hosts, task=False):
|
||||
# Use format dictated by Swarm API if container is part of a task
|
||||
if task:
|
||||
return [
|
||||
'{0} {1}'.format(v, k) for k, v in sorted(extra_hosts.items())
|
||||
f'{v} {k}' for k, v in sorted(extra_hosts.items())
|
||||
]
|
||||
|
||||
return [
|
||||
'{0}:{1}'.format(k, v) for k, v in sorted(extra_hosts.items())
|
||||
f'{k}:{v}' for k, v in sorted(extra_hosts.items())
|
||||
]
|
||||
|
||||
|
||||
|
||||
@ -98,7 +98,7 @@ class _Parser(object):
|
||||
try:
|
||||
v += _HEX_DICT[self.line[self.index]]
|
||||
except KeyError:
|
||||
raise InvalidLogFmt('Invalid unicode escape digit {digit!r}'.format(digit=self.line[self.index]))
|
||||
raise InvalidLogFmt(f'Invalid unicode escape digit {self.line[self.index]!r}')
|
||||
self.index += 6
|
||||
return chr(v)
|
||||
|
||||
@ -170,7 +170,8 @@ def parse_line(line, logrus_mode=False):
|
||||
if cur in _ESCAPE_DICT:
|
||||
value.append(_ESCAPE_DICT[cur])
|
||||
elif cur != 'u':
|
||||
raise InvalidLogFmt('Unknown escape sequence {seq!r}'.format(seq='\\' + cur))
|
||||
es = f"\\{cur}"
|
||||
raise InvalidLogFmt(f'Unknown escape sequence {es!r}')
|
||||
else:
|
||||
parser.prev()
|
||||
value.append(parser.parse_unicode_sequence())
|
||||
|
||||
@ -18,9 +18,9 @@ _VALID_STR = re.compile('^[A-Za-z0-9_-]+$')
|
||||
|
||||
def _validate_part(string, part, part_name):
|
||||
if not part:
|
||||
raise ValueError('Invalid platform string "{string}": {part} is empty'.format(string=string, part=part_name))
|
||||
raise ValueError(f'Invalid platform string "{string}": {part} is empty')
|
||||
if not _VALID_STR.match(part):
|
||||
raise ValueError('Invalid platform string "{string}": {part} has invalid characters'.format(string=string, part=part_name))
|
||||
raise ValueError(f'Invalid platform string "{string}": {part} has invalid characters')
|
||||
return part
|
||||
|
||||
|
||||
@ -123,16 +123,16 @@ class _Platform(object):
|
||||
arch=arch or None,
|
||||
variant=variant or None,
|
||||
)
|
||||
raise ValueError('Invalid platform string "{0}": unknown OS or architecture'.format(string))
|
||||
raise ValueError(f'Invalid platform string "{string}": unknown OS or architecture')
|
||||
os = _validate_part(string, parts[0], 'OS')
|
||||
if not os:
|
||||
raise ValueError('Invalid platform string "{0}": OS is empty'.format(string))
|
||||
raise ValueError(f'Invalid platform string "{string}": OS is empty')
|
||||
arch = _validate_part(string, parts[1], 'architecture') if len(parts) > 1 else None
|
||||
if arch is not None and not arch:
|
||||
raise ValueError('Invalid platform string "{0}": architecture is empty'.format(string))
|
||||
raise ValueError(f'Invalid platform string "{string}": architecture is empty')
|
||||
variant = _validate_part(string, parts[2], 'variant') if len(parts) > 2 else None
|
||||
if variant is not None and not variant:
|
||||
raise ValueError('Invalid platform string "{0}": variant is empty'.format(string))
|
||||
raise ValueError(f'Invalid platform string "{string}": variant is empty')
|
||||
arch, variant = _normalize_arch(arch, variant or '')
|
||||
if len(parts) == 2 and arch == 'arm' and variant == 'v7':
|
||||
variant = None
|
||||
@ -155,7 +155,7 @@ class _Platform(object):
|
||||
return '/'.join(parts)
|
||||
|
||||
def __repr__(self):
|
||||
return '_Platform(os={os!r}, arch={arch!r}, variant={variant!r})'.format(os=self.os, arch=self.arch, variant=self.variant)
|
||||
return f'_Platform(os={self.os!r}, arch={self.arch!r}, variant={self.variant!r})'
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.os == other.os and self.arch == other.arch and self.variant == other.variant
|
||||
|
||||
@ -125,12 +125,9 @@ def _get_tls_config(fail_function, **kwargs):
|
||||
if assert_hostname is not None:
|
||||
fail_function(
|
||||
"tls_hostname is not compatible with Docker SDK for Python 7.0.0+. You are using"
|
||||
" Docker SDK for Python {docker_py_version}. The tls_hostname option (value: {tls_hostname})"
|
||||
f" Docker SDK for Python {docker_version}. The tls_hostname option (value: {assert_hostname})"
|
||||
" has either been set directly or with the environment variable DOCKER_TLS_HOSTNAME."
|
||||
" Make sure it is not set, or switch to an older version of Docker SDK for Python.".format(
|
||||
docker_py_version=docker_version,
|
||||
tls_hostname=assert_hostname,
|
||||
)
|
||||
" Make sure it is not set, or switch to an older version of Docker SDK for Python."
|
||||
)
|
||||
# Filter out all None parameters
|
||||
kwargs = dict((k, v) for k, v in kwargs.items() if v is not None)
|
||||
@ -691,6 +688,6 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
|
||||
result = result.get(key)
|
||||
if isinstance(result, Sequence):
|
||||
for warning in result:
|
||||
self.module.warn('Docker warning: {0}'.format(warning))
|
||||
self.module.warn(f'Docker warning: {warning}')
|
||||
elif isinstance(result, str) and result:
|
||||
self.module.warn('Docker warning: {0}'.format(result))
|
||||
self.module.warn(f'Docker warning: {result}')
|
||||
|
||||
@ -577,6 +577,6 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
|
||||
result = result.get(key)
|
||||
if isinstance(result, Sequence):
|
||||
for warning in result:
|
||||
self.module.warn('Docker warning: {0}'.format(warning))
|
||||
self.module.warn(f'Docker warning: {warning}')
|
||||
elif isinstance(result, str) and result:
|
||||
self.module.warn('Docker warning: {0}'.format(result))
|
||||
self.module.warn(f'Docker warning: {result}')
|
||||
|
||||
@ -128,11 +128,7 @@ class AnsibleDockerClientBase(object):
|
||||
try:
|
||||
data = json.loads(stdout)
|
||||
except Exception as exc:
|
||||
self.fail('Error while parsing JSON output of {cmd}: {exc}\nJSON output: {stdout}'.format(
|
||||
cmd=self._compose_cmd_str(args),
|
||||
exc=to_native(exc),
|
||||
stdout=to_native(stdout),
|
||||
))
|
||||
self.fail(f'Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_native(stdout)}')
|
||||
return rc, data, stderr
|
||||
|
||||
# def call_cli_json_stream(self, *args, check_rc=False, data=None, cwd=None, environ_update=None, warn_on_stderr=False):
|
||||
@ -148,11 +144,7 @@ class AnsibleDockerClientBase(object):
|
||||
if line.startswith(b'{'):
|
||||
result.append(json.loads(line))
|
||||
except Exception as exc:
|
||||
self.fail('Error while parsing JSON output of {cmd}: {exc}\nJSON output: {stdout}'.format(
|
||||
cmd=self._compose_cmd_str(args),
|
||||
exc=to_native(exc),
|
||||
stdout=to_native(stdout),
|
||||
))
|
||||
self.fail(f'Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_native(stdout)}')
|
||||
return rc, result, stderr
|
||||
|
||||
@abc.abstractmethod
|
||||
@ -188,7 +180,7 @@ class AnsibleDockerClientBase(object):
|
||||
if the tag exists.
|
||||
'''
|
||||
dummy, images, dummy = self.call_cli_json_stream(
|
||||
'image', 'ls', '--format', '{{ json . }}', '--no-trunc', '--filter', 'reference={0}'.format(name),
|
||||
'image', 'ls', '--format', '{{ json . }}', '--no-trunc', '--filter', f'reference={name}',
|
||||
check_rc=True,
|
||||
)
|
||||
if tag:
|
||||
|
||||
@ -271,10 +271,10 @@ def _extract_event(line, warn_function=None):
|
||||
if match:
|
||||
if warn_function:
|
||||
if match.group('msg'):
|
||||
msg = '{rid}: {msg}'
|
||||
msg = f"{match.group('resource_id')}: {match.group('msg')}"
|
||||
else:
|
||||
msg = 'Unspecified warning for {rid}'
|
||||
warn_function(msg.format(rid=match.group('resource_id'), msg=match.group('msg')))
|
||||
msg = f"Unspecified warning for {match.group('resource_id')}"
|
||||
warn_function(msg)
|
||||
return None, True
|
||||
match = _RE_PULL_PROGRESS.match(line)
|
||||
if match:
|
||||
@ -323,9 +323,8 @@ def _warn_missing_dry_run_prefix(line, warn_missing_dry_run_prefix, warn_functio
|
||||
# This could be a bug, a change of docker compose's output format, ...
|
||||
# Tell the user to report it to us :-)
|
||||
warn_function(
|
||||
'Event line is missing dry-run mode marker: {0!r}. Please report this at '
|
||||
f'Event line is missing dry-run mode marker: {line!r}. Please report this at '
|
||||
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
|
||||
.format(line)
|
||||
)
|
||||
|
||||
|
||||
@ -334,9 +333,8 @@ def _warn_unparsable_line(line, warn_function):
|
||||
# Tell the user to report it to us :-)
|
||||
if warn_function:
|
||||
warn_function(
|
||||
'Cannot parse event from line: {0!r}. Please report this at '
|
||||
f'Cannot parse event from line: {line!r}. Please report this at '
|
||||
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
|
||||
.format(line)
|
||||
)
|
||||
|
||||
|
||||
@ -382,9 +380,8 @@ def parse_json_events(stderr, warn_function=None):
|
||||
continue
|
||||
if warn_function:
|
||||
warn_function(
|
||||
'Cannot parse event from non-JSON line: {0!r}. Please report this at '
|
||||
f'Cannot parse event from non-JSON line: {line!r}. Please report this at '
|
||||
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
|
||||
.format(line)
|
||||
)
|
||||
continue
|
||||
try:
|
||||
@ -392,9 +389,8 @@ def parse_json_events(stderr, warn_function=None):
|
||||
except Exception as exc:
|
||||
if warn_function:
|
||||
warn_function(
|
||||
'Cannot parse event from line: {0!r}: {1}. Please report this at '
|
||||
f'Cannot parse event from line: {line!r}: {exc}. Please report this at '
|
||||
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
|
||||
.format(line, exc)
|
||||
)
|
||||
continue
|
||||
if line_data.get('tail'):
|
||||
@ -449,9 +445,8 @@ def parse_json_events(stderr, warn_function=None):
|
||||
except KeyError:
|
||||
if warn_function:
|
||||
warn_function(
|
||||
'Unknown resource type {0!r} in line {1!r}. Please report this at '
|
||||
f'Unknown resource type {resource_type_str!r} in line {line!r}. Please report this at '
|
||||
'https://github.com/ansible-collections/community.docker/issues/new?assignees=&labels=&projects=&template=bug_report.md'
|
||||
.format(resource_type_str, line)
|
||||
)
|
||||
resource_type = ResourceType.UNKNOWN
|
||||
elif text in DOCKER_STATUS_PULL:
|
||||
@ -589,11 +584,7 @@ def emit_warnings(events, warn_function):
|
||||
for event in events:
|
||||
# If a message is present, assume it is a warning
|
||||
if (event.status is None and event.msg is not None) or event.status in DOCKER_STATUS_WARNING:
|
||||
warn_function('Docker compose: {resource_type} {resource_id}: {msg}'.format(
|
||||
resource_type=event.resource_type,
|
||||
resource_id=event.resource_id,
|
||||
msg=event.msg,
|
||||
))
|
||||
warn_function(f'Docker compose: {event.resource_type} {event.resource_id}: {event.msg}')
|
||||
|
||||
|
||||
def is_failed(events, rc):
|
||||
@ -610,22 +601,17 @@ def update_failed(result, events, args, stdout, stderr, rc, cli):
|
||||
if event.status in DOCKER_STATUS_ERROR:
|
||||
if event.resource_id is None:
|
||||
if event.resource_type == 'unknown':
|
||||
msg = 'General error: ' if event.resource_type == 'unknown' else 'Error when processing {resource_type}: '
|
||||
msg = 'General error: ' if event.resource_type == 'unknown' else f'Error when processing {event.resource_type}: '
|
||||
else:
|
||||
msg = 'Error when processing {resource_type} {resource_id}: '
|
||||
msg = f'Error when processing {event.resource_type} {event.resource_id}: '
|
||||
if event.resource_type == 'unknown':
|
||||
msg = 'Error when processing {resource_id}: '
|
||||
msg = f'Error when processing {event.resource_id}: '
|
||||
if event.resource_id == '':
|
||||
msg = 'General error: '
|
||||
msg += '{status}' if event.msg is None else '{msg}'
|
||||
errors.append(msg.format(
|
||||
resource_type=event.resource_type,
|
||||
resource_id=event.resource_id,
|
||||
status=event.status,
|
||||
msg=event.msg,
|
||||
))
|
||||
msg += f'{event.status}' if event.msg is None else f'{event.msg}'
|
||||
errors.append(msg)
|
||||
if not errors:
|
||||
errors.append('Return code {code} is non-zero'.format(code=rc))
|
||||
errors.append(f'Return code {rc} is non-zero')
|
||||
result['failed'] = True
|
||||
result['msg'] = '\n'.join(errors)
|
||||
result['cmd'] = ' '.join(quote(arg) for arg in [cli] + args)
|
||||
@ -706,24 +692,20 @@ class BaseComposeManager(DockerBaseClass):
|
||||
compose_version = self.get_compose_version()
|
||||
self.compose_version = LooseVersion(compose_version)
|
||||
if self.compose_version < LooseVersion(min_version):
|
||||
self.fail('Docker CLI {cli} has the compose plugin with version {version}; need version {min_version} or later'.format(
|
||||
cli=self.client.get_cli(),
|
||||
version=compose_version,
|
||||
min_version=min_version,
|
||||
))
|
||||
self.fail(f'Docker CLI {self.client.get_cli()} has the compose plugin with version {compose_version}; need version {min_version} or later')
|
||||
|
||||
if not os.path.isdir(self.project_src):
|
||||
self.fail('"{0}" is not a directory'.format(self.project_src))
|
||||
self.fail(f'"{self.project_src}" is not a directory')
|
||||
|
||||
self.check_files_existing = parameters['check_files_existing']
|
||||
if self.files:
|
||||
for file in self.files:
|
||||
path = os.path.join(self.project_src, file)
|
||||
if not os.path.exists(path):
|
||||
self.fail('Cannot find Compose file "{0}" relative to project directory "{1}"'.format(file, self.project_src))
|
||||
self.fail(f'Cannot find Compose file "{file}" relative to project directory "{self.project_src}"')
|
||||
elif self.check_files_existing and all(not os.path.exists(os.path.join(self.project_src, f)) for f in DOCKER_COMPOSE_FILES):
|
||||
filenames = ', '.join(DOCKER_COMPOSE_FILES[:-1])
|
||||
self.fail('"{0}" does not contain {1}, or {2}'.format(self.project_src, filenames, DOCKER_COMPOSE_FILES[-1]))
|
||||
self.fail(f'"{self.project_src}" does not contain {filenames}, or {DOCKER_COMPOSE_FILES[-1]}')
|
||||
|
||||
# Support for JSON output was added in Compose 2.29.0 (https://github.com/docker/compose/releases/tag/v2.29.0);
|
||||
# more precisely in https://github.com/docker/compose/pull/11478
|
||||
@ -747,12 +729,11 @@ class BaseComposeManager(DockerBaseClass):
|
||||
def get_compose_version_from_api(self):
|
||||
compose = self.client.get_client_plugin_info('compose')
|
||||
if compose is None:
|
||||
self.fail('Docker CLI {0} does not have the compose plugin installed'.format(self.client.get_cli()))
|
||||
self.fail(f'Docker CLI {self.client.get_cli()} does not have the compose plugin installed')
|
||||
if compose['Version'] == 'dev':
|
||||
self.fail(
|
||||
'Docker CLI {0} has a compose plugin installed, but it reports version "dev".'
|
||||
f'Docker CLI {self.client.get_cli()} has a compose plugin installed, but it reports version "dev".'
|
||||
' Please use a version of the plugin that returns a proper version.'
|
||||
.format(self.client.get_cli())
|
||||
)
|
||||
return compose['Version'].lstrip('v')
|
||||
|
||||
|
||||
@ -172,13 +172,13 @@ def put_file(client, container, in_path, out_path, user_id, group_id, mode=None,
|
||||
elif stat.S_ISLNK(file_stat.st_mode):
|
||||
stream = _symlink_tar_generator(b_in_path, file_stat, out_file, user_id, group_id, mode=mode, user_name=user_name)
|
||||
else:
|
||||
file_part = ' referenced by' if follow_links else ''
|
||||
raise DockerFileCopyError(
|
||||
'File{0} {1} is neither a regular file nor a symlink (stat mode {2}).'.format(
|
||||
' referenced by' if follow_links else '', in_path, oct(file_stat.st_mode)))
|
||||
f'File{file_part} {in_path} is neither a regular file nor a symlink (stat mode {oct(file_stat.st_mode)}).')
|
||||
|
||||
ok = _put_archive(client, container, out_dir, stream)
|
||||
if not ok:
|
||||
raise DockerUnexpectedError('Unknown error while creating file "{0}" in container "{1}".'.format(out_path, container))
|
||||
raise DockerUnexpectedError(f'Unknown error while creating file "{out_path}" in container "{container}".')
|
||||
|
||||
|
||||
def put_file_content(client, container, content, out_path, user_id, group_id, mode, user_name=None):
|
||||
@ -189,7 +189,7 @@ def put_file_content(client, container, content, out_path, user_id, group_id, mo
|
||||
|
||||
ok = _put_archive(client, container, out_dir, stream)
|
||||
if not ok:
|
||||
raise DockerUnexpectedError('Unknown error while creating file "{0}" in container "{1}".'.format(out_path, container))
|
||||
raise DockerUnexpectedError(f'Unknown error while creating file "{out_path}" in container "{container}".')
|
||||
|
||||
|
||||
def stat_file(client, container, in_path, follow_links=False, log=None):
|
||||
@ -208,7 +208,7 @@ def stat_file(client, container, in_path, follow_links=False, log=None):
|
||||
|
||||
while True:
|
||||
if in_path in considered_in_paths:
|
||||
raise DockerFileCopyError('Found infinite symbolic link loop when trying to stating "{0}"'.format(in_path))
|
||||
raise DockerFileCopyError(f'Found infinite symbolic link loop when trying to stating "{in_path}"')
|
||||
considered_in_paths.add(in_path)
|
||||
|
||||
if log:
|
||||
@ -226,8 +226,7 @@ def stat_file(client, container, in_path, follow_links=False, log=None):
|
||||
stat_data = json.loads(base64.b64decode(header))
|
||||
except Exception as exc:
|
||||
raise DockerUnexpectedError(
|
||||
'When retrieving information for {in_path} from {container}, obtained header {header!r} that cannot be loaded as JSON: {exc}'
|
||||
.format(in_path=in_path, container=container, header=header, exc=exc)
|
||||
f'When retrieving information for {in_path} from {container}, obtained header {header!r} that cannot be loaded as JSON: {exc}'
|
||||
)
|
||||
|
||||
# https://pkg.go.dev/io/fs#FileMode: bit 32 - 5 means ModeSymlink
|
||||
@ -285,7 +284,7 @@ def fetch_file_ex(client, container, in_path, process_none, process_regular, pro
|
||||
|
||||
while True:
|
||||
if in_path in considered_in_paths:
|
||||
raise DockerFileCopyError('Found infinite symbolic link loop when trying to fetch "{0}"'.format(in_path))
|
||||
raise DockerFileCopyError(f'Found infinite symbolic link loop when trying to fetch "{in_path}"')
|
||||
considered_in_paths.add(in_path)
|
||||
|
||||
if log:
|
||||
@ -331,8 +330,7 @@ def fetch_file(client, container, in_path, out_path, follow_links=False, log=Non
|
||||
|
||||
def process_none(in_path):
|
||||
raise DockerFileNotFound(
|
||||
'File {in_path} does not exist in container {container}'
|
||||
.format(in_path=in_path, container=container)
|
||||
f'File {in_path} does not exist in container {container}'
|
||||
)
|
||||
|
||||
def process_regular(in_path, tar, member):
|
||||
@ -359,7 +357,7 @@ def fetch_file(client, container, in_path, out_path, follow_links=False, log=Non
|
||||
|
||||
def _execute_command(client, container, command, log=None, check_rc=False):
|
||||
if log:
|
||||
log('Executing {command} in {container}'.format(command=command, container=container))
|
||||
log(f'Executing {command} in {container}')
|
||||
|
||||
data = {
|
||||
'Container': container,
|
||||
@ -378,10 +376,10 @@ def _execute_command(client, container, command, log=None, check_rc=False):
|
||||
try:
|
||||
exec_data = client.post_json_to_json('/containers/{0}/exec', container, data=data)
|
||||
except NotFound as e:
|
||||
raise DockerFileCopyError('Could not find container "{container}"'.format(container=container)) from e
|
||||
raise DockerFileCopyError(f'Could not find container "{container}"') from e
|
||||
except APIError as e:
|
||||
if e.response is not None and e.response.status_code == 409:
|
||||
raise DockerFileCopyError('Cannot execute command in paused container "{container}"'.format(container=container)) from e
|
||||
raise DockerFileCopyError(f'Cannot execute command in paused container "{container}"') from e
|
||||
raise
|
||||
exec_id = exec_data['Id']
|
||||
|
||||
@ -398,12 +396,12 @@ def _execute_command(client, container, command, log=None, check_rc=False):
stderr = stderr or b''

if log:
log('Exit code {rc}, stdout {stdout!r}, stderr {stderr!r}'.format(rc=rc, stdout=stdout, stderr=stderr))
log(f'Exit code {rc}, stdout {stdout!r}, stderr {stderr!r}')

if check_rc and rc != 0:
command_str = ' '.join(command)
raise DockerUnexpectedError(
'Obtained unexpected exit code {rc} when running "{command}" in {container}.\nSTDOUT: {stdout}\nSTDERR: {stderr}'
.format(command=' '.join(command), container=container, rc=rc, stdout=stdout, stderr=stderr)
f'Obtained unexpected exit code {rc} when running "{command_str}" in {container}.\nSTDOUT: {stdout}\nSTDERR: {stderr}'
)

return rc, stdout, stderr
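Two details of this hunk are worth calling out. ' '.join(command) is hoisted into command_str because, before Python 3.12, a replacement field may not contain the quote character that delimits the f-string itself; and unlike a template handed to .format() later, an f-string is evaluated eagerly where it appears, so the joined command is computed at the moment the message is built. A small hedged sketch, with command as an assumed example value:

command = ['ls', '-l', '/tmp']

# f'... "{' '.join(command)}" ...' is a syntax error before Python 3.12,
# because the inner ' would terminate the f-string. Hoisting the join avoids that.
command_str = ' '.join(command)
message = f'Obtained unexpected exit code 1 when running "{command_str}" in web-1.'

assert message == 'Obtained unexpected exit code 1 when running "ls -l /tmp" in web-1.'
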
@ -415,8 +413,7 @@ def determine_user_group(client, container, log=None):
stdout_lines = stdout.splitlines()
if len(stdout_lines) != 2:
raise DockerUnexpectedError(
'Expected two-line output to obtain user and group ID for container {container}, but got {lc} lines:\n{stdout}'
.format(container=container, lc=len(stdout_lines), stdout=stdout)
f'Expected two-line output to obtain user and group ID for container {container}, but got {len(stdout_lines)} lines:\n{stdout}'
)

user_id, group_id = stdout_lines
@ -424,6 +421,5 @@ def determine_user_group(client, container, log=None):
return int(user_id), int(group_id)
except ValueError:
raise DockerUnexpectedError(
'Expected two-line output with numeric IDs to obtain user and group ID for container {container}, but got "{l1}" and "{l2}" instead'
.format(container=container, l1=user_id, l2=group_id)
f'Expected two-line output with numeric IDs to obtain user and group ID for container {container}, but got "{user_id}" and "{group_id}" instead'
)

@ -93,7 +93,7 @@ def load_archived_image_manifest(archive_path):
config_file = meta['Config']
except KeyError as exc:
raise ImageArchiveInvalidException(
"Failed to get Config entry from {0}th manifest in manifest.json: {1}".format(index + 1, to_native(exc))
f"Failed to get Config entry from {index + 1}th manifest in manifest.json: {exc}"
) from exc

# Extracts hash without 'sha256:' prefix
@ -102,7 +102,7 @@ def load_archived_image_manifest(archive_path):
image_id = os.path.splitext(config_file)[0]
except Exception as exc:
raise ImageArchiveInvalidException(
"Failed to extract image id from config file name %s: %s" % (config_file, to_native(exc))
f"Failed to extract image id from config file name {config_file}: {exc}"
) from exc

for prefix in (
@ -115,7 +115,7 @@ def load_archived_image_manifest(archive_path):
repo_tags = meta['RepoTags']
except KeyError as exc:
raise ImageArchiveInvalidException(
"Failed to get RepoTags entry from {0}th manifest in manifest.json: {1}".format(index + 1, to_native(exc))
f"Failed to get RepoTags entry from {index + 1}th manifest in manifest.json: {exc}"
) from exc

result.append(ImageArchiveManifestSummary(

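Although the commit message talks about str.format(), the middle hunk above also folds an old %-style interpolation into an f-string; both styles map onto the same replacement-field syntax, and arithmetic such as index + 1 can sit directly inside the field. A hedged sketch of the equivalence, with made-up values:

config_file, index = 'abc123.json', 0
exc = KeyError('Config')

percent_style = "Failed to extract image id from config file name %s: %s" % (config_file, exc)
fstring_style = f"Failed to extract image id from config file name {config_file}: {exc}"
format_style = "Failed to get Config entry from {0}th manifest in manifest.json: {1}".format(index + 1, exc)

assert percent_style == fstring_style
assert format_style == f"Failed to get Config entry from {index + 1}th manifest in manifest.json: {exc}"
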
@ -365,15 +365,15 @@ def _parse_port_range(range_or_port, module):
|
||||
try:
|
||||
start, end = [int(port) for port in range_or_port.split('-')]
|
||||
except Exception:
|
||||
module.fail_json(msg='Invalid port range: "{0}"'.format(range_or_port))
|
||||
module.fail_json(msg=f'Invalid port range: "{range_or_port}"')
|
||||
if end < start:
|
||||
module.fail_json(msg='Invalid port range: "{0}"'.format(range_or_port))
|
||||
module.fail_json(msg=f'Invalid port range: "{range_or_port}"')
|
||||
return list(range(start, end + 1))
|
||||
else:
|
||||
try:
|
||||
return [int(range_or_port)]
|
||||
except Exception:
|
||||
module.fail_json(msg='Invalid port: "{0}"'.format(range_or_port))
|
||||
module.fail_json(msg=f'Invalid port: "{range_or_port}"')
|
||||
|
||||
|
||||
def _split_colon_ipv6(text, module):
|
||||
@ -391,7 +391,7 @@ def _split_colon_ipv6(text, module):
|
||||
break
|
||||
j = text.find(']', i)
|
||||
if j < 0:
|
||||
module.fail_json(msg='Cannot find closing "]" in input "{0}" for opening "[" at index {1}!'.format(text, i + 1))
|
||||
module.fail_json(msg=f'Cannot find closing "]" in input "{text}" for opening "[" at index {i + 1}!')
|
||||
result.extend(text[start:i].split(':'))
|
||||
k = text.find(':', j)
|
||||
if k < 0:
|
||||
@ -571,9 +571,9 @@ def _preprocess_mounts(module, values):
|
||||
def check_collision(t, name):
|
||||
if t in last:
|
||||
if name == last[t]:
|
||||
module.fail_json(msg='The mount point "{0}" appears twice in the {1} option'.format(t, name))
|
||||
module.fail_json(msg=f'The mount point "{t}" appears twice in the {name} option')
|
||||
else:
|
||||
module.fail_json(msg='The mount point "{0}" appears both in the {1} and {2} option'.format(t, name, last[t]))
|
||||
module.fail_json(msg=f'The mount point "{t}" appears both in the {name} and {last[t]} option')
|
||||
last[t] = name
|
||||
|
||||
if 'mounts' in values:
|
||||
@ -588,17 +588,13 @@ def _preprocess_mounts(module, values):
|
||||
|
||||
# Sanity checks
|
||||
if mount['source'] is None and mount_type not in ('tmpfs', 'volume', 'image', 'cluster'):
|
||||
module.fail_json(msg='source must be specified for mount "{0}" of type "{1}"'.format(target, mount_type))
|
||||
module.fail_json(msg=f'source must be specified for mount "{target}" of type "{mount_type}"')
|
||||
for option, req_mount_types in _MOUNT_OPTION_TYPES.items():
|
||||
if mount[option] is not None and mount_type not in req_mount_types:
|
||||
type_plural = "" if len(req_mount_types) == 1 else "s"
|
||||
type_list = '", "'.join(req_mount_types)
|
||||
module.fail_json(
|
||||
msg='{0} cannot be specified for mount "{1}" of type "{2}" (needs type{3} "{4}")'.format(
|
||||
option,
|
||||
target,
|
||||
mount_type,
|
||||
"" if len(req_mount_types) == 1 else "s",
|
||||
'", "'.join(req_mount_types),
|
||||
)
|
||||
msg=f'{option} cannot be specified for mount "{target}" of type "{mount_type}" (needs type{type_plural} "{type_list}")'
|
||||
)
|
||||
|
||||
# Streamline options
|
||||
@ -611,22 +607,22 @@ def _preprocess_mounts(module, values):
|
||||
try:
|
||||
mount_dict['tmpfs_size'] = human_to_bytes(mount_dict['tmpfs_size'])
|
||||
except ValueError as exc:
|
||||
module.fail_json(msg='Failed to convert tmpfs_size of mount "{0}" to bytes: {1}'.format(target, to_native(exc)))
|
||||
module.fail_json(msg=f'Failed to convert tmpfs_size of mount "{target}" to bytes: {exc}')
|
||||
if mount_dict['tmpfs_mode'] is not None:
|
||||
try:
|
||||
mount_dict['tmpfs_mode'] = int(mount_dict['tmpfs_mode'], 8)
|
||||
except Exception as dummy:
|
||||
module.fail_json(msg='tmp_fs mode of mount "{0}" is not an octal string!'.format(target))
|
||||
module.fail_json(msg=f'tmp_fs mode of mount "{target}" is not an octal string!')
|
||||
if mount_dict['tmpfs_options']:
opts = []
for idx, opt in enumerate(mount_dict['tmpfs_options']):
if len(opt) != 1:
module.fail_json(msg='tmpfs_options[{1}] of mount "{0}" must be a one-element dictionary!'.format(target, idx + 1))
module.fail_json(msg=f'tmpfs_options[{idx + 1}] of mount "{target}" must be a one-element dictionary!')
k, v = list(opt.items())[0]
if not isinstance(k, str):
module.fail_json(msg='key {2!r} in tmpfs_options[{1}] of mount "{0}" must be a string!'.format(target, idx + 1, k))
module.fail_json(msg=f'key {k!r} in tmpfs_options[{idx + 1}] of mount "{target}" must be a string!')
if v is not None and not isinstance(v, str):
module.fail_json(msg='value {2!r} in tmpfs_options[{1}] of mount "{0}" must be a string or null/none!'.format(target, idx + 1, v))
module.fail_json(msg=f'value {v!r} in tmpfs_options[{idx + 1}] of mount "{target}" must be a string or null/none!')
opts.append([k, v] if v is not None else [k])
mount_dict['tmpfs_options'] = opts

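The old calls above interleaved out-of-order positional indices ({2!r}, {1}, {0}) with an argument tuple; the f-string form names each value where it is rendered, so the index bookkeeping disappears and expressions such as idx + 1 or the !r conversion read in place. A hedged sketch with made-up values:

target, idx, k = '/data', 0, 42

old = 'key {2!r} in tmpfs_options[{1}] of mount "{0}" must be a string!'.format(target, idx + 1, k)
new = f'key {k!r} in tmpfs_options[{idx + 1}] of mount "{target}" must be a string!'

assert old == new == 'key 42 in tmpfs_options[1] of mount "/data" must be a string!'
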
@ -641,7 +637,7 @@ def _preprocess_mounts(module, values):
|
||||
if len(parts) == 3:
|
||||
host, container, mode = parts
|
||||
if not _is_volume_permissions(mode):
|
||||
module.fail_json(msg='Found invalid volumes mode: {0}'.format(mode))
|
||||
module.fail_json(msg=f'Found invalid volumes mode: {mode}')
|
||||
if re.match(r'[.~]', host):
|
||||
host = os.path.abspath(os.path.expanduser(host))
|
||||
check_collision(container, 'volumes')
|
||||
@ -664,7 +660,7 @@ def _preprocess_mounts(module, values):
|
||||
if len(parts) == 3:
|
||||
host, container, mode = parts
|
||||
if not _is_volume_permissions(mode):
|
||||
module.fail_json(msg='Found invalid volumes mode: {0}'.format(mode))
|
||||
module.fail_json(msg=f'Found invalid volumes mode: {mode}')
|
||||
elif len(parts) == 2:
|
||||
if not _is_volume_permissions(parts[1]):
|
||||
host, container, mode = (parts + ['rw'])
|
||||
@ -735,7 +731,7 @@ def _preprocess_ports(module, values):
if not re.match(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$', parts[0]) and not re.match(r'^\[[0-9a-fA-F:]+(?:|%[^\]/]+)\]$', ipaddr):
module.fail_json(
msg='Bind addresses for published ports must be IPv4 or IPv6 addresses, not hostnames. '
'Use the dig lookup to resolve hostnames. (Found hostname: {0})'.format(ipaddr)
f'Use the dig lookup to resolve hostnames. (Found hostname: {ipaddr})'
)
if re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr):
ipaddr = ipaddr[1:-1]
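The message above is built from two adjacent string literals that Python concatenates implicitly; only the second literal contains a placeholder, so only it needs the f prefix, while the plain first literal can stay as it is (prefixing it as well would be harmless). A hedged sketch with an assumed hostname:

ipaddr = 'docker-host.example.com'

msg = ('Bind addresses for published ports must be IPv4 or IPv6 addresses, not hostnames. '
       f'Use the dig lookup to resolve hostnames. (Found hostname: {ipaddr})')

assert msg.endswith('(Found hostname: docker-host.example.com)')
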
@ -753,7 +749,7 @@ def _preprocess_ports(module, values):
)

for bind, container_port in zip(port_binds, container_ports):
idx = '{0}/{1}'.format(container_port, protocol) if protocol else container_port
idx = f'{container_port}/{protocol}' if protocol else container_port
if idx in binds:
old_bind = binds[idx]
if isinstance(old_bind, list):

@ -389,10 +389,10 @@ class DockerAPIEngineDriver(EngineDriver):
try:
runner()
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())


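Across the modules the to_native(e) wrapper disappears from these messages: a replacement field already calls str() on its expression, and on Python 3 str(exception) is already text, so the extra conversion adds nothing. That is also why several hunks later in this commit drop to_native from the converters import line. A hedged sketch, using str() in place of Ansible's to_native (which behaves the same for this purpose on Python 3) and a made-up exception:

e = ConnectionError('no route to host')

old = 'An unexpected Docker error occurred: {0}'.format(str(e))
new = f'An unexpected Docker error occurred: {e}'

assert old == new == 'An unexpected Docker error occurred: no route to host'
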
@ -611,7 +611,7 @@ def _get_default_host_ip(module, client):
|
||||
network = client.get_network(network_data['name'])
|
||||
if network is None:
|
||||
client.fail(
|
||||
"Cannot inspect the network '{0}' to determine the default IP".format(network_data['name']),
|
||||
f"Cannot inspect the network '{network_data['name']}' to determine the default IP",
|
||||
)
|
||||
if network.get('Driver') == 'bridge' and network.get('Options', {}).get('com.docker.network.bridge.host_binding_ipv4'):
|
||||
ip = network['Options']['com.docker.network.bridge.host_binding_ipv4']
|
||||
@ -832,9 +832,10 @@ def _ignore_mismatching_label_result(module, client, api_version, option, image,
|
||||
# Format label for error message
|
||||
would_remove_labels.append('"%s"' % (label, ))
|
||||
if would_remove_labels:
|
||||
labels = ', '.join(would_remove_labels)
|
||||
msg = ("Some labels should be removed but are present in the base image. You can set image_label_mismatch to 'ignore' to ignore"
|
||||
" this error. Labels: {0}")
|
||||
client.fail(msg.format(', '.join(would_remove_labels)))
|
||||
f" this error. Labels: {labels}")
|
||||
client.fail(msg)
|
||||
return False
|
||||
|
||||
|
||||
@ -1282,9 +1283,9 @@ def _preprocess_container_names(module, client, api_version, value):
if container is None:
# If we cannot find the container, issue a warning and continue with
# what the user specified.
module.warn('Cannot find a container with name or ID "{0}"'.format(container_name))
module.warn(f'Cannot find a container with name or ID "{container_name}"')
return value
return 'container:{0}'.format(container['Id'])
return f"container:{container['Id']}"


def _get_value_command(module, container, api_version, options, image, host_info):

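The last return statement also switches the string delimiter from single to double quotes: before Python 3.12 the expression inside the braces may not reuse the f-string's own quote character, so the container['Id'] subscript needs a delimiter different from the one around the key. Flipping either the outer quotes (as done here) or the inner ones works. A hedged sketch with a made-up container dict:

container = {'Id': '4f1c2a'}

# f'container:{container['Id']}' is a syntax error before Python 3.12.
value = f"container:{container['Id']}"

assert value == 'container:4f1c2a'
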
@ -102,11 +102,11 @@ class ContainerManager(DockerBaseClass):
|
||||
if re.match(r'^\[[0-9a-fA-F:]+\]$', self.param_default_host_ip):
|
||||
valid_ip = True
|
||||
if re.match(r'^[0-9a-fA-F:]+$', self.param_default_host_ip):
|
||||
self.param_default_host_ip = '[{0}]'.format(self.param_default_host_ip)
|
||||
self.param_default_host_ip = f'[{self.param_default_host_ip}]'
|
||||
valid_ip = True
|
||||
if not valid_ip:
|
||||
self.fail('The value of default_host_ip must be an empty string, an IPv4 address, '
|
||||
'or an IPv6 address. Got "{0}" instead.'.format(self.param_default_host_ip))
|
||||
f'or an IPv6 address. Got "{self.param_default_host_ip}" instead.')
|
||||
|
||||
def _collect_all_options(self, active_options):
|
||||
all_options = {}
|
||||
@ -228,8 +228,8 @@ class ContainerManager(DockerBaseClass):
|
||||
if result is None:
|
||||
if accept_removal:
|
||||
return result
|
||||
msg = 'Encontered vanished container while waiting for container "{0}"'
|
||||
self.fail(msg.format(container_id))
|
||||
msg = f'Encontered vanished container while waiting for container "{container_id}"'
|
||||
self.fail(msg)
|
||||
# Check container state
|
||||
state_info = result.get('State') or {}
|
||||
if health_state:
|
||||
@ -238,13 +238,13 @@ class ContainerManager(DockerBaseClass):
if complete_states is not None and state in complete_states:
return result
if wait_states is not None and state not in wait_states:
msg = 'Encontered unexpected state "{1}" while waiting for container "{0}"'
self.fail(msg.format(container_id, state), container=result)
msg = f'Encontered unexpected state "{state}" while waiting for container "{container_id}"'
self.fail(msg, container=result)
# Wait
if max_wait is not None:
if total_wait > max_wait or delay < 1E-4:
msg = 'Timeout of {1} seconds exceeded while waiting for container "{0}"'
self.fail(msg.format(container_id, max_wait), container=result)
msg = f'Timeout of {max_wait} seconds exceeded while waiting for container "{container_id}"'
self.fail(msg, container=result)
if total_wait + delay > max_wait:
delay = max_wait - total_wait
sleep(delay)
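Here the message used to be stored as a bare template and filled in by .format() on the next line; with an f-string the interpolation happens the moment the assignment runs, so the rewrite only works because container_id and max_wait are already in scope at that point. A template that genuinely has to be filled in later (reused with different values, or loaded from configuration) would need to remain a .format() or Template string. A hedged sketch of the difference, with assumed values:

container_id, max_wait = 'abc123', 30

# Deferred: the template exists first, the values are supplied later.
template = 'Timeout of {1} seconds exceeded while waiting for container "{0}"'
deferred = template.format(container_id, max_wait)

# Eager: the f-string is already the finished message when it is assigned.
eager = f'Timeout of {max_wait} seconds exceeded while waiting for container "{container_id}"'

assert deferred == eager
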
@ -674,7 +674,7 @@ class ContainerManager(DockerBaseClass):
|
||||
self.diff['differences'] = [dict(network_differences=network_differences)]
|
||||
for netdiff in network_differences:
|
||||
self.diff_tracker.add(
|
||||
'network.{0}'.format(netdiff['parameter']['name']),
|
||||
f"network.{netdiff['parameter']['name']}",
|
||||
parameter=netdiff['parameter'],
|
||||
active=netdiff['container']
|
||||
)
|
||||
@ -691,7 +691,7 @@ class ContainerManager(DockerBaseClass):
|
||||
self.diff['differences'] = [dict(purge_networks=extra_networks)]
|
||||
for extra_network in extra_networks:
|
||||
self.diff_tracker.add(
|
||||
'network.{0}'.format(extra_network['name']),
|
||||
f"network.{extra_network['name']}",
|
||||
active=extra_network
|
||||
)
|
||||
self.results['changed'] = True
|
||||
|
||||
@ -89,7 +89,7 @@ class DockerSocketHandlerBase(object):
|
||||
if data is None:
|
||||
# no data available
|
||||
return
|
||||
self._log('read {0} bytes'.format(len(data)))
|
||||
self._log(f'read {len(data)} bytes')
|
||||
if len(data) == 0:
|
||||
# Stream EOF
|
||||
self._eof = True
|
||||
@ -123,7 +123,7 @@ class DockerSocketHandlerBase(object):
|
||||
if len(self._write_buffer) > 0:
|
||||
written = write_to_socket(self._sock, self._write_buffer)
|
||||
self._write_buffer = self._write_buffer[written:]
|
||||
self._log('wrote {0} bytes, {1} are left'.format(written, len(self._write_buffer)))
|
||||
self._log(f'wrote {written} bytes, {len(self._write_buffer)} are left')
|
||||
if len(self._write_buffer) > 0:
|
||||
self._selector.modify(self._sock, self._selectors.EVENT_READ | self._selectors.EVENT_WRITE)
|
||||
else:
|
||||
@ -147,14 +147,13 @@ class DockerSocketHandlerBase(object):
|
||||
return True
|
||||
if timeout is not None:
|
||||
timeout -= PARAMIKO_POLL_TIMEOUT
|
||||
self._log('select... ({0})'.format(timeout))
|
||||
self._log(f'select... ({timeout})')
|
||||
events = self._selector.select(timeout)
|
||||
for key, event in events:
|
||||
if key.fileobj == self._sock:
|
||||
self._log(
|
||||
'select event read:{0} write:{1}'.format(
|
||||
event & self._selectors.EVENT_READ != 0,
|
||||
event & self._selectors.EVENT_WRITE != 0))
|
||||
ev_read = event & self._selectors.EVENT_READ != 0
|
||||
ev_write = event & self._selectors.EVENT_WRITE != 0
|
||||
self._log(f'select event read:{ev_read} write:{ev_write}')
|
||||
if event & self._selectors.EVENT_READ != 0:
|
||||
self._read()
|
||||
if event & self._selectors.EVENT_WRITE != 0:
|
||||
@ -183,7 +182,7 @@ class DockerSocketHandlerBase(object):
|
||||
elif stream_id == docker_socket.STDERR:
|
||||
stderr.append(data)
|
||||
else:
|
||||
raise ValueError('{0} is not a valid stream ID'.format(stream_id))
|
||||
raise ValueError(f'{stream_id} is not a valid stream ID')
|
||||
|
||||
self.end_of_writing()
|
||||
|
||||
|
||||
@ -44,7 +44,7 @@ def shutdown_writing(sock, log=_empty_writer):
|
||||
sock.shutdown(pysocket.SHUT_WR)
|
||||
except TypeError as e:
|
||||
# probably: "TypeError: shutdown() takes 1 positional argument but 2 were given"
|
||||
log('Shutting down for writing not possible; trying shutdown instead: {0}'.format(e))
|
||||
log(f'Shutting down for writing not possible; trying shutdown instead: {e}')
|
||||
sock.shutdown()
|
||||
elif isinstance(sock, getattr(pysocket, 'SocketIO')):
|
||||
sock._sock.shutdown(pysocket.SHUT_WR)
|
||||
|
||||
@ -289,13 +289,9 @@ def sanitize_labels(labels, labels_field, client=None, module=None):
|
||||
return
|
||||
for k, v in list(labels.items()):
|
||||
if not isinstance(k, str):
|
||||
fail(
|
||||
"The key {key!r} of {field} is not a string!".format(
|
||||
field=labels_field, key=k))
|
||||
fail(f"The key {k!r} of {labels_field} is not a string!")
|
||||
if isinstance(v, (bool, float)):
|
||||
fail(
|
||||
"The value {value!r} for {key!r} of {field} is not a string or something than can be safely converted to a string!".format(
|
||||
field=labels_field, key=k, value=v))
|
||||
fail(f"The value {v!r} for {k!r} of {labels_field} is not a string or something than can be safely converted to a string!")
|
||||
labels[k] = to_text(v)
|
||||
|
||||
|
||||
@ -389,8 +385,7 @@ def normalize_healthcheck(healthcheck, normalize_test=False):
|
||||
value = int(value)
|
||||
except ValueError:
|
||||
raise ValueError(
|
||||
'Cannot parse number of retries for healthcheck. '
|
||||
'Expected an integer, got "{0}".'.format(value)
|
||||
f'Cannot parse number of retries for healthcheck. Expected an integer, got "{value}".'
|
||||
)
|
||||
if key == 'test' and value and normalize_test:
|
||||
value = normalize_healthcheck_test(value)
|
||||
|
||||
@ -439,7 +439,6 @@ actions:
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.common.validation import check_type_int
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils.common_cli import (
|
||||
AnsibleModuleDockerClient,
|
||||
@ -691,7 +690,7 @@ def main():
|
||||
manager.cleanup()
|
||||
client.module.exit_json(**result)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -167,7 +167,7 @@ rc:
|
||||
import shlex
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_text, to_native
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils.common_cli import (
|
||||
AnsibleModuleDockerClient,
|
||||
@ -232,7 +232,7 @@ class ExecManager(BaseComposeManager):
|
||||
if self.env:
|
||||
for name, value in list(self.env.items()):
|
||||
args.append('--env')
|
||||
args.append('{0}={1}'.format(name, value))
|
||||
args.append(f'{name}={value}')
|
||||
args.append('--')
|
||||
args.append(self.service)
|
||||
args.extend(self.argv)
|
||||
@ -295,7 +295,7 @@ def main():
|
||||
manager.cleanup()
|
||||
client.module.exit_json(**result)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -112,8 +112,6 @@ actions:
|
||||
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils.common_cli import (
|
||||
AnsibleModuleDockerClient,
|
||||
DockerException,
|
||||
@ -139,12 +137,12 @@ class PullManager(BaseComposeManager):
|
||||
|
||||
if self.policy != 'always' and self.compose_version < LooseVersion('2.22.0'):
|
||||
# https://github.com/docker/compose/pull/10981 - 2.22.0
|
||||
self.fail('A pull policy other than always is only supported since Docker Compose 2.22.0. {0} has version {1}'.format(
|
||||
self.client.get_cli(), self.compose_version))
|
||||
self.fail(
|
||||
f'A pull policy other than always is only supported since Docker Compose 2.22.0. {self.client.get_cli()} has version {self.compose_version}')
|
||||
if self.ignore_buildable and self.compose_version < LooseVersion('2.15.0'):
|
||||
# https://github.com/docker/compose/pull/10134 - 2.15.0
|
||||
self.fail('--ignore-buildable is only supported since Docker Compose 2.15.0. {0} has version {1}'.format(
|
||||
self.client.get_cli(), self.compose_version))
|
||||
self.fail(
|
||||
f'--ignore-buildable is only supported since Docker Compose 2.15.0. {self.client.get_cli()} has version {self.compose_version}')
|
||||
|
||||
def get_pull_cmd(self, dry_run, no_start=False):
|
||||
args = self.get_base_args() + ['pull']
|
||||
@ -196,7 +194,7 @@ def main():
|
||||
manager.cleanup()
|
||||
client.module.exit_json(**result)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected docker error occurred: {e}', exception=traceback.format_exc())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -240,7 +240,7 @@ rc:
|
||||
import shlex
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_text, to_native
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils.common_cli import (
|
||||
AnsibleModuleDockerClient,
|
||||
@ -349,7 +349,7 @@ class ExecManager(BaseComposeManager):
|
||||
if self.env:
|
||||
for name, value in list(self.env.items()):
|
||||
args.append('--env')
|
||||
args.append('{0}={1}'.format(name, value))
|
||||
args.append(f'{name}={value}')
|
||||
args.append('--')
|
||||
args.append(self.service)
|
||||
if self.argv:
|
||||
@ -428,7 +428,7 @@ def main():
|
||||
manager.cleanup()
|
||||
client.module.exit_json(**result)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -242,7 +242,7 @@ class ConfigManager(DockerBaseClass):
|
||||
with open(data_src, 'rb') as f:
|
||||
self.data = f.read()
|
||||
except Exception as exc:
|
||||
self.client.fail('Error while reading {src}: {error}'.format(src=data_src, error=to_native(exc)))
|
||||
self.client.fail(f'Error while reading {data_src}: {exc}')
|
||||
self.labels = parameters.get('labels')
|
||||
self.force = parameters.get('force')
|
||||
self.rolling_versions = parameters.get('rolling_versions')
|
||||
@ -287,7 +287,7 @@ class ConfigManager(DockerBaseClass):
|
||||
self.configs = [
|
||||
config
|
||||
for config in configs
|
||||
if config['Spec']['Name'].startswith('{name}_v'.format(name=self.name))
|
||||
if config['Spec']['Name'].startswith(f'{self.name}_v')
|
||||
]
|
||||
self.configs.sort(key=self.get_version)
|
||||
else:
|
||||
@ -305,7 +305,7 @@ class ConfigManager(DockerBaseClass):
|
||||
if self.rolling_versions:
|
||||
self.version += 1
|
||||
labels['ansible_version'] = str(self.version)
|
||||
self.name = '{name}_v{version}'.format(name=self.name, version=self.version)
|
||||
self.name = f'{self.name}_v{self.version}'
|
||||
if self.labels:
|
||||
labels.update(self.labels)
|
||||
|
||||
@ -425,10 +425,10 @@ def main():
|
||||
ConfigManager(client, results)()
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -451,7 +451,7 @@ def is_file_idempotent(client, container, managed_path, container_path, follow_l
|
||||
file_stat = os.stat(managed_path) if local_follow_links else os.lstat(managed_path)
|
||||
except OSError as exc:
|
||||
if exc.errno == 2:
|
||||
raise DockerFileNotFound('Cannot find local file {managed_path}'.format(managed_path=managed_path))
|
||||
raise DockerFileNotFound(f'Cannot find local file {managed_path}')
|
||||
raise
|
||||
if mode is None:
|
||||
mode = stat.S_IMODE(file_stat.st_mode)
|
||||
@ -786,13 +786,13 @@ def parse_modern(mode):
|
||||
return int(to_native(mode), 8)
|
||||
if isinstance(mode, int):
|
||||
return mode
|
||||
raise TypeError('must be an octal string or an integer, got {mode!r}'.format(mode=mode))
|
||||
raise TypeError(f'must be an octal string or an integer, got {mode!r}')
|
||||
|
||||
|
||||
def parse_octal_string_only(mode):
|
||||
if isinstance(mode, str):
|
||||
return int(to_native(mode), 8)
|
||||
raise TypeError('must be an octal string, got {mode!r}'.format(mode=mode))
|
||||
raise TypeError(f'must be an octal string, got {mode!r}')
|
||||
|
||||
|
||||
def main():
|
||||
@ -847,16 +847,16 @@ def main():
|
||||
elif mode_parse == 'octal_string_only':
|
||||
mode = parse_octal_string_only(mode)
|
||||
except (TypeError, ValueError) as e:
|
||||
client.fail("Error while parsing 'mode': {error}".format(error=e))
|
||||
client.fail(f"Error while parsing 'mode': {e}")
|
||||
if mode < 0:
|
||||
client.fail("'mode' must not be negative; got {mode}".format(mode=mode))
|
||||
client.fail(f"'mode' must not be negative; got {mode}")
|
||||
|
||||
if content is not None:
|
||||
if client.module.params['content_is_b64']:
|
||||
try:
|
||||
content = base64.b64decode(content)
|
||||
except Exception as e: # depending on Python version and error, multiple different exceptions can be raised
|
||||
client.fail('Cannot Base64 decode the content option: {0}'.format(e))
|
||||
client.fail(f'Cannot Base64 decode the content option: {e}')
|
||||
else:
|
||||
content = to_bytes(content)
|
||||
|
||||
@ -901,21 +901,21 @@ def main():
# Can happen if a user explicitly passes `content: null` or `path: null`...
client.fail('One of path and content must be supplied')
except NotFound as exc:
client.fail('Could not find container "{1}" or resource in it ({0})'.format(exc, container))
client.fail(f'Could not find container "{container}" or resource in it ({exc})')
except APIError as exc:
client.fail('An unexpected Docker error occurred for container "{1}": {0}'.format(exc, container), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred for container "{container}": {exc}', exception=traceback.format_exc())
except DockerException as exc:
client.fail('An unexpected Docker error occurred for container "{1}": {0}'.format(exc, container), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred for container "{container}": {exc}', exception=traceback.format_exc())
except RequestException as exc:
client.fail(
'An unexpected requests error occurred for container "{1}" when trying to talk to the Docker daemon: {0}'.format(exc, container),
f'An unexpected requests error occurred for container "{container}" when trying to talk to the Docker daemon: {exc}',
exception=traceback.format_exc())
except DockerUnexpectedError as exc:
client.fail('Unexpected error: {exc}'.format(exc=to_native(exc)), exception=traceback.format_exc())
client.fail(f'Unexpected error: {exc}', exception=traceback.format_exc())
except DockerFileCopyError as exc:
client.fail(to_native(exc))
except OSError as exc:
client.fail('Unexpected error: {exc}'.format(exc=to_native(exc)), exception=traceback.format_exc())
client.fail(f'Unexpected error: {exc}', exception=traceback.format_exc())


if __name__ == '__main__':

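One line in this hunk is intentionally left alone: client.fail(to_native(exc)) has no surrounding text, so there is no template to convert, and wrapping it as f'{exc}' would only obscure a plain string conversion. A hedged sketch of the distinction, with a made-up exception:

exc = OSError('No such file or directory')

# With surrounding text, the f-string replaces .format():
with_text = f'Unexpected error: {exc}'

# With no surrounding text there is nothing to interpolate;
# str(exc) (or Ansible's to_native(exc)) is clearer than f'{exc}'.
bare = str(exc)

assert with_text == 'Unexpected error: No such file or directory'
assert bare == 'No such file or directory'
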
@ -167,7 +167,7 @@ import selectors
|
||||
import shlex
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_text, to_bytes, to_native
|
||||
from ansible.module_utils.common.text.converters import to_text, to_bytes
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils.common_api import (
|
||||
AnsibleDockerClient,
|
||||
@ -295,16 +295,16 @@ def main():
|
||||
rc=result.get('ExitCode') or 0,
|
||||
)
|
||||
except NotFound:
|
||||
client.fail('Could not find container "{0}"'.format(container))
|
||||
client.fail(f'Could not find container "{container}"')
|
||||
except APIError as e:
|
||||
if e.response is not None and e.response.status_code == 409:
|
||||
client.fail('The container "{0}" has been paused ({1})'.format(container, to_native(e)))
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'The container "{container}" has been paused ({e})')
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -77,8 +77,6 @@ container:
|
||||
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils.common_api import (
|
||||
AnsibleDockerClient,
|
||||
RequestException,
|
||||
@ -105,10 +103,10 @@ def main():
|
||||
container=container,
|
||||
)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -175,7 +175,7 @@ current_context_name:
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils._api.context.api import (
|
||||
ContextAPI,
|
||||
@ -226,7 +226,7 @@ def context_to_json(context, current):
|
||||
if proto == 'http+unix':
|
||||
proto = 'unix'
|
||||
if proto:
|
||||
host_str = "{0}://{1}".format(proto, host_str)
|
||||
host_str = f"{proto}://{host_str}"
|
||||
|
||||
# Create config for the modules
|
||||
module_config['docker_host'] = host_str
|
||||
@ -274,15 +274,12 @@ def main():
|
||||
if module.params['name']:
|
||||
contexts = [ContextAPI.get_context(module.params['name'])]
|
||||
if not contexts[0]:
|
||||
module.fail_json(msg="There is no context of name {name!r}".format(name=module.params['name']))
|
||||
module.fail_json(msg=f"There is no context of name {module.params['name']!r}")
|
||||
elif module.params['only_current']:
|
||||
contexts = [ContextAPI.get_context(current_context_name)]
|
||||
if not contexts[0]:
|
||||
module.fail_json(
|
||||
msg="There is no context of name {name!r}, which is configured as the default context ({source})".format(
|
||||
name=current_context_name,
|
||||
source=current_context_source,
|
||||
),
|
||||
msg=f"There is no context of name {current_context_name!r}, which is configured as the default context ({current_context_source})",
|
||||
)
|
||||
else:
|
||||
contexts = ContextAPI.contexts()
|
||||
@ -298,9 +295,9 @@ def main():
|
||||
current_context_name=current_context_name,
|
||||
)
|
||||
except ContextException as e:
|
||||
module.fail_json(msg='Error when handling Docker contexts: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
module.fail_json(msg=f'Error when handling Docker contexts: {e}', exception=traceback.format_exc())
|
||||
except DockerException as e:
|
||||
module.fail_json(msg='An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
module.fail_json(msg=f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -370,10 +370,10 @@ def main():
|
||||
DockerHostManager(client, results)
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -826,7 +826,7 @@ class ImageManager(DockerBaseClass):
|
||||
container_limits = self.container_limits or {}
|
||||
for key in container_limits.keys():
|
||||
if key not in CONTAINER_LIMITS_KEYS:
|
||||
raise DockerException('Invalid container_limits key {key}'.format(key=key))
|
||||
raise DockerException(f'Invalid container_limits key {key}')
|
||||
|
||||
dockerfile = self.dockerfile
|
||||
if self.build_path.startswith(('http://', 'https://', 'git://', 'github.com/', 'git@')):
|
||||
@ -1068,7 +1068,7 @@ def main():
|
||||
)
|
||||
|
||||
if not is_valid_tag(client.module.params['tag'], allow_empty=True):
|
||||
client.fail('"{0}" is not a valid docker tag!'.format(client.module.params['tag']))
|
||||
client.fail(f'"{client.module.params["tag"]}" is not a valid docker tag!')
|
||||
|
||||
if client.module.params['source'] == 'build':
|
||||
if not client.module.params['build'] or not client.module.params['build'].get('path'):
|
||||
@ -1084,10 +1084,10 @@ def main():
|
||||
ImageManager(client, results)
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -323,7 +323,8 @@ def dict_to_list(dictionary, concat='='):
def _quote_csv(input):
if input.strip() == input and all(i not in input for i in '",\r\n'):
return input
return '"{0}"'.format(input.replace('"', '""'))
input = input.replace('"', '""')
return f'"{input}"'


class ImageBuilder(DockerBaseClass):
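This is one of the few hunks where the conversion adds a line instead of removing one (the header goes from 7 to 8 lines): input.replace('"', '""') uses single-quoted literals, so it cannot sit inside a single-quoted f-string before Python 3.12, and the call is hoisted onto its own assignment, rebinding the parameter. A hedged sketch of the same CSV quoting, using value instead of the module's input parameter name to avoid shadowing the builtin:

def quote_csv(value):
    # Double any embedded quotes, then wrap the whole field in quotes.
    value = value.replace('"', '""')
    return f'"{value}"'

assert quote_csv('say "hi", please') == '"say ""hi"", please"'
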
@ -349,33 +350,29 @@ class ImageBuilder(DockerBaseClass):
|
||||
|
||||
buildx = self.client.get_client_plugin_info('buildx')
|
||||
if buildx is None:
|
||||
self.fail('Docker CLI {0} does not have the buildx plugin installed'.format(self.client.get_cli()))
|
||||
self.fail(f'Docker CLI {self.client.get_cli()} does not have the buildx plugin installed')
|
||||
buildx_version = buildx['Version'].lstrip('v')
|
||||
|
||||
if self.secrets:
|
||||
for secret in self.secrets:
|
||||
if secret['type'] in ('env', 'value'):
|
||||
if LooseVersion(buildx_version) < LooseVersion('0.6.0'):
|
||||
self.fail('The Docker buildx plugin has version {version}, but 0.6.0 is needed for secrets of type=env and type=value'.format(
|
||||
version=buildx_version,
|
||||
))
|
||||
self.fail(f'The Docker buildx plugin has version {buildx_version}, but 0.6.0 is needed for secrets of type=env and type=value')
|
||||
if self.outputs and len(self.outputs) > 1:
|
||||
if LooseVersion(buildx_version) < LooseVersion('0.13.0'):
|
||||
self.fail('The Docker buildx plugin has version {version}, but 0.13.0 is needed to specify more than one output'.format(
|
||||
version=buildx_version,
|
||||
))
|
||||
self.fail(f'The Docker buildx plugin has version {buildx_version}, but 0.13.0 is needed to specify more than one output')
|
||||
|
||||
self.path = parameters['path']
|
||||
if not os.path.isdir(self.path):
|
||||
self.fail('"{0}" is not an existing directory'.format(self.path))
|
||||
self.fail(f'"{self.path}" is not an existing directory')
|
||||
self.dockerfile = parameters['dockerfile']
|
||||
if self.dockerfile and not os.path.isfile(os.path.join(self.path, self.dockerfile)):
|
||||
self.fail('"{0}" is not an existing file'.format(os.path.join(self.path, self.dockerfile)))
|
||||
self.fail(f'"{os.path.join(self.path, self.dockerfile)}" is not an existing file')
|
||||
|
||||
self.name = parameters['name']
|
||||
self.tag = parameters['tag']
|
||||
if not is_valid_tag(self.tag, allow_empty=True):
|
||||
self.fail('"{0}" is not a valid docker tag'.format(self.tag))
|
||||
self.fail(f'"{self.tag}" is not a valid docker tag')
|
||||
if is_image_name_id(self.name):
|
||||
self.fail('Image name must not be a digest')
|
||||
|
||||
@ -406,11 +403,8 @@ class ImageBuilder(DockerBaseClass):
|
||||
})
|
||||
if LooseVersion(buildx_version) < LooseVersion('0.13.0'):
|
||||
self.fail(
|
||||
"The output does not include an image with name {name_tag}, and the Docker"
|
||||
" buildx plugin has version {version} which only supports one output.".format(
|
||||
name_tag=name_tag,
|
||||
version=buildx_version,
|
||||
),
|
||||
f"The output does not include an image with name {name_tag}, and the Docker"
|
||||
f" buildx plugin has version {buildx_version} which only supports one output."
|
||||
)
|
||||
|
||||
def fail(self, msg, **kwargs):
|
||||
@ -450,41 +444,47 @@ class ImageBuilder(DockerBaseClass):
|
||||
if self.secrets:
|
||||
random_prefix = None
|
||||
for index, secret in enumerate(self.secrets):
|
||||
sid = secret['id']
|
||||
if secret['type'] == 'file':
|
||||
args.extend(['--secret', 'id={id},type=file,src={src}'.format(id=secret['id'], src=secret['src'])])
|
||||
src = secret['src']
|
||||
args.extend(['--secret', f'id={sid},type=file,src={src}'])
|
||||
if secret['type'] == 'env':
|
||||
args.extend(['--secret', 'id={id},type=env,env={env}'.format(id=secret['id'], env=secret['src'])])
|
||||
env = secret['src']
|
||||
args.extend(['--secret', f'id={sid},type=env,env={env}'])
|
||||
if secret['type'] == 'value':
|
||||
# We pass values on using environment variables. The user has been warned in the documentation
|
||||
# that they should only use this mechanism when being comfortable with it.
|
||||
if random_prefix is None:
|
||||
# Use /dev/urandom to generate some entropy to make the environment variable's name unguessable
|
||||
random_prefix = base64.b64encode(os.urandom(16)).decode('utf-8').replace('=', '')
|
||||
env_name = 'ANSIBLE_DOCKER_COMPOSE_ENV_SECRET_{random}_{id}'.format(
|
||||
random=random_prefix,
|
||||
id=index,
|
||||
)
|
||||
env_name = f'ANSIBLE_DOCKER_COMPOSE_ENV_SECRET_{random_prefix}_{index}'
|
||||
environ_update[env_name] = secret['value']
|
||||
args.extend(['--secret', 'id={id},type=env,env={env}'.format(id=secret['id'], env=env_name)])
|
||||
args.extend(['--secret', f'id={sid},type=env,env={env_name}'])
|
||||
if self.outputs:
|
||||
for output in self.outputs:
|
||||
subargs = []
|
||||
if output['type'] == 'local':
|
||||
subargs.extend(['type=local', 'dest={dest}'.format(dest=output['dest'])])
|
||||
dest = output['dest']
|
||||
subargs.extend(['type=local', f'dest={dest}'])
|
||||
if output['type'] == 'tar':
|
||||
subargs.extend(['type=tar', 'dest={dest}'.format(dest=output['dest'])])
|
||||
dest = output['dest']
|
||||
subargs.extend(['type=tar', f'dest={dest}'])
|
||||
if output['type'] == 'oci':
|
||||
subargs.extend(['type=oci', 'dest={dest}'.format(dest=output['dest'])])
|
||||
dest = output['dest']
|
||||
subargs.extend(['type=oci', f'dest={dest}'])
|
||||
if output['type'] == 'docker':
|
||||
subargs.append('type=docker')
|
||||
dest = output['dest']
|
||||
if output['dest'] is not None:
|
||||
subargs.append('dest={dest}'.format(dest=output['dest']))
|
||||
subargs.append(f'dest={dest}')
|
||||
if output['context'] is not None:
|
||||
subargs.append('context={context}'.format(context=output['context']))
|
||||
context = output['context']
|
||||
subargs.append(f'context={context}')
|
||||
if output['type'] == 'image':
|
||||
subargs.append('type=image')
|
||||
if output['name'] is not None:
|
||||
subargs.append('name={name}'.format(name=','.join(output['name'])))
|
||||
name = ','.join(output['name'])
|
||||
subargs.append(f'name={name}')
|
||||
if output['push']:
|
||||
subargs.append('push=true')
|
||||
if subargs:
|
||||
@ -590,7 +590,7 @@ def main():
|
||||
results = ImageBuilder(client).build_image()
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -135,7 +135,7 @@ class ImageExportManager(DockerBaseClass):
|
||||
self.tag = parameters['tag']
|
||||
|
||||
if not is_valid_tag(self.tag, allow_empty=True):
|
||||
self.fail('"{0}" is not a valid docker tag'.format(self.tag))
|
||||
self.fail(f'"{self.tag}" is not a valid docker tag')
|
||||
|
||||
# If name contains a tag, it takes precedence over tag parameter.
|
||||
self.names = []
|
||||
@ -272,10 +272,10 @@ def main():
|
||||
results = ImageExportManager(client).run()
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -234,10 +234,10 @@ def main():
|
||||
ImageManager(client, results)
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -82,8 +82,6 @@ images:
|
||||
import errno
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils.common_api import (
|
||||
AnsibleDockerClient,
|
||||
RequestException,
|
||||
@ -127,19 +125,19 @@ class ImageManager(DockerBaseClass):
|
||||
# Load image(s) from file
|
||||
load_output = []
|
||||
try:
|
||||
self.log("Opening image {0}".format(self.path))
|
||||
self.log(f"Opening image {self.path}")
|
||||
with open(self.path, 'rb') as image_tar:
|
||||
self.log("Loading images from {0}".format(self.path))
|
||||
self.log(f"Loading images from {self.path}")
|
||||
res = self.client._post(self.client._url("/images/load"), data=image_tar, stream=True)
|
||||
for line in self.client._stream_helper(res, decode=True):
|
||||
self.log(line, pretty_print=True)
|
||||
self._extract_output_line(line, load_output)
|
||||
except EnvironmentError as exc:
|
||||
if exc.errno == errno.ENOENT:
|
||||
self.client.fail("Error opening archive {0} - {1}".format(self.path, to_native(exc)))
|
||||
self.client.fail("Error loading archive {0} - {1}".format(self.path, to_native(exc)), stdout='\n'.join(load_output))
|
||||
self.client.fail(f"Error opening archive {self.path} - {exc}")
|
||||
self.client.fail(f"Error loading archive {self.path} - {exc}", stdout='\n'.join(load_output))
|
||||
except Exception as exc:
|
||||
self.client.fail("Error loading archive {0} - {1}".format(self.path, to_native(exc)), stdout='\n'.join(load_output))
|
||||
self.client.fail(f"Error loading archive {self.path} - {exc}", stdout='\n'.join(load_output))
|
||||
|
||||
# Collect loaded images
|
||||
loaded_images = []
|
||||
@ -160,7 +158,7 @@ class ImageManager(DockerBaseClass):
|
||||
image_name, tag = image_name.rsplit(':', 1)
|
||||
images.append(self.client.find_image(image_name, tag))
|
||||
else:
|
||||
self.client.module.warn('Image name "{0}" is neither ID nor has a tag'.format(image_name))
|
||||
self.client.module.warn(f'Image name "{image_name}" is neither ID nor has a tag')
|
||||
|
||||
self.results['image_names'] = loaded_images
|
||||
self.results['images'] = images
|
||||
@ -185,10 +183,10 @@ def main():
|
||||
ImageManager(client, results)
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -92,8 +92,6 @@ image:
|
||||
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.docker.plugins.module_utils.common_api import (
|
||||
AnsibleDockerClient,
|
||||
RequestException,
|
||||
@ -142,7 +140,7 @@ class ImagePuller(DockerBaseClass):
|
||||
if is_image_name_id(self.name):
|
||||
self.client.fail("Cannot pull an image by ID")
|
||||
if not is_valid_tag(self.tag, allow_empty=True):
|
||||
self.client.fail('"{0}" is not a valid docker tag!'.format(self.tag))
|
||||
self.client.fail(f'"{self.tag}" is not a valid docker tag!')
|
||||
|
||||
# If name contains a tag, it takes precedence over tag parameter.
|
||||
repo, repo_tag = parse_repository_tag(self.name)
|
||||
@ -212,10 +210,10 @@ def main():
|
||||
results = ImagePuller(client).pull()
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -112,7 +112,7 @@ class ImagePusher(DockerBaseClass):
|
||||
if is_image_name_id(self.name):
|
||||
self.client.fail("Cannot push an image by ID")
|
||||
if not is_valid_tag(self.tag, allow_empty=True):
|
||||
self.client.fail('"{0}" is not a valid docker tag!'.format(self.tag))
|
||||
self.client.fail(f'"{self.tag}" is not a valid docker tag!')
|
||||
|
||||
# If name contains a tag, it takes precedence over tag parameter.
|
||||
repo, repo_tag = parse_repository_tag(self.name)
|
||||
@ -123,7 +123,7 @@ class ImagePusher(DockerBaseClass):
|
||||
if is_image_name_id(self.tag):
|
||||
self.client.fail("Cannot push an image by digest")
|
||||
if not is_valid_tag(self.tag, allow_empty=False):
|
||||
self.client.fail('"{0}" is not a valid docker tag!'.format(self.tag))
|
||||
self.client.fail(f'"{self.tag}" is not a valid docker tag!')
|
||||
|
||||
def push(self):
|
||||
image = self.client.find_image(name=self.name, tag=self.tag)
|
||||
@ -190,10 +190,10 @@ def main():
|
||||
results = ImagePusher(client).push()
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -134,7 +134,7 @@ class ImageRemover(DockerBaseClass):
|
||||
self.prune = parameters['prune']
|
||||
|
||||
if not is_valid_tag(self.tag, allow_empty=True):
|
||||
self.fail('"{0}" is not a valid docker tag'.format(self.tag))
|
||||
self.fail(f'"{self.tag}" is not a valid docker tag')
|
||||
|
||||
# If name contains a tag, it takes precedence over tag parameter.
|
||||
if not is_image_name_id(self.name):
|
||||
@ -257,10 +257,10 @@ def main():
|
||||
results = ImageRemover(client).absent()
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
|
||||
client.fail(
|
||||
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
|
||||
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
|
||||
exception=traceback.format_exc())
|
||||
|
||||
|
||||
|
||||
@ -153,7 +153,7 @@ class ImageTagger(DockerBaseClass):
|
||||
self.name = parameters['name']
|
||||
self.tag = parameters['tag']
|
||||
if not is_valid_tag(self.tag, allow_empty=True):
|
||||
self.fail('"{0}" is not a valid docker tag'.format(self.tag))
|
||||
self.fail(f'"{self.tag}" is not a valid docker tag')
|
||||
|
||||
# If name contains a tag, it takes precedence over tag parameter.
|
||||
if not is_image_name_id(self.name):
|
||||
@ -264,10 +264,10 @@ def main():
|
||||
results = ImageTagger(client).tag_images()
|
||||
client.module.exit_json(**results)
|
||||
except DockerException as e:
|
||||
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
|
||||
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
|
||||
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

@ -435,10 +435,10 @@ def main():
del results['actions']
client.module.exit_json(**results)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

@ -355,7 +355,7 @@ def validate_cidr(cidr):
return 'ipv4'
elif CIDR_IPV6.match(cidr):
return 'ipv6'
raise ValueError('"{0}" is not a valid CIDR'.format(cidr))
raise ValueError(f'"{cidr}" is not a valid CIDR')

def normalize_ipam_config_key(key):

@ -621,10 +621,10 @@ class DockerNetworkManager(object):
return bool(container)

except DockerException as e:
self.client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
self.client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
self.client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

def connect_containers(self):

@ -638,7 +638,7 @@ class DockerNetworkManager(object):
self.client.post_json('/networks/{0}/connect', self.parameters.name, data=data)
self.results['actions'].append("Connected container %s" % (name,))
self.results['changed'] = True
self.diff_tracker.add('connected.{0}'.format(name), parameter=True, active=False)
self.diff_tracker.add(f'connected.{name}', parameter=True, active=False)

def disconnect_missing(self):
if not self.existing_network:

@ -664,7 +664,7 @@ class DockerNetworkManager(object):
self.client.post_json('/networks/{0}/disconnect', self.parameters.name, data=data)
self.results['actions'].append("Disconnected container %s" % (container_name,))
self.results['changed'] = True
self.diff_tracker.add('connected.{0}'.format(container_name),
self.diff_tracker.add(f'connected.{container_name}',
parameter=False,
active=True)

@ -747,10 +747,10 @@ def main():
cm = DockerNetworkManager(client)
client.module.exit_json(**cm.results)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

@ -98,8 +98,6 @@ network:

import traceback

from ansible.module_utils.common.text.converters import to_native

from ansible_collections.community.docker.plugins.module_utils.common_api import (
AnsibleDockerClient,
RequestException,

@ -126,10 +124,10 @@ def main():
network=network,
)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

@ -293,10 +293,10 @@ def main():
SwarmNodeManager(client, results)
client.module.exit_json(**results)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc())

@ -88,8 +88,6 @@ nodes:

import traceback

from ansible.module_utils.common.text.converters import to_native

from ansible_collections.community.docker.plugins.module_utils.common import (
RequestException,
)

@ -149,10 +147,10 @@ def main():
nodes=nodes,
)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc())

@ -384,10 +384,10 @@ def main():
cm = DockerPluginManager(client)
client.module.exit_json(**cm.result)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

@ -230,7 +230,6 @@ builder_cache_caches_deleted:

import traceback

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.formatters import human_to_bytes

from ansible_collections.community.docker.plugins.module_utils.common_api import (

@ -276,7 +275,7 @@ def main():
try:
builder_cache_keep_storage = human_to_bytes(client.module.params.get('builder_cache_keep_storage'))
except ValueError as exc:
client.module.fail_json(msg='Error while parsing value of builder_cache_keep_storage: {0}'.format(exc))
client.module.fail_json(msg=f'Error while parsing value of builder_cache_keep_storage: {exc}')

try:
result = dict()

@ -337,10 +336,10 @@ def main():
result['changed'] = changed
client.module.exit_json(**result)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

@ -234,7 +234,7 @@ class SecretManager(DockerBaseClass):
with open(data_src, 'rb') as f:
self.data = f.read()
except Exception as exc:
self.client.fail('Error while reading {src}: {error}'.format(src=data_src, error=to_native(exc)))
self.client.fail(f'Error while reading {data_src}: {exc}')
self.labels = parameters.get('labels')
self.force = parameters.get('force')
self.rolling_versions = parameters.get('rolling_versions')

@ -278,7 +278,7 @@ class SecretManager(DockerBaseClass):
self.secrets = [
secret
for secret in secrets
if secret['Spec']['Name'].startswith('{name}_v'.format(name=self.name))
if secret['Spec']['Name'].startswith(f'{self.name}_v')
]
self.secrets.sort(key=self.get_version)
else:

@ -296,7 +296,7 @@ class SecretManager(DockerBaseClass):
if self.rolling_versions:
self.version += 1
labels['ansible_version'] = str(self.version)
self.name = '{name}_v{version}'.format(name=self.name, version=self.version)
self.name = f'{self.name}_v{self.version}'
if self.labels:
labels.update(self.labels)

@ -397,10 +397,10 @@ def main():
SecretManager(client, results)()
client.module.exit_json(**results)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc())

@ -340,7 +340,7 @@ def main():
)
client.module.exit_json(changed=False)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())


if __name__ == "__main__":

@ -110,7 +110,7 @@ def main():
results=ret,
)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())


if __name__ == "__main__":

@ -120,7 +120,7 @@ def main():
results=ret,
)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())


if __name__ == "__main__":

@ -707,10 +707,10 @@ def main():
SwarmManager(client, results)()
client.module.exit_json(**results)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc())

@ -367,10 +367,10 @@ def main():
results.update(client.fail_results)
client.module.exit_json(**results)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc())

@ -2792,10 +2792,10 @@ def main():

client.module.exit_json(**results)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc())

@ -64,8 +64,6 @@ service:

import traceback

from ansible.module_utils.common.text.converters import to_native

try:
from docker.errors import DockerException
except ImportError:

@ -109,10 +107,10 @@ def main():
exists=bool(service)
)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when Docker SDK for Python tried to talk to the docker daemon: {e}',
exception=traceback.format_exc())

@ -304,10 +304,10 @@ def main():
cm = DockerVolumeManager(client)
client.module.exit_json(**cm.results)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

@ -109,10 +109,10 @@ def main():
volume=volume,
)
except DockerException as e:
client.fail('An unexpected Docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())
client.fail(f'An unexpected Docker error occurred: {e}', exception=traceback.format_exc())
except RequestException as e:
client.fail(
'An unexpected requests error occurred when trying to talk to the Docker daemon: {0}'.format(to_native(e)),
f'An unexpected requests error occurred when trying to talk to the Docker daemon: {e}',
exception=traceback.format_exc())

@ -28,7 +28,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):

def fail(self, msg, **kwargs):
if kwargs:
msg += '\nContext:\n' + '\n'.join(' {0} = {1!r}'.format(k, v) for (k, v) in kwargs.items())
msg += '\nContext:\n' + '\n'.join(f' {k} = {v!r}' for (k, v) in kwargs.items())
raise AnsibleConnectionFailure(msg)

def deprecate(self, msg, version=None, date=None, collection_name=None):

@ -26,7 +26,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):

def fail(self, msg, **kwargs):
if kwargs:
msg += '\nContext:\n' + '\n'.join(' {0} = {1!r}'.format(k, v) for (k, v) in kwargs.items())
msg += '\nContext:\n' + '\n'.join(f' {k} = {v!r}' for (k, v) in kwargs.items())
raise AnsibleConnectionFailure(msg)

def deprecate(self, msg, version=None, date=None, collection_name=None):

@ -107,8 +107,8 @@ class FakeClient(object):
'Image': host['Config']['Image'],
'ImageId': host['Image'],
})
self.get_results['/containers/{0}/json'.format(host['Name'])] = host
self.get_results['/containers/{0}/json'.format(host['Id'])] = host
self.get_results[f"/containers/{host['Name']}/json"] = host
self.get_results[f"/containers/{host['Id']}/json"] = host
self.get_results['/containers/json'] = list_reply

def get_json(self, url, *param, **kwargs):

@ -74,7 +74,7 @@ def fake_resp(method, url, *args, **kwargs):
elif (url, method) in fake_api.fake_responses:
key = (url, method)
if not key:
raise Exception('{method} {url}'.format(method=method, url=url))
raise Exception(f'{method} {url}')
status_code, content = fake_api.fake_responses[key]()
return response(status_code=status_code, content=content)

@ -102,10 +102,8 @@ def fake_read_from_socket(self, response, stream, tty=False, demux=False):
return b""


url_base = '{prefix}/'.format(prefix=fake_api.prefix)
url_prefix = '{0}v{1}/'.format(
url_base,
DEFAULT_DOCKER_API_VERSION)
url_base = f'{fake_api.prefix}/'
url_prefix = f'{url_base}v{DEFAULT_DOCKER_API_VERSION}/'


class BaseAPIClientTest(unittest.TestCase):

@ -147,22 +145,18 @@ class DockerApiTest(BaseAPIClientTest):

def test_url_valid_resource(self):
url = self.client._url('/hello/{0}/world', 'somename')
assert url == '{0}{1}'.format(url_prefix, 'hello/somename/world')
assert url == f'{url_prefix}hello/somename/world'

url = self.client._url(
'/hello/{0}/world/{1}', 'somename', 'someothername'
)
assert url == '{0}{1}'.format(
url_prefix, 'hello/somename/world/someothername'
)
assert url == f'{url_prefix}hello/somename/world/someothername'

url = self.client._url('/hello/{0}/world', 'some?name')
assert url == '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world')
assert url == f'{url_prefix}hello/some%3Fname/world'

url = self.client._url("/images/{0}/push", "localhost:5000/image")
assert url == '{0}{1}'.format(
url_prefix, 'images/localhost:5000/image/push'
)
assert url == f'{url_prefix}images/localhost:5000/image/push'

def test_url_invalid_resource(self):
with pytest.raises(ValueError):

@ -170,13 +164,13 @@ class DockerApiTest(BaseAPIClientTest):

def test_url_no_resource(self):
url = self.client._url('/simple')
assert url == '{0}{1}'.format(url_prefix, 'simple')
assert url == f'{url_prefix}simple'

def test_url_unversioned_api(self):
url = self.client._url(
'/hello/{0}/world', 'somename', versioned_api=False
)
assert url == '{0}{1}'.format(url_base, 'hello/somename/world')
assert url == f'{url_base}hello/somename/world'

def test_version(self):
self.client.version()

@ -463,8 +457,7 @@ class TCPSocketStreamTest(unittest.TestCase):
cls.thread = threading.Thread(target=cls.server.serve_forever)
cls.thread.daemon = True
cls.thread.start()
cls.address = 'http://{0}:{1}'.format(
socket.gethostname(), cls.server.server_address[1])
cls.address = f'http://{socket.gethostname()}:{cls.server.server_address[1]}'

@classmethod
def teardown_class(cls):

@ -503,7 +496,7 @@ class TCPSocketStreamTest(unittest.TestCase):
data += stderr_data
return data
else:
raise Exception('Unknown path {path}'.format(path=path))
raise Exception(f'Unknown path {path}')

@staticmethod
def frame_header(stream, data):

@ -15,7 +15,7 @@ from ansible_collections.community.docker.tests.unit.plugins.module_utils._api.c

from . import fake_stat

CURRENT_VERSION = 'v{api_version}'.format(api_version=DEFAULT_DOCKER_API_VERSION)
CURRENT_VERSION = f'v{DEFAULT_DOCKER_API_VERSION}'

FAKE_CONTAINER_ID = '3cc2351ab11b'
FAKE_IMAGE_ID = 'e9aa60c60128'

@ -539,131 +539,117 @@ if constants.IS_WINDOWS_PLATFORM:
prefix = 'http+docker://localnpipe'

fake_responses = {
'{prefix}/version'.format(prefix=prefix):
f'{prefix}/version':
get_fake_version,
'{prefix}/{CURRENT_VERSION}/version'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/version':
get_fake_version,
'{prefix}/{CURRENT_VERSION}/info'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/info':
get_fake_info,
'{prefix}/{CURRENT_VERSION}/auth'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/auth':
post_fake_auth,
'{prefix}/{CURRENT_VERSION}/_ping'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/_ping':
get_fake_ping,
'{prefix}/{CURRENT_VERSION}/images/search'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/search':
get_fake_search,
'{prefix}/{CURRENT_VERSION}/images/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/json':
get_fake_images,
'{prefix}/{CURRENT_VERSION}/images/test_image/history'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/test_image/history':
get_fake_image_history,
'{prefix}/{CURRENT_VERSION}/images/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_import_image,
'{prefix}/{CURRENT_VERSION}/containers/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/json':
get_fake_containers,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start':
post_fake_start_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize':
post_fake_resize_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json':
get_fake_inspect_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename':
post_fake_rename_container,
'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag':
post_fake_tag_image,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait':
get_fake_wait,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs':
get_fake_logs,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes':
get_fake_diff,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export':
get_fake_export,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update':
post_fake_update_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec':
post_fake_exec_create,
'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start':
post_fake_exec_start,
'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json':
get_fake_exec_inspect,
'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize':
post_fake_exec_resize,

'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats':
get_fake_stats,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top':
get_fake_top,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop':
post_fake_stop_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill':
post_fake_kill_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause':
post_fake_pause_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause':
post_fake_unpause_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart':
post_fake_restart_container,
'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b':
delete_fake_remove_container,
'{prefix}/{CURRENT_VERSION}/images/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_image_create,
'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128':
delete_fake_remove_image,
'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get':
get_fake_get_image,
'{prefix}/{CURRENT_VERSION}/images/load'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/load':
post_fake_load_image,
'{prefix}/{CURRENT_VERSION}/images/test_image/json'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/test_image/json':
get_fake_inspect_image,
'{prefix}/{CURRENT_VERSION}/images/test_image/insert'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/test_image/insert':
get_fake_insert_image,
'{prefix}/{CURRENT_VERSION}/images/test_image/push'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/images/test_image/push':
post_fake_push,
'{prefix}/{CURRENT_VERSION}/commit'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/commit':
post_fake_commit,
'{prefix}/{CURRENT_VERSION}/containers/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/containers/create':
post_fake_create_container,
'{prefix}/{CURRENT_VERSION}/build'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/build':
post_fake_build_container,
'{prefix}/{CURRENT_VERSION}/events'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/events':
get_fake_events,
('{prefix}/{CURRENT_VERSION}/volumes'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'GET'):
(f'{prefix}/{CURRENT_VERSION}/volumes', 'GET'):
get_fake_volume_list,
('{prefix}/{CURRENT_VERSION}/volumes/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'):
get_fake_volume,
('{1}/{0}/volumes/{2}'.format(
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
), 'GET'):
(f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'GET'):
get_fake_volume,
('{1}/{0}/volumes/{2}'.format(
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
), 'DELETE'):
(f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'DELETE'):
fake_remove_volume,
('{1}/{0}/nodes/{2}/update?version=1'.format(
CURRENT_VERSION, prefix, FAKE_NODE_ID
), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/nodes/{FAKE_NODE_ID}/update?version=1', 'POST'):
post_fake_update_node,
('{prefix}/{CURRENT_VERSION}/swarm/join'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'):
post_fake_join_swarm,
('{prefix}/{CURRENT_VERSION}/networks'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'GET'):
(f'{prefix}/{CURRENT_VERSION}/networks', 'GET'):
get_fake_network_list,
('{prefix}/{CURRENT_VERSION}/networks/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'):
post_fake_network,
('{1}/{0}/networks/{2}'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'GET'):
(f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'GET'):
get_fake_network,
('{1}/{0}/networks/{2}'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'DELETE'):
(f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'DELETE'):
delete_fake_network,
('{1}/{0}/networks/{2}/connect'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/connect', 'POST'):
post_fake_network_connect,
('{1}/{0}/networks/{2}/disconnect'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/disconnect', 'POST'):
post_fake_network_disconnect,
'{prefix}/{CURRENT_VERSION}/secrets/create'.format(prefix=prefix, CURRENT_VERSION=CURRENT_VERSION):
f'{prefix}/{CURRENT_VERSION}/secrets/create':
post_fake_secret,
}

@ -252,7 +252,7 @@ class LoadConfigTest(unittest.TestCase):
cfg_path = os.path.join(folder, '.dockercfg')
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
with open(cfg_path, 'w') as f:
f.write('auth = {auth}\n'.format(auth=auth_))
f.write(f'auth = {auth_}\n')
f.write('email = sakuya@scarlet.net')

cfg = auth.load_config(cfg_path)

@ -309,14 +309,12 @@ class LoadConfigTest(unittest.TestCase):
folder = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, folder)

dockercfg_path = os.path.join(folder,
'.{0}.dockercfg'.format(
random.randrange(100000)))
dockercfg_path = os.path.join(folder, f'.{random.randrange(100000)}.dockercfg')
registry = 'https://your.private.registry.io'
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = {
registry: {
'auth': '{auth}'.format(auth=auth_),
'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}

@ -342,7 +340,7 @@ class LoadConfigTest(unittest.TestCase):
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = {
registry: {
'auth': '{auth}'.format(auth=auth_),
'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}

@ -370,7 +368,7 @@ class LoadConfigTest(unittest.TestCase):
config = {
'auths': {
registry: {
'auth': '{auth}'.format(auth=auth_),
'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}

@ -399,7 +397,7 @@ class LoadConfigTest(unittest.TestCase):
config = {
'auths': {
registry: {
'auth': '{auth}'.format(auth=auth_),
'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}

@ -431,9 +431,7 @@ class TarTest(unittest.TestCase):
with pytest.raises(IOError) as ei:
tar(base)

assert 'Can not read file in context: {full_path}'.format(full_path=full_path) in (
ei.exconly()
)
assert f'Can not read file in context: {full_path}' in ei.exconly()

@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='No symlinks on Windows')
def test_tar_with_file_symlinks(self):

@ -75,7 +75,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is None.
self.assertSetEqual(
set(CONFIG.inject_proxy_environment(None)),
set('{k}={v}'.format(k=k, v=v) for k, v in ENV.items()))
set(f'{k}={v}' for k, v in ENV.items()))

# Proxy config is null, env is None.
self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None)

@ -84,7 +84,7 @@ class ProxyConfigTest(unittest.TestCase):

# Proxy config is non null, env is non null
actual = CONFIG.inject_proxy_environment(env)
expected = ['{k}={v}'.format(k=k, v=v) for k, v in ENV.items()] + env
expected = [f'{k}={v}' for k, v in ENV.items()] + env
# It's important that the first 8 variables are the ones from the proxy
# config, and the last 2 are the ones from the input environment
self.assertSetEqual(set(actual[:8]), set(expected[:8]))

@ -288,7 +288,7 @@ class ParseHostTest(unittest.TestCase):
}

for host in invalid_hosts:
msg = 'Should have failed to parse invalid host: {0}'.format(host)
msg = f'Should have failed to parse invalid host: {host}'
with self.assertRaises(DockerException, msg=msg):
parse_host(host, None)

@ -296,7 +296,7 @@ class ParseHostTest(unittest.TestCase):
self.assertEqual(
parse_host(host, None),
expected,
msg='Failed to parse valid host: {0}'.format(host),
msg=f'Failed to parse valid host: {host}',
)

def test_parse_host_empty_value(self):

@ -347,14 +347,14 @@ class ParseRepositoryTagTest(unittest.TestCase):
)

def test_index_image_sha(self):
assert parse_repository_tag("root@sha256:{sha}".format(sha=self.sha)) == (
"root", "sha256:{sha}".format(sha=self.sha)
assert parse_repository_tag(f"root@sha256:{self.sha}") == (
"root", f"sha256:{self.sha}"
)

def test_private_reg_image_sha(self):
assert parse_repository_tag(
"url:5000/repo@sha256:{sha}".format(sha=self.sha)
) == ("url:5000/repo", "sha256:{sha}".format(sha=self.sha))
f"url:5000/repo@sha256:{self.sha}"
) == ("url:5000/repo", f"sha256:{self.sha}")


class ParseDeviceTest(unittest.TestCase):

@ -20,9 +20,9 @@ from ansible_collections.community.docker.plugins.module_utils._scramble import
])
def test_scramble_unscramble(plaintext, key, scrambled):
scrambled_ = scramble(plaintext, key)
print('{0!r} == {1!r}'.format(scrambled_, scrambled))
print(f'{scrambled_!r} == {scrambled!r}')
assert scrambled_ == scrambled

plaintext_ = unscramble(scrambled, key)
print('{0!r} == {1!r}'.format(plaintext_, plaintext))
print(f'{plaintext_!r} == {plaintext!r}')
assert plaintext_ == plaintext

@ -39,7 +39,7 @@ def test_parse_string(input, expected):
])
def test_parse_int(input):
assert parse_modern(input) == input
with pytest.raises(TypeError, match="^must be an octal string, got {value}L?$".format(value=input)):
with pytest.raises(TypeError, match=f"^must be an octal string, got {input}L?$"):
parse_octal_string_only(input)

@ -32,4 +32,4 @@ def test_validate_cidr_positives(cidr, expected):
def test_validate_cidr_negatives(cidr):
with pytest.raises(ValueError) as e:
validate_cidr(cidr)
assert '"{0}" is not a valid CIDR'.format(cidr) == str(e.value)
assert f'"{cidr}" is not a valid CIDR' == str(e.value)

@ -75,7 +75,7 @@ def test_get_docker_environment(mocker, docker_swarm_service):
mocker.patch.object(
docker_swarm_service,
'format_environment',
side_effect=lambda d: ['{0}={1}'.format(key, value) for key, value in d.items()],
side_effect=lambda d: [f'{key}={value}' for key, value in d.items()],
)
# Test with env dict and file
result = docker_swarm_service.get_docker_environment(