Address some pylint issues (#1155)

* Address cyclic-import.

* Address redefined-builtin.

* Address redefined-argument-from-local.

* Address many redefined-outer-name.

* Address pointless-string-statement.

* No longer needed due to separate bugfix.

* Address useless-return.

* Address possibly-used-before-assignment.

* Add TODOs.

* Address super-init-not-called.

* Address function-redefined.

* Address unspecified-encoding.

* Clean up more imports.
This commit is contained in:
Felix Fontein 2025-10-09 20:11:36 +02:00 committed by GitHub
parent db09affaea
commit a3efa26e2e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
42 changed files with 348 additions and 323 deletions

View File

@ -381,7 +381,6 @@ disable=raw-checker-failed,
# To clean up: # To clean up:
abstract-method, abstract-method,
arguments-differ, arguments-differ,
assignment-from-no-return,
attribute-defined-outside-init, attribute-defined-outside-init,
broad-exception-caught, broad-exception-caught,
broad-exception-raised, broad-exception-raised,
@ -391,10 +390,8 @@ disable=raw-checker-failed,
consider-using-in, consider-using-in,
consider-using-max-builtin, consider-using-max-builtin,
consider-using-with, consider-using-with,
cyclic-import,
fixme, fixme,
function-redefined, import-error, # TODO figure out why pylint cannot find the module
import-error,
invalid-name, invalid-name,
keyword-arg-before-vararg, keyword-arg-before-vararg,
line-too-long, line-too-long,
@ -402,30 +399,23 @@ disable=raw-checker-failed,
no-else-raise, no-else-raise,
no-else-return, no-else-return,
no-member, no-member,
no-name-in-module, no-name-in-module, # TODO figure out why pylint cannot find the module
not-an-iterable, not-an-iterable, # TODO: needs better typing info
pointless-string-statement,
possibly-used-before-assignment,
protected-access, protected-access,
raise-missing-from, raise-missing-from,
redefined-argument-from-local, redefined-outer-name, # needed for test fixtures
redefined-builtin,
redefined-outer-name,
simplifiable-if-expression, simplifiable-if-expression,
subprocess-popen-preexec-fn, subprocess-popen-preexec-fn,
super-init-not-called,
super-with-arguments, super-with-arguments,
unexpected-keyword-arg, unexpected-keyword-arg,
unnecessary-dunder-call, unnecessary-dunder-call,
unnecessary-pass, unnecessary-pass,
unspecified-encoding, unsupported-assignment-operation, # TODO: needs better typing info
unsupported-assignment-operation,
unused-argument, unused-argument,
unused-variable, unused-variable,
use-dict-literal, use-dict-literal,
use-list-literal, use-list-literal,
useless-object-inheritance, useless-object-inheritance,
useless-return,
# Cannot remove yet due to inadequacy of rules # Cannot remove yet due to inadequacy of rules
inconsistent-return-statements, # doesn't notice that fail_json() does not return inconsistent-return-statements, # doesn't notice that fail_json() does not return

View File

@ -425,11 +425,13 @@ class Connection(ConnectionBase):
+ to_native(become_output) + to_native(become_output)
) )
chunk = None
for key, event in events: for key, event in events:
if key.fileobj == p.stdout: if key.fileobj == p.stdout:
chunk = p.stdout.read() chunk = p.stdout.read()
elif key.fileobj == p.stderr: elif key.fileobj == p.stderr:
chunk = p.stderr.read() chunk = p.stderr.read()
# TODO: avoid chunk being set multiple times!
if not chunk: if not chunk:
stdout, stderr = p.communicate() stdout, stderr = p.communicate()

View File

@ -149,10 +149,10 @@ class Connection(ConnectionBase):
transport = "community.docker.docker_api" transport = "community.docker.docker_api"
has_pipelining = True has_pipelining = True
def _call_client(self, callable, not_found_can_be_resource=False): def _call_client(self, f, not_found_can_be_resource=False):
remote_addr = self.get_option("remote_addr") remote_addr = self.get_option("remote_addr")
try: try:
return callable() return f()
except NotFound as e: except NotFound as e:
if not_found_can_be_resource: if not_found_can_be_resource:
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(

View File

@ -177,11 +177,13 @@ class Connection(ConnectionBase):
+ to_native(become_output) + to_native(become_output)
) )
chunk = None
for key, event in events: for key, event in events:
if key.fileobj == p.stdout: if key.fileobj == p.stdout:
chunk = p.stdout.read() chunk = p.stdout.read()
elif key.fileobj == p.stderr: elif key.fileobj == p.stderr:
chunk = p.stderr.read() chunk = p.stderr.read()
# TODO: avoid chunk being set multiple times!
if not chunk: if not chunk:
stdout, stderr = p.communicate() stdout, stderr = p.communicate()

View File

@ -243,24 +243,24 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
filters = parse_filters(self.get_option("filters")) filters = parse_filters(self.get_option("filters"))
for container in containers: for container in containers:
id = container.get("Id") container_id = container.get("Id")
short_id = id[:13] short_container_id = container_id[:13]
try: try:
name = container.get("Names", list())[0].lstrip("/") name = container.get("Names", list())[0].lstrip("/")
full_name = name full_name = name
except IndexError: except IndexError:
name = short_id name = short_container_id
full_name = id full_name = container_id
facts = dict( facts = dict(
docker_name=make_unsafe(name), docker_name=make_unsafe(name),
docker_short_id=make_unsafe(short_id), docker_short_id=make_unsafe(short_container_id),
) )
full_facts = dict() full_facts = dict()
try: try:
inspect = client.get_json("/containers/{0}/json", id) inspect = client.get_json("/containers/{0}/json", container_id)
except APIError as exc: except APIError as exc:
raise AnsibleError(f"Error inspecting container {name} - {exc}") raise AnsibleError(f"Error inspecting container {name} - {exc}")
@ -371,12 +371,12 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
# When we do this before a set_variable() call, the variables are assigned # When we do this before a set_variable() call, the variables are assigned
# to the group, and not to the host. # to the group, and not to the host.
if add_legacy_groups: if add_legacy_groups:
self.inventory.add_group(id) self.inventory.add_group(container_id)
self.inventory.add_host(name, group=id) self.inventory.add_host(name, group=container_id)
self.inventory.add_group(name) self.inventory.add_group(name)
self.inventory.add_host(name, group=name) self.inventory.add_host(name, group=name)
self.inventory.add_group(short_id) self.inventory.add_group(short_container_id)
self.inventory.add_host(name, group=short_id) self.inventory.add_host(name, group=short_container_id)
self.inventory.add_group(hostname) self.inventory.add_group(hostname)
self.inventory.add_host(name, group=hostname) self.inventory.add_host(name, group=hostname)

View File

@ -170,15 +170,15 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
# capture any of the DOCKER_xxx variables that were output and create Ansible host vars # capture any of the DOCKER_xxx variables that were output and create Ansible host vars
# with the same name and value but with a dm_ name prefix. # with the same name and value but with a dm_ name prefix.
vars = [] env_vars = []
for line in env_lines: for line in env_lines:
match = re.search('(DOCKER_[^=]+)="([^"]+)"', line) match = re.search('(DOCKER_[^=]+)="([^"]+)"', line)
if match: if match:
env_var_name = match.group(1) env_var_name = match.group(1)
env_var_value = match.group(2) env_var_value = match.group(2)
vars.append((env_var_name, env_var_value)) env_vars.append((env_var_name, env_var_value))
return vars return env_vars
def _get_machine_names(self): def _get_machine_names(self):
# Filter out machines that are not in the Running state as we probably cannot perform any useful actions # Filter out machines that are not in the Running state as we probably cannot perform any useful actions

View File

@ -86,13 +86,13 @@ except ImportError:
def fail_on_missing_imports(): def fail_on_missing_imports():
if REQUESTS_IMPORT_ERROR is not None: if REQUESTS_IMPORT_ERROR is not None:
from .errors import MissingRequirementException from .errors import MissingRequirementException # pylint: disable=cyclic-import
raise MissingRequirementException( raise MissingRequirementException(
"You have to install requests", "requests", REQUESTS_IMPORT_ERROR "You have to install requests", "requests", REQUESTS_IMPORT_ERROR
) )
if URLLIB3_IMPORT_ERROR is not None: if URLLIB3_IMPORT_ERROR is not None:
from .errors import MissingRequirementException from .errors import MissingRequirementException # pylint: disable=cyclic-import
raise MissingRequirementException( raise MissingRequirementException(
"You have to install urllib3", "urllib3", URLLIB3_IMPORT_ERROR "You have to install urllib3", "urllib3", URLLIB3_IMPORT_ERROR

View File

@ -289,14 +289,14 @@ class APIClient(_Session, DaemonApiMixin):
except _HTTPError as e: except _HTTPError as e:
create_api_error_from_http_exception(e) create_api_error_from_http_exception(e)
def _result(self, response, json=False, binary=False): def _result(self, response, get_json=False, get_binary=False):
if json and binary: if get_json and get_binary:
raise AssertionError("json and binary must not be both True") raise AssertionError("json and binary must not be both True")
self._raise_for_status(response) self._raise_for_status(response)
if json: if get_json:
return response.json() return response.json()
if binary: if get_binary:
return response.content return response.content
return response.text return response.text
@ -360,12 +360,12 @@ class APIClient(_Session, DaemonApiMixin):
else: else:
# Response is not chunked, meaning we probably # Response is not chunked, meaning we probably
# encountered an error immediately # encountered an error immediately
yield self._result(response, json=decode) yield self._result(response, get_json=decode)
def _multiplexed_buffer_helper(self, response): def _multiplexed_buffer_helper(self, response):
"""A generator of multiplexed data blocks read from a buffered """A generator of multiplexed data blocks read from a buffered
response.""" response."""
buf = self._result(response, binary=True) buf = self._result(response, get_binary=True)
buf_length = len(buf) buf_length = len(buf)
walker = 0 walker = 0
while True: while True:
@ -478,7 +478,7 @@ class APIClient(_Session, DaemonApiMixin):
return ( return (
self._stream_raw_result(res) self._stream_raw_result(res)
if stream if stream
else self._result(res, binary=True) else self._result(res, get_binary=True)
) )
self._raise_for_status(res) self._raise_for_status(res)
@ -551,13 +551,13 @@ class APIClient(_Session, DaemonApiMixin):
def get_binary(self, pathfmt, *args, **kwargs): def get_binary(self, pathfmt, *args, **kwargs):
return self._result( return self._result(
self._get(self._url(pathfmt, *args, versioned_api=True), **kwargs), self._get(self._url(pathfmt, *args, versioned_api=True), **kwargs),
binary=True, get_binary=True,
) )
def get_json(self, pathfmt, *args, **kwargs): def get_json(self, pathfmt, *args, **kwargs):
return self._result( return self._result(
self._get(self._url(pathfmt, *args, versioned_api=True), **kwargs), self._get(self._url(pathfmt, *args, versioned_api=True), **kwargs),
json=True, get_json=True,
) )
def get_text(self, pathfmt, *args, **kwargs): def get_text(self, pathfmt, *args, **kwargs):
@ -581,7 +581,7 @@ class APIClient(_Session, DaemonApiMixin):
def delete_json(self, pathfmt, *args, **kwargs): def delete_json(self, pathfmt, *args, **kwargs):
return self._result( return self._result(
self._delete(self._url(pathfmt, *args, versioned_api=True), **kwargs), self._delete(self._url(pathfmt, *args, versioned_api=True), **kwargs),
json=True, get_json=True,
) )
def post_call(self, pathfmt, *args, **kwargs): def post_call(self, pathfmt, *args, **kwargs):
@ -603,7 +603,7 @@ class APIClient(_Session, DaemonApiMixin):
self._post_json( self._post_json(
self._url(pathfmt, *args, versioned_api=True), data, **kwargs self._url(pathfmt, *args, versioned_api=True), data, **kwargs
), ),
binary=True, get_binary=True,
) )
def post_json_to_json(self, pathfmt, *args, **kwargs): def post_json_to_json(self, pathfmt, *args, **kwargs):
@ -612,7 +612,7 @@ class APIClient(_Session, DaemonApiMixin):
self._post_json( self._post_json(
self._url(pathfmt, *args, versioned_api=True), data, **kwargs self._url(pathfmt, *args, versioned_api=True), data, **kwargs
), ),
json=True, get_json=True,
) )
def post_json_to_text(self, pathfmt, *args, **kwargs): def post_json_to_text(self, pathfmt, *args, **kwargs):
@ -670,5 +670,5 @@ class APIClient(_Session, DaemonApiMixin):
def post_to_json(self, pathfmt, *args, **kwargs): def post_to_json(self, pathfmt, *args, **kwargs):
return self._result( return self._result(
self._post(self._url(pathfmt, *args, versioned_api=True), **kwargs), self._post(self._url(pathfmt, *args, versioned_api=True), **kwargs),
json=True, get_json=True,
) )

View File

@ -33,7 +33,7 @@ class DaemonApiMixin(object):
If the server returns an error. If the server returns an error.
""" """
url = self._url("/system/df") url = self._url("/system/df")
return self._result(self._get(url), True) return self._result(self._get(url), get_json=True)
def info(self): def info(self):
""" """
@ -47,7 +47,7 @@ class DaemonApiMixin(object):
:py:class:`docker.errors.APIError` :py:class:`docker.errors.APIError`
If the server returns an error. If the server returns an error.
""" """
return self._result(self._get(self._url("/info")), True) return self._result(self._get(self._url("/info")), get_json=True)
def login( def login(
self, self,
@ -108,7 +108,7 @@ class DaemonApiMixin(object):
response = self._post_json(self._url("/auth"), data=req_data) response = self._post_json(self._url("/auth"), data=req_data)
if response.status_code == 200: if response.status_code == 200:
self._auth_configs.add_auth(registry or auth.INDEX_NAME, req_data) self._auth_configs.add_auth(registry or auth.INDEX_NAME, req_data)
return self._result(response, json=True) return self._result(response, get_json=True)
def ping(self): def ping(self):
""" """
@ -137,4 +137,4 @@ class DaemonApiMixin(object):
If the server returns an error. If the server returns an error.
""" """
url = self._url("/version", versioned_api=api_version) url = self._url("/version", versioned_api=api_version)
return self._result(self._get(url), json=True) return self._result(self._get(url), get_json=True)

View File

@ -166,7 +166,7 @@ class AuthConfig(dict):
if not config_file: if not config_file:
return cls({}, credstore_env) return cls({}, credstore_env)
try: try:
with open(config_file) as f: with open(config_file, "rt", encoding="utf-8") as f:
config_dict = json.load(f) config_dict = json.load(f)
except (IOError, KeyError, ValueError) as e: except (IOError, KeyError, ValueError) as e:
# Likely missing new Docker config file or it is in an # Likely missing new Docker config file or it is in an
@ -351,7 +351,7 @@ def _load_legacy_config(config_file):
log.debug("Attempting to parse legacy auth file format") log.debug("Attempting to parse legacy auth file format")
try: try:
data = [] data = []
with open(config_file) as f: with open(config_file, "rt", encoding="utf-8") as f:
for line in f.readlines(): for line in f.readlines():
data.append(line.strip().split(" = ")[1]) data.append(line.strip().split(" = ")[1])
if len(data) < 2: if len(data) < 2:

View File

@ -151,7 +151,7 @@ class ContextAPI(object):
if filename == METAFILE: if filename == METAFILE:
filepath = os.path.join(dirname, filename) filepath = os.path.join(dirname, filename)
try: try:
with open(filepath, "r") as f: with open(filepath, "rt", encoding="utf-8") as f:
data = json.load(f) data = json.load(f)
name = data["Name"] name = data["Name"]
if name == "default": if name == "default":

View File

@ -32,7 +32,7 @@ def get_current_context_name_with_source():
docker_cfg_path = find_config_file() docker_cfg_path = find_config_file()
if docker_cfg_path: if docker_cfg_path:
try: try:
with open(docker_cfg_path) as f: with open(docker_cfg_path, "rt", encoding="utf-8") as f:
return ( return (
json.load(f).get("currentContext", "default"), json.load(f).get("currentContext", "default"),
f"configuration file {docker_cfg_path}", f"configuration file {docker_cfg_path}",
@ -53,7 +53,7 @@ def write_context_name_to_docker_config(name=None):
config = {} config = {}
if docker_cfg_path: if docker_cfg_path:
try: try:
with open(docker_cfg_path) as f: with open(docker_cfg_path, "rt", encoding="utf-8") as f:
config = json.load(f) config = json.load(f)
except Exception as e: except Exception as e:
return e return e
@ -67,7 +67,7 @@ def write_context_name_to_docker_config(name=None):
if not docker_cfg_path: if not docker_cfg_path:
docker_cfg_path = get_default_config_file() docker_cfg_path = get_default_config_file()
try: try:
with open(docker_cfg_path, "w") as f: with open(docker_cfg_path, "wt", encoding="utf-8") as f:
json.dump(config, f, indent=4) json.dump(config, f, indent=4)
except Exception as e: except Exception as e:
return e return e

View File

@ -133,7 +133,7 @@ class Context(object):
metadata = {} metadata = {}
try: try:
with open(meta_file) as f: with open(meta_file, "rt", encoding="utf-8") as f:
metadata = json.load(f) metadata = json.load(f)
except (OSError, KeyError, ValueError) as e: except (OSError, KeyError, ValueError) as e:
# unknown format # unknown format
@ -189,7 +189,7 @@ class Context(object):
meta_dir = get_meta_dir(self.name) meta_dir = get_meta_dir(self.name)
if not os.path.isdir(meta_dir): if not os.path.isdir(meta_dir):
os.makedirs(meta_dir) os.makedirs(meta_dir)
with open(get_meta_file(self.name), "w") as f: with open(get_meta_file(self.name), "wt", encoding="utf-8") as f:
f.write(json.dumps(self.Metadata)) f.write(json.dumps(self.Metadata))
tls_dir = get_tls_dir(self.name) tls_dir = get_tls_dir(self.name)

View File

@ -222,7 +222,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
ssh_config_file = os.path.expanduser("~/.ssh/config") ssh_config_file = os.path.expanduser("~/.ssh/config")
if os.path.exists(ssh_config_file): if os.path.exists(ssh_config_file):
conf = paramiko.SSHConfig() conf = paramiko.SSHConfig()
with open(ssh_config_file) as f: with open(ssh_config_file, "rt", encoding="utf-8") as f:
conf.parse(f) conf.parse(f)
host_config = conf.lookup(base_url.hostname) host_config = conf.lookup(base_url.hostname)
if "proxycommand" in host_config: if "proxycommand" in host_config:

View File

@ -12,12 +12,6 @@
from __future__ import annotations from __future__ import annotations
""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from ansible_collections.community.docker.plugins.module_utils._version import ( from ansible_collections.community.docker.plugins.module_utils._version import (
LooseVersion, LooseVersion,
) )
@ -26,6 +20,11 @@ from .._import_helper import HTTPAdapter, urllib3
from .basehttpadapter import BaseHTTPAdapter from .basehttpadapter import BaseHTTPAdapter
# Resolves OpenSSL issues in some servers:
# https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
# https://github.com/kennethreitz/requests/pull/799
PoolManager = urllib3.poolmanager.PoolManager PoolManager = urllib3.poolmanager.PoolManager

View File

@ -271,7 +271,7 @@ def process_dockerfile(dockerfile, path):
0 0
] or os.path.relpath(abs_dockerfile, path).startswith(".."): ] or os.path.relpath(abs_dockerfile, path).startswith(".."):
# Dockerfile not in context - read data to insert into tar later # Dockerfile not in context - read data to insert into tar later
with open(abs_dockerfile) as df: with open(abs_dockerfile, "rt", encoding="utf-8") as df:
return (f".dockerfile.{random.getrandbits(160):x}", df.read()) return (f".dockerfile.{random.getrandbits(160):x}", df.read())
# Dockerfile is inside the context - return path relative to context root # Dockerfile is inside the context - return path relative to context root

View File

@ -80,7 +80,7 @@ def load_general_config(config_path=None):
return {} return {}
try: try:
with open(config_file) as f: with open(config_file, "rt", encoding="utf-8") as f:
return json.load(f) return json.load(f)
except (IOError, ValueError) as e: except (IOError, ValueError) as e:
# In the case of a legacy `.dockercfg` file, we will not # In the case of a legacy `.dockercfg` file, we will not

View File

@ -10,9 +10,6 @@
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time. # Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
# Do not use this from other collections or standalone plugins/modules! # Do not use this from other collections or standalone plugins/modules!
from __future__ import annotations
"""Filename matching with shell patterns. """Filename matching with shell patterns.
fnmatch(FILENAME, PATTERN) matches according to the local convention. fnmatch(FILENAME, PATTERN) matches according to the local convention.
@ -25,6 +22,8 @@ The function translate(PATTERN) returns a regular expression
corresponding to PATTERN. (It does not compile it.) corresponding to PATTERN. (It does not compile it.)
""" """
from __future__ import annotations
import re import re

View File

@ -452,7 +452,7 @@ def parse_env_file(env_file):
""" """
environment = {} environment = {}
with open(env_file, "r") as f: with open(env_file, "rt", encoding="utf-8") as f:
for line in f: for line in f:
if line[0] == "#": if line[0] == "#":

View File

@ -17,7 +17,7 @@ from collections.abc import Mapping, Sequence
from ansible.module_utils.basic import AnsibleModule, missing_required_lib from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE
from ansible_collections.community.docker.plugins.module_utils._util import ( # noqa: F401, pylint: disable=unused-import from ansible_collections.community.docker.plugins.module_utils._util import (
DEFAULT_DOCKER_HOST, DEFAULT_DOCKER_HOST,
DEFAULT_TIMEOUT_SECONDS, DEFAULT_TIMEOUT_SECONDS,
DEFAULT_TLS, DEFAULT_TLS,
@ -101,14 +101,14 @@ if not HAS_DOCKER_PY:
# No Docker SDK for Python. Create a placeholder client to allow # No Docker SDK for Python. Create a placeholder client to allow
# instantiation of AnsibleModule and proper error handling # instantiation of AnsibleModule and proper error handling
class Client(object): # noqa: F811 class Client(object): # noqa: F811, pylint: disable=function-redefined
def __init__(self, **kwargs): def __init__(self, **kwargs):
pass pass
class APIError(Exception): # noqa: F811 class APIError(Exception): # noqa: F811, pylint: disable=function-redefined
pass pass
class NotFound(Exception): # noqa: F811 class NotFound(Exception): # noqa: F811, pylint: disable=function-redefined
pass pass
@ -133,43 +133,45 @@ def _get_tls_config(fail_function, **kwargs):
fail_function(f"TLS config error: {exc}") fail_function(f"TLS config error: {exc}")
def is_using_tls(auth): def is_using_tls(auth_data):
return auth["tls_verify"] or auth["tls"] return auth_data["tls_verify"] or auth_data["tls"]
def get_connect_params(auth, fail_function): def get_connect_params(auth_data, fail_function):
if is_using_tls(auth): if is_using_tls(auth_data):
auth["docker_host"] = auth["docker_host"].replace("tcp://", "https://") auth_data["docker_host"] = auth_data["docker_host"].replace(
"tcp://", "https://"
result = dict(
base_url=auth["docker_host"],
version=auth["api_version"],
timeout=auth["timeout"],
) )
if auth["tls_verify"]: result = dict(
base_url=auth_data["docker_host"],
version=auth_data["api_version"],
timeout=auth_data["timeout"],
)
if auth_data["tls_verify"]:
# TLS with verification # TLS with verification
tls_config = dict( tls_config = dict(
verify=True, verify=True,
assert_hostname=auth["tls_hostname"], assert_hostname=auth_data["tls_hostname"],
fail_function=fail_function, fail_function=fail_function,
) )
if auth["cert_path"] and auth["key_path"]: if auth_data["cert_path"] and auth_data["key_path"]:
tls_config["client_cert"] = (auth["cert_path"], auth["key_path"]) tls_config["client_cert"] = (auth_data["cert_path"], auth_data["key_path"])
if auth["cacert_path"]: if auth_data["cacert_path"]:
tls_config["ca_cert"] = auth["cacert_path"] tls_config["ca_cert"] = auth_data["cacert_path"]
result["tls"] = _get_tls_config(**tls_config) result["tls"] = _get_tls_config(**tls_config)
elif auth["tls"]: elif auth_data["tls"]:
# TLS without verification # TLS without verification
tls_config = dict( tls_config = dict(
verify=False, verify=False,
fail_function=fail_function, fail_function=fail_function,
) )
if auth["cert_path"] and auth["key_path"]: if auth_data["cert_path"] and auth_data["key_path"]:
tls_config["client_cert"] = (auth["cert_path"], auth["key_path"]) tls_config["client_cert"] = (auth_data["cert_path"], auth_data["key_path"])
result["tls"] = _get_tls_config(**tls_config) result["tls"] = _get_tls_config(**tls_config)
if auth.get("use_ssh_client"): if auth_data.get("use_ssh_client"):
if LooseVersion(docker_version) < LooseVersion("4.4.0"): if LooseVersion(docker_version) < LooseVersion("4.4.0"):
fail_function( fail_function(
"use_ssh_client=True requires Docker SDK for Python 4.4.0 or newer" "use_ssh_client=True requires Docker SDK for Python 4.4.0 or newer"
@ -258,16 +260,18 @@ class AnsibleDockerClientBase(Client):
pass pass
@staticmethod @staticmethod
def _get_value(param_name, param_value, env_variable, default_value, type="str"): def _get_value(
param_name, param_value, env_variable, default_value, value_type="str"
):
if param_value is not None: if param_value is not None:
# take module parameter value # take module parameter value
if type == "bool": if value_type == "bool":
if param_value in BOOLEANS_TRUE: if param_value in BOOLEANS_TRUE:
return True return True
if param_value in BOOLEANS_FALSE: if param_value in BOOLEANS_FALSE:
return False return False
return bool(param_value) return bool(param_value)
if type == "int": if value_type == "int":
return int(param_value) return int(param_value)
return param_value return param_value
@ -281,13 +285,13 @@ class AnsibleDockerClientBase(Client):
return os.path.join(env_value, "ca.pem") return os.path.join(env_value, "ca.pem")
if param_name == "key_path": if param_name == "key_path":
return os.path.join(env_value, "key.pem") return os.path.join(env_value, "key.pem")
if type == "bool": if value_type == "bool":
if env_value in BOOLEANS_TRUE: if env_value in BOOLEANS_TRUE:
return True return True
if env_value in BOOLEANS_FALSE: if env_value in BOOLEANS_FALSE:
return False return False
return bool(env_value) return bool(env_value)
if type == "int": if value_type == "int":
return int(env_value) return int(env_value)
return env_value return env_value
@ -317,50 +321,66 @@ class AnsibleDockerClientBase(Client):
params["docker_host"], params["docker_host"],
"DOCKER_HOST", "DOCKER_HOST",
DEFAULT_DOCKER_HOST, DEFAULT_DOCKER_HOST,
type="str", value_type="str",
), ),
tls_hostname=self._get_value( tls_hostname=self._get_value(
"tls_hostname", "tls_hostname",
params["tls_hostname"], params["tls_hostname"],
"DOCKER_TLS_HOSTNAME", "DOCKER_TLS_HOSTNAME",
None, None,
type="str", value_type="str",
), ),
api_version=self._get_value( api_version=self._get_value(
"api_version", "api_version",
params["api_version"], params["api_version"],
"DOCKER_API_VERSION", "DOCKER_API_VERSION",
"auto", "auto",
type="str", value_type="str",
), ),
cacert_path=self._get_value( cacert_path=self._get_value(
"cacert_path", params["ca_path"], "DOCKER_CERT_PATH", None, type="str" "cacert_path",
params["ca_path"],
"DOCKER_CERT_PATH",
None,
value_type="str",
), ),
cert_path=self._get_value( cert_path=self._get_value(
"cert_path", params["client_cert"], "DOCKER_CERT_PATH", None, type="str" "cert_path",
params["client_cert"],
"DOCKER_CERT_PATH",
None,
value_type="str",
), ),
key_path=self._get_value( key_path=self._get_value(
"key_path", params["client_key"], "DOCKER_CERT_PATH", None, type="str" "key_path",
params["client_key"],
"DOCKER_CERT_PATH",
None,
value_type="str",
), ),
tls=self._get_value( tls=self._get_value(
"tls", params["tls"], "DOCKER_TLS", DEFAULT_TLS, type="bool" "tls", params["tls"], "DOCKER_TLS", DEFAULT_TLS, value_type="bool"
), ),
tls_verify=self._get_value( tls_verify=self._get_value(
"validate_certs", "validate_certs",
params["validate_certs"], params["validate_certs"],
"DOCKER_TLS_VERIFY", "DOCKER_TLS_VERIFY",
DEFAULT_TLS_VERIFY, DEFAULT_TLS_VERIFY,
type="bool", value_type="bool",
), ),
timeout=self._get_value( timeout=self._get_value(
"timeout", "timeout",
params["timeout"], params["timeout"],
"DOCKER_TIMEOUT", "DOCKER_TIMEOUT",
DEFAULT_TIMEOUT_SECONDS, DEFAULT_TIMEOUT_SECONDS,
type="int", value_type="int",
), ),
use_ssh_client=self._get_value( use_ssh_client=self._get_value(
"use_ssh_client", params["use_ssh_client"], None, False, type="bool" "use_ssh_client",
params["use_ssh_client"],
None,
False,
value_type="bool",
), ),
) )
@ -561,7 +581,7 @@ class AnsibleDockerClientBase(Client):
break break
return images return images
def pull_image(self, name, tag="latest", platform=None): def pull_image(self, name, tag="latest", image_platform=None):
""" """
Pull an image Pull an image
""" """
@ -570,8 +590,8 @@ class AnsibleDockerClientBase(Client):
stream=True, stream=True,
decode=True, decode=True,
) )
if platform is not None: if image_platform is not None:
kwargs["platform"] = platform kwargs["platform"] = image_platform
self.log(f"Pulling image {name}:{tag}") self.log(f"Pulling image {name}:{tag}")
old_tag = self.find_image(name, tag) old_tag = self.find_image(name, tag)
try: try:
@ -606,7 +626,7 @@ class AnsibleDockerClientBase(Client):
self._url("/distribution/{0}/json", image), self._url("/distribution/{0}/json", image),
headers={"X-Registry-Auth": header}, headers={"X-Registry-Auth": header},
), ),
json=True, get_json=True,
) )
return super(AnsibleDockerClientBase, self).inspect_distribution( return super(AnsibleDockerClientBase, self).inspect_distribution(
image, **kwargs image, **kwargs

View File

@ -46,7 +46,7 @@ from ansible_collections.community.docker.plugins.module_utils._api.utils.utils
convert_filters, convert_filters,
parse_repository_tag, parse_repository_tag,
) )
from ansible_collections.community.docker.plugins.module_utils._util import ( # noqa: F401, pylint: disable=unused-import from ansible_collections.community.docker.plugins.module_utils._util import (
DEFAULT_DOCKER_HOST, DEFAULT_DOCKER_HOST,
DEFAULT_TIMEOUT_SECONDS, DEFAULT_TIMEOUT_SECONDS,
DEFAULT_TLS, DEFAULT_TLS,
@ -151,16 +151,18 @@ class AnsibleDockerClientBase(Client):
pass pass
@staticmethod @staticmethod
def _get_value(param_name, param_value, env_variable, default_value, type="str"): def _get_value(
param_name, param_value, env_variable, default_value, value_type="str"
):
if param_value is not None: if param_value is not None:
# take module parameter value # take module parameter value
if type == "bool": if value_type == "bool":
if param_value in BOOLEANS_TRUE: if param_value in BOOLEANS_TRUE:
return True return True
if param_value in BOOLEANS_FALSE: if param_value in BOOLEANS_FALSE:
return False return False
return bool(param_value) return bool(param_value)
if type == "int": if value_type == "int":
return int(param_value) return int(param_value)
return param_value return param_value
@ -174,13 +176,13 @@ class AnsibleDockerClientBase(Client):
return os.path.join(env_value, "ca.pem") return os.path.join(env_value, "ca.pem")
if param_name == "key_path": if param_name == "key_path":
return os.path.join(env_value, "key.pem") return os.path.join(env_value, "key.pem")
if type == "bool": if value_type == "bool":
if env_value in BOOLEANS_TRUE: if env_value in BOOLEANS_TRUE:
return True return True
if env_value in BOOLEANS_FALSE: if env_value in BOOLEANS_FALSE:
return False return False
return bool(env_value) return bool(env_value)
if type == "int": if value_type == "int":
return int(env_value) return int(env_value)
return env_value return env_value
@ -210,50 +212,66 @@ class AnsibleDockerClientBase(Client):
params["docker_host"], params["docker_host"],
"DOCKER_HOST", "DOCKER_HOST",
DEFAULT_DOCKER_HOST, DEFAULT_DOCKER_HOST,
type="str", value_type="str",
), ),
tls_hostname=self._get_value( tls_hostname=self._get_value(
"tls_hostname", "tls_hostname",
params["tls_hostname"], params["tls_hostname"],
"DOCKER_TLS_HOSTNAME", "DOCKER_TLS_HOSTNAME",
None, None,
type="str", value_type="str",
), ),
api_version=self._get_value( api_version=self._get_value(
"api_version", "api_version",
params["api_version"], params["api_version"],
"DOCKER_API_VERSION", "DOCKER_API_VERSION",
"auto", "auto",
type="str", value_type="str",
), ),
cacert_path=self._get_value( cacert_path=self._get_value(
"cacert_path", params["ca_path"], "DOCKER_CERT_PATH", None, type="str" "cacert_path",
params["ca_path"],
"DOCKER_CERT_PATH",
None,
value_type="str",
), ),
cert_path=self._get_value( cert_path=self._get_value(
"cert_path", params["client_cert"], "DOCKER_CERT_PATH", None, type="str" "cert_path",
params["client_cert"],
"DOCKER_CERT_PATH",
None,
value_type="str",
), ),
key_path=self._get_value( key_path=self._get_value(
"key_path", params["client_key"], "DOCKER_CERT_PATH", None, type="str" "key_path",
params["client_key"],
"DOCKER_CERT_PATH",
None,
value_type="str",
), ),
tls=self._get_value( tls=self._get_value(
"tls", params["tls"], "DOCKER_TLS", DEFAULT_TLS, type="bool" "tls", params["tls"], "DOCKER_TLS", DEFAULT_TLS, value_type="bool"
), ),
tls_verify=self._get_value( tls_verify=self._get_value(
"validate_certs", "validate_certs",
params["validate_certs"], params["validate_certs"],
"DOCKER_TLS_VERIFY", "DOCKER_TLS_VERIFY",
DEFAULT_TLS_VERIFY, DEFAULT_TLS_VERIFY,
type="bool", value_type="bool",
), ),
timeout=self._get_value( timeout=self._get_value(
"timeout", "timeout",
params["timeout"], params["timeout"],
"DOCKER_TIMEOUT", "DOCKER_TIMEOUT",
DEFAULT_TIMEOUT_SECONDS, DEFAULT_TIMEOUT_SECONDS,
type="int", value_type="int",
), ),
use_ssh_client=self._get_value( use_ssh_client=self._get_value(
"use_ssh_client", params["use_ssh_client"], None, False, type="bool" "use_ssh_client",
params["use_ssh_client"],
None,
False,
value_type="bool",
), ),
) )
@ -477,7 +495,7 @@ class AnsibleDockerClientBase(Client):
except Exception as exc: except Exception as exc:
self.fail(f"Error inspecting image ID {image_id} - {exc}") self.fail(f"Error inspecting image ID {image_id} - {exc}")
def pull_image(self, name, tag="latest", platform=None): def pull_image(self, name, tag="latest", image_platform=None):
""" """
Pull an image Pull an image
""" """
@ -490,8 +508,8 @@ class AnsibleDockerClientBase(Client):
"tag": tag or image_tag or "latest", "tag": tag or image_tag or "latest",
"fromImage": repository, "fromImage": repository,
} }
if platform is not None: if image_platform is not None:
params["platform"] = platform params["platform"] = image_platform
headers = {} headers = {}
header = auth.get_config_header(self, registry) header = auth.get_config_header(self, registry)

View File

@ -17,7 +17,7 @@ from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.docker.plugins.module_utils._api.auth import ( from ansible_collections.community.docker.plugins.module_utils._api.auth import (
resolve_repository_name, resolve_repository_name,
) )
from ansible_collections.community.docker.plugins.module_utils._util import ( # noqa: F401, pylint: disable=unused-import from ansible_collections.community.docker.plugins.module_utils._util import (
DEFAULT_DOCKER_HOST, DEFAULT_DOCKER_HOST,
DEFAULT_TLS, DEFAULT_TLS,
DEFAULT_TLS_VERIFY, DEFAULT_TLS_VERIFY,

View File

@ -942,9 +942,9 @@ class BaseComposeManager(DockerBaseClass):
result.pop(res) result.pop(res)
def cleanup(self): def cleanup(self):
for dir in self.cleanup_dirs: for directory in self.cleanup_dirs:
try: try:
shutil.rmtree(dir, True) shutil.rmtree(directory, True)
except Exception: except Exception:
# should not happen, but simply ignore to be on the safe side # should not happen, but simply ignore to be on the safe side
pass pass

View File

@ -53,19 +53,19 @@ _MOUNT_OPTION_TYPES = dict(
) )
def _get_ansible_type(type): def _get_ansible_type(value_type):
if type == "set": if value_type == "set":
return "list" return "list"
if type not in ("list", "dict", "bool", "int", "float", "str"): if value_type not in ("list", "dict", "bool", "int", "float", "str"):
raise Exception(f'Invalid type "{type}"') raise Exception(f'Invalid type "{value_type}"')
return type return value_type
class Option(object): class Option(object):
def __init__( def __init__(
self, self,
name, name,
type, value_type,
owner, owner,
ansible_type=None, ansible_type=None,
elements=None, elements=None,
@ -81,9 +81,9 @@ class Option(object):
compare=None, compare=None,
): ):
self.name = name self.name = name
self.type = type self.value_type = value_type
self.ansible_type = ansible_type or _get_ansible_type(type) self.ansible_type = ansible_type or _get_ansible_type(value_type)
needs_elements = self.type in ("list", "set") needs_elements = self.value_type in ("list", "set")
needs_ansible_elements = self.ansible_type in ("list",) needs_ansible_elements = self.ansible_type in ("list",)
if elements is not None and not needs_elements: if elements is not None and not needs_elements:
raise Exception("elements only allowed for lists/sets") raise Exception("elements only allowed for lists/sets")
@ -118,7 +118,7 @@ class Option(object):
self.ansible_suboptions = ansible_suboptions if needs_suboptions else None self.ansible_suboptions = ansible_suboptions if needs_suboptions else None
self.ansible_aliases = ansible_aliases or [] self.ansible_aliases = ansible_aliases or []
self.ansible_choices = ansible_choices self.ansible_choices = ansible_choices
comparison_type = self.type comparison_type = self.value_type
if comparison_type == "set" and self.elements == "dict": if comparison_type == "set" and self.elements == "dict":
comparison_type = "set(dict)" comparison_type = "set(dict)"
elif comparison_type not in ("set", "list", "dict"): elif comparison_type not in ("set", "list", "dict"):
@ -330,7 +330,7 @@ class EngineDriver(object):
pass pass
@abc.abstractmethod @abc.abstractmethod
def pull_image(self, client, repository, tag, platform=None): def pull_image(self, client, repository, tag, image_platform=None):
pass pass
@abc.abstractmethod @abc.abstractmethod
@ -859,7 +859,7 @@ def _preprocess_ports(module, values):
else: else:
port_binds = len(container_ports) * [(ipaddr,)] port_binds = len(container_ports) * [(ipaddr,)]
else: else:
module.fail_json( return module.fail_json(
msg=f'Invalid port description "{port}" - expected 1 to 3 colon-separated parts, but got {p_len}. ' msg=f'Invalid port description "{port}" - expected 1 to 3 colon-separated parts, but got {p_len}. '
"Maybe you forgot to use square brackets ([...]) around an IPv6 address?" "Maybe you forgot to use square brackets ([...]) around an IPv6 address?"
) )
@ -920,55 +920,55 @@ def _compare_platform(option, param_value, container_value):
return param_value == container_value return param_value == container_value
OPTION_AUTO_REMOVE = OptionGroup().add_option("auto_remove", type="bool") OPTION_AUTO_REMOVE = OptionGroup().add_option("auto_remove", value_type="bool")
OPTION_BLKIO_WEIGHT = OptionGroup().add_option("blkio_weight", type="int") OPTION_BLKIO_WEIGHT = OptionGroup().add_option("blkio_weight", value_type="int")
OPTION_CAPABILITIES = OptionGroup().add_option( OPTION_CAPABILITIES = OptionGroup().add_option(
"capabilities", type="set", elements="str" "capabilities", value_type="set", elements="str"
) )
OPTION_CAP_DROP = OptionGroup().add_option("cap_drop", type="set", elements="str") OPTION_CAP_DROP = OptionGroup().add_option("cap_drop", value_type="set", elements="str")
OPTION_CGROUP_NS_MODE = OptionGroup().add_option( OPTION_CGROUP_NS_MODE = OptionGroup().add_option(
"cgroupns_mode", type="str", ansible_choices=["private", "host"] "cgroupns_mode", value_type="str", ansible_choices=["private", "host"]
) )
OPTION_CGROUP_PARENT = OptionGroup().add_option("cgroup_parent", type="str") OPTION_CGROUP_PARENT = OptionGroup().add_option("cgroup_parent", value_type="str")
OPTION_COMMAND = OptionGroup(preprocess=_preprocess_command).add_option( OPTION_COMMAND = OptionGroup(preprocess=_preprocess_command).add_option(
"command", type="list", elements="str", ansible_type="raw" "command", value_type="list", elements="str", ansible_type="raw"
) )
OPTION_CPU_PERIOD = OptionGroup().add_option("cpu_period", type="int") OPTION_CPU_PERIOD = OptionGroup().add_option("cpu_period", value_type="int")
OPTION_CPU_QUOTA = OptionGroup().add_option("cpu_quota", type="int") OPTION_CPU_QUOTA = OptionGroup().add_option("cpu_quota", value_type="int")
OPTION_CPUSET_CPUS = OptionGroup().add_option("cpuset_cpus", type="str") OPTION_CPUSET_CPUS = OptionGroup().add_option("cpuset_cpus", value_type="str")
OPTION_CPUSET_MEMS = OptionGroup().add_option("cpuset_mems", type="str") OPTION_CPUSET_MEMS = OptionGroup().add_option("cpuset_mems", value_type="str")
OPTION_CPU_SHARES = OptionGroup().add_option("cpu_shares", type="int") OPTION_CPU_SHARES = OptionGroup().add_option("cpu_shares", value_type="int")
OPTION_ENTRYPOINT = OptionGroup(preprocess=_preprocess_entrypoint).add_option( OPTION_ENTRYPOINT = OptionGroup(preprocess=_preprocess_entrypoint).add_option(
"entrypoint", type="list", elements="str" "entrypoint", value_type="list", elements="str"
) )
OPTION_CPUS = OptionGroup().add_option("cpus", type="int", ansible_type="float") OPTION_CPUS = OptionGroup().add_option("cpus", value_type="int", ansible_type="float")
OPTION_DETACH_INTERACTIVE = ( OPTION_DETACH_INTERACTIVE = (
OptionGroup() OptionGroup()
.add_option("detach", type="bool") .add_option("detach", value_type="bool")
.add_option("interactive", type="bool") .add_option("interactive", value_type="bool")
) )
OPTION_DEVICES = OptionGroup().add_option( OPTION_DEVICES = OptionGroup().add_option(
"devices", type="set", elements="dict", ansible_elements="str" "devices", value_type="set", elements="dict", ansible_elements="str"
) )
OPTION_DEVICE_READ_BPS = OptionGroup().add_option( OPTION_DEVICE_READ_BPS = OptionGroup().add_option(
"device_read_bps", "device_read_bps",
type="set", value_type="set",
elements="dict", elements="dict",
ansible_suboptions=dict( ansible_suboptions=dict(
path=dict(required=True, type="str"), path=dict(required=True, type="str"),
@ -978,7 +978,7 @@ OPTION_DEVICE_READ_BPS = OptionGroup().add_option(
OPTION_DEVICE_WRITE_BPS = OptionGroup().add_option( OPTION_DEVICE_WRITE_BPS = OptionGroup().add_option(
"device_write_bps", "device_write_bps",
type="set", value_type="set",
elements="dict", elements="dict",
ansible_suboptions=dict( ansible_suboptions=dict(
path=dict(required=True, type="str"), path=dict(required=True, type="str"),
@ -988,7 +988,7 @@ OPTION_DEVICE_WRITE_BPS = OptionGroup().add_option(
OPTION_DEVICE_READ_IOPS = OptionGroup().add_option( OPTION_DEVICE_READ_IOPS = OptionGroup().add_option(
"device_read_iops", "device_read_iops",
type="set", value_type="set",
elements="dict", elements="dict",
ansible_suboptions=dict( ansible_suboptions=dict(
path=dict(required=True, type="str"), path=dict(required=True, type="str"),
@ -998,7 +998,7 @@ OPTION_DEVICE_READ_IOPS = OptionGroup().add_option(
OPTION_DEVICE_WRITE_IOPS = OptionGroup().add_option( OPTION_DEVICE_WRITE_IOPS = OptionGroup().add_option(
"device_write_iops", "device_write_iops",
type="set", value_type="set",
elements="dict", elements="dict",
ansible_suboptions=dict( ansible_suboptions=dict(
path=dict(required=True, type="str"), path=dict(required=True, type="str"),
@ -1008,7 +1008,7 @@ OPTION_DEVICE_WRITE_IOPS = OptionGroup().add_option(
OPTION_DEVICE_REQUESTS = OptionGroup().add_option( OPTION_DEVICE_REQUESTS = OptionGroup().add_option(
"device_requests", "device_requests",
type="set", value_type="set",
elements="dict", elements="dict",
ansible_suboptions=dict( ansible_suboptions=dict(
capabilities=dict(type="list", elements="list"), capabilities=dict(type="list", elements="list"),
@ -1020,29 +1020,33 @@ OPTION_DEVICE_REQUESTS = OptionGroup().add_option(
) )
OPTION_DEVICE_CGROUP_RULES = OptionGroup().add_option( OPTION_DEVICE_CGROUP_RULES = OptionGroup().add_option(
"device_cgroup_rules", type="list", elements="str" "device_cgroup_rules", value_type="list", elements="str"
) )
OPTION_DNS_SERVERS = OptionGroup().add_option( OPTION_DNS_SERVERS = OptionGroup().add_option(
"dns_servers", type="list", elements="str" "dns_servers", value_type="list", elements="str"
) )
OPTION_DNS_OPTS = OptionGroup().add_option("dns_opts", type="set", elements="str") OPTION_DNS_OPTS = OptionGroup().add_option("dns_opts", value_type="set", elements="str")
OPTION_DNS_SEARCH_DOMAINS = OptionGroup().add_option( OPTION_DNS_SEARCH_DOMAINS = OptionGroup().add_option(
"dns_search_domains", type="list", elements="str" "dns_search_domains", value_type="list", elements="str"
) )
OPTION_DOMAINNAME = OptionGroup().add_option("domainname", type="str") OPTION_DOMAINNAME = OptionGroup().add_option("domainname", value_type="str")
OPTION_ENVIRONMENT = ( OPTION_ENVIRONMENT = (
OptionGroup(preprocess=_preprocess_env) OptionGroup(preprocess=_preprocess_env)
.add_option( .add_option(
"env", type="set", ansible_type="dict", elements="str", needs_no_suboptions=True "env",
value_type="set",
ansible_type="dict",
elements="str",
needs_no_suboptions=True,
) )
.add_option( .add_option(
"env_file", "env_file",
type="set", value_type="set",
ansible_type="path", ansible_type="path",
elements="str", elements="str",
not_a_container_option=True, not_a_container_option=True,
@ -1051,17 +1055,17 @@ OPTION_ENVIRONMENT = (
OPTION_ETC_HOSTS = OptionGroup().add_option( OPTION_ETC_HOSTS = OptionGroup().add_option(
"etc_hosts", "etc_hosts",
type="set", value_type="set",
ansible_type="dict", ansible_type="dict",
elements="str", elements="str",
needs_no_suboptions=True, needs_no_suboptions=True,
) )
OPTION_GROUPS = OptionGroup().add_option("groups", type="set", elements="str") OPTION_GROUPS = OptionGroup().add_option("groups", value_type="set", elements="str")
OPTION_HEALTHCHECK = OptionGroup(preprocess=_preprocess_healthcheck).add_option( OPTION_HEALTHCHECK = OptionGroup(preprocess=_preprocess_healthcheck).add_option(
"healthcheck", "healthcheck",
type="dict", value_type="dict",
ansible_suboptions=dict( ansible_suboptions=dict(
test=dict(type="raw"), test=dict(type="raw"),
test_cli_compatible=dict(type="bool", default=False), test_cli_compatible=dict(type="bool", default=False),
@ -1073,69 +1077,71 @@ OPTION_HEALTHCHECK = OptionGroup(preprocess=_preprocess_healthcheck).add_option(
), ),
) )
OPTION_HOSTNAME = OptionGroup().add_option("hostname", type="str") OPTION_HOSTNAME = OptionGroup().add_option("hostname", value_type="str")
OPTION_IMAGE = OptionGroup().add_option("image", type="str") OPTION_IMAGE = OptionGroup().add_option("image", value_type="str")
OPTION_INIT = OptionGroup().add_option("init", type="bool") OPTION_INIT = OptionGroup().add_option("init", value_type="bool")
OPTION_IPC_MODE = OptionGroup().add_option("ipc_mode", type="str") OPTION_IPC_MODE = OptionGroup().add_option("ipc_mode", value_type="str")
OPTION_KERNEL_MEMORY = OptionGroup( OPTION_KERNEL_MEMORY = OptionGroup(
preprocess=partial(_preprocess_convert_to_bytes, name="kernel_memory") preprocess=partial(_preprocess_convert_to_bytes, name="kernel_memory")
).add_option("kernel_memory", type="int", ansible_type="str") ).add_option("kernel_memory", value_type="int", ansible_type="str")
OPTION_LABELS = OptionGroup(preprocess=_preprocess_labels).add_option( OPTION_LABELS = OptionGroup(preprocess=_preprocess_labels).add_option(
"labels", type="dict", needs_no_suboptions=True "labels", value_type="dict", needs_no_suboptions=True
) )
OPTION_LINKS = OptionGroup().add_option( OPTION_LINKS = OptionGroup().add_option(
"links", type="set", elements="list", ansible_elements="str" "links", value_type="set", elements="list", ansible_elements="str"
) )
OPTION_LOG_DRIVER_OPTIONS = ( OPTION_LOG_DRIVER_OPTIONS = (
OptionGroup( OptionGroup(
preprocess=_preprocess_log, ansible_required_by={"log_options": ["log_driver"]} preprocess=_preprocess_log, ansible_required_by={"log_options": ["log_driver"]}
) )
.add_option("log_driver", type="str") .add_option("log_driver", value_type="str")
.add_option( .add_option(
"log_options", "log_options",
type="dict", value_type="dict",
ansible_aliases=["log_opt"], ansible_aliases=["log_opt"],
needs_no_suboptions=True, needs_no_suboptions=True,
) )
) )
OPTION_MAC_ADDRESS = OptionGroup(preprocess=_preprocess_mac_address).add_option( OPTION_MAC_ADDRESS = OptionGroup(preprocess=_preprocess_mac_address).add_option(
"mac_address", type="str" "mac_address", value_type="str"
) )
OPTION_MEMORY = OptionGroup( OPTION_MEMORY = OptionGroup(
preprocess=partial(_preprocess_convert_to_bytes, name="memory") preprocess=partial(_preprocess_convert_to_bytes, name="memory")
).add_option("memory", type="int", ansible_type="str") ).add_option("memory", value_type="int", ansible_type="str")
OPTION_MEMORY_RESERVATION = OptionGroup( OPTION_MEMORY_RESERVATION = OptionGroup(
preprocess=partial(_preprocess_convert_to_bytes, name="memory_reservation") preprocess=partial(_preprocess_convert_to_bytes, name="memory_reservation")
).add_option("memory_reservation", type="int", ansible_type="str") ).add_option("memory_reservation", value_type="int", ansible_type="str")
OPTION_MEMORY_SWAP = OptionGroup( OPTION_MEMORY_SWAP = OptionGroup(
preprocess=partial( preprocess=partial(
_preprocess_convert_to_bytes, name="memory_swap", unlimited_value=-1 _preprocess_convert_to_bytes, name="memory_swap", unlimited_value=-1
) )
).add_option("memory_swap", type="int", ansible_type="str") ).add_option("memory_swap", value_type="int", ansible_type="str")
OPTION_MEMORY_SWAPPINESS = OptionGroup().add_option("memory_swappiness", type="int") OPTION_MEMORY_SWAPPINESS = OptionGroup().add_option(
"memory_swappiness", value_type="int"
)
OPTION_STOP_TIMEOUT = OptionGroup().add_option( OPTION_STOP_TIMEOUT = OptionGroup().add_option(
"stop_timeout", type="int", default_comparison="ignore" "stop_timeout", value_type="int", default_comparison="ignore"
) )
OPTION_NETWORK = ( OPTION_NETWORK = (
OptionGroup(preprocess=_preprocess_networks) OptionGroup(preprocess=_preprocess_networks)
.add_option("network_mode", type="str") .add_option("network_mode", value_type="str")
.add_option( .add_option(
"networks", "networks",
type="set", value_type="set",
elements="dict", elements="dict",
ansible_suboptions=dict( ansible_suboptions=dict(
name=dict(type="str", required=True), name=dict(type="str", required=True),
@ -1150,81 +1156,81 @@ OPTION_NETWORK = (
) )
) )
OPTION_OOM_KILLER = OptionGroup().add_option("oom_killer", type="bool") OPTION_OOM_KILLER = OptionGroup().add_option("oom_killer", value_type="bool")
OPTION_OOM_SCORE_ADJ = OptionGroup().add_option("oom_score_adj", type="int") OPTION_OOM_SCORE_ADJ = OptionGroup().add_option("oom_score_adj", value_type="int")
OPTION_PID_MODE = OptionGroup().add_option("pid_mode", type="str") OPTION_PID_MODE = OptionGroup().add_option("pid_mode", value_type="str")
OPTION_PIDS_LIMIT = OptionGroup().add_option("pids_limit", type="int") OPTION_PIDS_LIMIT = OptionGroup().add_option("pids_limit", value_type="int")
OPTION_PLATFORM = OptionGroup().add_option( OPTION_PLATFORM = OptionGroup().add_option(
"platform", type="str", compare=_compare_platform "platform", value_type="str", compare=_compare_platform
) )
OPTION_PRIVILEGED = OptionGroup().add_option("privileged", type="bool") OPTION_PRIVILEGED = OptionGroup().add_option("privileged", value_type="bool")
OPTION_READ_ONLY = OptionGroup().add_option("read_only", type="bool") OPTION_READ_ONLY = OptionGroup().add_option("read_only", value_type="bool")
OPTION_RESTART_POLICY = ( OPTION_RESTART_POLICY = (
OptionGroup(ansible_required_by={"restart_retries": ["restart_policy"]}) OptionGroup(ansible_required_by={"restart_retries": ["restart_policy"]})
.add_option( .add_option(
"restart_policy", "restart_policy",
type="str", value_type="str",
ansible_choices=["no", "on-failure", "always", "unless-stopped"], ansible_choices=["no", "on-failure", "always", "unless-stopped"],
) )
.add_option("restart_retries", type="int") .add_option("restart_retries", value_type="int")
) )
OPTION_RUNTIME = OptionGroup().add_option("runtime", type="str") OPTION_RUNTIME = OptionGroup().add_option("runtime", value_type="str")
OPTION_SECURITY_OPTS = OptionGroup().add_option( OPTION_SECURITY_OPTS = OptionGroup().add_option(
"security_opts", type="set", elements="str" "security_opts", value_type="set", elements="str"
) )
OPTION_SHM_SIZE = OptionGroup( OPTION_SHM_SIZE = OptionGroup(
preprocess=partial(_preprocess_convert_to_bytes, name="shm_size") preprocess=partial(_preprocess_convert_to_bytes, name="shm_size")
).add_option("shm_size", type="int", ansible_type="str") ).add_option("shm_size", value_type="int", ansible_type="str")
OPTION_STOP_SIGNAL = OptionGroup().add_option("stop_signal", type="str") OPTION_STOP_SIGNAL = OptionGroup().add_option("stop_signal", value_type="str")
OPTION_STORAGE_OPTS = OptionGroup().add_option( OPTION_STORAGE_OPTS = OptionGroup().add_option(
"storage_opts", type="dict", needs_no_suboptions=True "storage_opts", value_type="dict", needs_no_suboptions=True
) )
OPTION_SYSCTLS = OptionGroup(preprocess=_preprocess_sysctls).add_option( OPTION_SYSCTLS = OptionGroup(preprocess=_preprocess_sysctls).add_option(
"sysctls", type="dict", needs_no_suboptions=True "sysctls", value_type="dict", needs_no_suboptions=True
) )
OPTION_TMPFS = OptionGroup(preprocess=_preprocess_tmpfs).add_option( OPTION_TMPFS = OptionGroup(preprocess=_preprocess_tmpfs).add_option(
"tmpfs", type="dict", ansible_type="list", ansible_elements="str" "tmpfs", value_type="dict", ansible_type="list", ansible_elements="str"
) )
OPTION_TTY = OptionGroup().add_option("tty", type="bool") OPTION_TTY = OptionGroup().add_option("tty", value_type="bool")
OPTION_ULIMITS = OptionGroup(preprocess=_preprocess_ulimits).add_option( OPTION_ULIMITS = OptionGroup(preprocess=_preprocess_ulimits).add_option(
"ulimits", type="set", elements="dict", ansible_elements="str" "ulimits", value_type="set", elements="dict", ansible_elements="str"
) )
OPTION_USER = OptionGroup().add_option("user", type="str") OPTION_USER = OptionGroup().add_option("user", value_type="str")
OPTION_USERNS_MODE = OptionGroup().add_option("userns_mode", type="str") OPTION_USERNS_MODE = OptionGroup().add_option("userns_mode", value_type="str")
OPTION_UTS = OptionGroup().add_option("uts", type="str") OPTION_UTS = OptionGroup().add_option("uts", value_type="str")
OPTION_VOLUME_DRIVER = OptionGroup().add_option("volume_driver", type="str") OPTION_VOLUME_DRIVER = OptionGroup().add_option("volume_driver", value_type="str")
OPTION_VOLUMES_FROM = OptionGroup().add_option( OPTION_VOLUMES_FROM = OptionGroup().add_option(
"volumes_from", type="set", elements="str" "volumes_from", value_type="set", elements="str"
) )
OPTION_WORKING_DIR = OptionGroup().add_option("working_dir", type="str") OPTION_WORKING_DIR = OptionGroup().add_option("working_dir", value_type="str")
OPTION_MOUNTS_VOLUMES = ( OPTION_MOUNTS_VOLUMES = (
OptionGroup(preprocess=_preprocess_mounts) OptionGroup(preprocess=_preprocess_mounts)
.add_option( .add_option(
"mounts", "mounts",
type="set", value_type="set",
elements="dict", elements="dict",
ansible_suboptions=dict( ansible_suboptions=dict(
target=dict(type="str", required=True), target=dict(type="str", required=True),
@ -1256,10 +1262,10 @@ OPTION_MOUNTS_VOLUMES = (
tmpfs_options=dict(type="list", elements="dict"), tmpfs_options=dict(type="list", elements="dict"),
), ),
) )
.add_option("volumes", type="set", elements="str") .add_option("volumes", value_type="set", elements="str")
.add_option( .add_option(
"volume_binds", "volume_binds",
type="set", value_type="set",
elements="str", elements="str",
not_an_ansible_option=True, not_an_ansible_option=True,
copy_comparison_from="volumes", copy_comparison_from="volumes",
@ -1270,21 +1276,21 @@ OPTION_PORTS = (
OptionGroup(preprocess=_preprocess_ports) OptionGroup(preprocess=_preprocess_ports)
.add_option( .add_option(
"exposed_ports", "exposed_ports",
type="set", value_type="set",
elements="str", elements="str",
ansible_aliases=["exposed", "expose"], ansible_aliases=["exposed", "expose"],
) )
.add_option("publish_all_ports", type="bool") .add_option("publish_all_ports", value_type="bool")
.add_option( .add_option(
"published_ports", "published_ports",
type="dict", value_type="dict",
ansible_type="list", ansible_type="list",
ansible_elements="str", ansible_elements="str",
ansible_aliases=["ports"], ansible_aliases=["ports"],
) )
.add_option( .add_option(
"ports", "ports",
type="set", value_type="set",
elements="str", elements="str",
not_an_ansible_option=True, not_an_ansible_option=True,
default_comparison="ignore", default_comparison="ignore",

View File

@ -119,12 +119,12 @@ _DEFAULT_IP_REPLACEMENT_STRING = (
) )
def _get_ansible_type(type): def _get_ansible_type(our_type):
if type == "set": if our_type == "set":
return "list" return "list"
if type not in ("list", "dict", "bool", "int", "float", "str"): if our_type not in ("list", "dict", "bool", "int", "float", "str"):
raise Exception(f'Invalid type "{type}"') raise Exception(f'Invalid type "{our_type}"')
return type return our_type
_SENTRY = object() _SENTRY = object()
@ -232,8 +232,8 @@ class DockerAPIEngineDriver(EngineDriver):
def inspect_image_by_name(self, client, repository, tag): def inspect_image_by_name(self, client, repository, tag):
return client.find_image(repository, tag) return client.find_image(repository, tag)
def pull_image(self, client, repository, tag, platform=None): def pull_image(self, client, repository, tag, image_platform=None):
return client.pull_image(repository, tag, platform=platform) return client.pull_image(repository, tag, image_platform=image_platform)
def pause_container(self, client, container_id): def pause_container(self, client, container_id):
client.post_call("/containers/{0}/pause", container_id) client.post_call("/containers/{0}/pause", container_id)
@ -892,8 +892,8 @@ def _preprocess_etc_hosts(module, client, api_version, value):
if value is None: if value is None:
return value return value
results = [] results = []
for key, value in value.items(): for key, val in value.items():
results.append(f"{key}:{value}") results.append(f"{key}:{val}")
return results return results

View File

@ -70,6 +70,7 @@ class Container(DockerBaseClass):
class ContainerManager(DockerBaseClass): class ContainerManager(DockerBaseClass):
def __init__(self, module, engine_driver, client, active_options): def __init__(self, module, engine_driver, client, active_options):
super().__init__()
self.module = module self.module = module
self.engine_driver = engine_driver self.engine_driver = engine_driver
self.client = client self.client = client
@ -569,7 +570,7 @@ class ContainerManager(DockerBaseClass):
self.client, self.client,
repository, repository,
tag, tag,
platform=self.module.params["platform"], image_platform=self.module.params["platform"],
) )
if alreadyToLatest: if alreadyToLatest:
self.results["changed"] = False self.results["changed"] = False

View File

@ -1,14 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
# Do not use this from other collections or standalone plugins/modules!
"""Provide selectors import."""
from __future__ import annotations
import selectors # noqa: F401, pylint: disable=unused-import

View File

@ -56,7 +56,7 @@ class DockerSocketHandlerBase(object):
def __enter__(self): def __enter__(self):
return self return self
def __exit__(self, type, value, tb): def __exit__(self, type_, value, tb):
self._selector.close() self._selector.close()
def set_block_done_callback(self, block_done_callback): def set_block_done_callback(self, block_done_callback):
@ -204,9 +204,9 @@ class DockerSocketHandlerBase(object):
self.select() self.select()
return b"".join(stdout), b"".join(stderr) return b"".join(stdout), b"".join(stderr)
def write(self, str): def write(self, str_to_write):
self._write_buffer += str self._write_buffer += str_to_write
if len(self._write_buffer) == len(str): if len(self._write_buffer) == len(str_to_write):
self._write() self._write()

View File

@ -111,7 +111,7 @@ def log_debug(msg, pretty_print=False):
If ``pretty_print=True``, the message will be pretty-printed as JSON. If ``pretty_print=True``, the message will be pretty-printed as JSON.
""" """
with open("docker.log", "a") as log_file: with open("docker.log", "at", encoding="utf-8") as log_file:
if pretty_print: if pretty_print:
log_file.write( log_file.write(
json.dumps(msg, sort_keys=True, indent=4, separators=(",", ": ")) json.dumps(msg, sort_keys=True, indent=4, separators=(",", ": "))

View File

@ -499,6 +499,8 @@ class ServicesManager(BaseComposeManager):
result = self.cmd_restart() result = self.cmd_restart()
elif self.state == "absent": elif self.state == "absent":
result = self.cmd_down() result = self.cmd_down()
else:
raise AssertionError("Unexpected state")
result["containers"] = self.list_containers() result["containers"] = self.list_containers()
result["images"] = self.list_images() result["images"] = self.list_images()

View File

@ -570,7 +570,7 @@ class ImageManager(DockerBaseClass):
self.results["changed"] = True self.results["changed"] = True
if not self.check_mode: if not self.check_mode:
self.results["image"], dummy = self.client.pull_image( self.results["image"], dummy = self.client.pull_image(
self.name, tag=self.tag, platform=self.pull_platform self.name, tag=self.tag, image_platform=self.pull_platform
) )
elif self.source == "local": elif self.source == "local":
if image is None: if image is None:
@ -891,7 +891,7 @@ class ImageManager(DockerBaseClass):
dockerignore = os.path.join(self.build_path, ".dockerignore") dockerignore = os.path.join(self.build_path, ".dockerignore")
exclude = None exclude = None
if os.path.exists(dockerignore): if os.path.exists(dockerignore):
with open(dockerignore) as f: with open(dockerignore, "rt", encoding="utf-8") as f:
exclude = list( exclude = list(
filter( filter(
lambda x: x != "" and x[0] != "#", lambda x: x != "" and x[0] != "#",

View File

@ -318,11 +318,11 @@ def dict_to_list(dictionary, concat="="):
return [f"{k}{concat}{v}" for k, v in sorted(dictionary.items())] return [f"{k}{concat}{v}" for k, v in sorted(dictionary.items())]
def _quote_csv(input): def _quote_csv(text):
if input.strip() == input and all(i not in input for i in '",\r\n'): if text.strip() == text and all(i not in text for i in '",\r\n'):
return input return text
input = input.replace('"', '""') text = text.replace('"', '""')
return f'"{input}"' return f'"{text}"'
class ImageBuilder(DockerBaseClass): class ImageBuilder(DockerBaseClass):

View File

@ -181,7 +181,7 @@ class ImagePuller(DockerBaseClass):
results["diff"]["after"] = image_info(dict(Id="unknown")) results["diff"]["after"] = image_info(dict(Id="unknown"))
else: else:
results["image"], not_changed = self.client.pull_image( results["image"], not_changed = self.client.pull_image(
self.name, tag=self.tag, platform=self.platform self.name, tag=self.tag, image_platform=self.platform
) )
results["changed"] = not not_changed results["changed"] = not not_changed
results["diff"]["after"] = image_info(results["image"]) results["diff"]["after"] = image_info(results["image"])

View File

@ -161,7 +161,7 @@ class DockerFileStore(object):
try: try:
# Attempt to read the existing config. # Attempt to read the existing config.
with open(self._config_path, "r") as f: with open(self._config_path, "rt", encoding="utf-8") as f:
config = json.load(f) config = json.load(f)
except (ValueError, IOError): except (ValueError, IOError):
# No config found or an invalid config found so we'll ignore it. # No config found or an invalid config found so we'll ignore it.
@ -197,9 +197,9 @@ class DockerFileStore(object):
Write config back out to disk. Write config back out to disk.
""" """
# Make sure directory exists # Make sure directory exists
dir = os.path.dirname(self._config_path) directory = os.path.dirname(self._config_path)
if not os.path.exists(dir): if not os.path.exists(directory):
os.makedirs(dir) os.makedirs(directory)
# Write config; make sure it has permissions 0x600 # Write config; make sure it has permissions 0x600
content = json.dumps(self._config, indent=4, sort_keys=True).encode("utf-8") content = json.dumps(self._config, indent=4, sort_keys=True).encode("utf-8")
f = os.open(self._config_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600) f = os.open(self._config_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
@ -214,13 +214,13 @@ class DockerFileStore(object):
""" """
b64auth = base64.b64encode(to_bytes(username) + b":" + to_bytes(password)) b64auth = base64.b64encode(to_bytes(username) + b":" + to_bytes(password))
auth = to_text(b64auth) tauth = to_text(b64auth)
# build up the auth structure # build up the auth structure
if "auths" not in self._config: if "auths" not in self._config:
self._config["auths"] = dict() self._config["auths"] = dict()
self._config["auths"][server] = dict(auth=auth) self._config["auths"][server] = dict(auth=tauth)
self._write() self._write()
@ -294,7 +294,7 @@ class LoginManager(DockerBaseClass):
self.client._auth_configs.add_auth( self.client._auth_configs.add_auth(
self.registry_url or auth.INDEX_NAME, req_data self.registry_url or auth.INDEX_NAME, req_data
) )
return self.client._result(response, json=True) return self.client._result(response, get_json=True)
def login(self): def login(self):
""" """

View File

@ -504,7 +504,7 @@ class SwarmManager(DockerBaseClass):
unlock_key = self.get_unlock_key() unlock_key = self.get_unlock_key()
self.swarm_info.update(unlock_key) self.swarm_info.update(unlock_key)
except APIError: except APIError:
return pass
def get_unlock_key(self): def get_unlock_key(self):
default = {"UnlockKey": None} default = {"UnlockKey": None}

View File

@ -619,29 +619,29 @@ class DisableSocketTest(unittest.TestCase):
def test_disable_socket_timeout(self): def test_disable_socket_timeout(self):
"""Test that the timeout is disabled on a generic socket object.""" """Test that the timeout is disabled on a generic socket object."""
socket = self.DummySocket() the_socket = self.DummySocket()
self.client._disable_socket_timeout(socket) self.client._disable_socket_timeout(the_socket)
assert socket.timeout is None assert the_socket.timeout is None
def test_disable_socket_timeout2(self): def test_disable_socket_timeout2(self):
"""Test that the timeouts are disabled on a generic socket object """Test that the timeouts are disabled on a generic socket object
and it's _sock object if present.""" and it's _sock object if present."""
socket = self.DummySocket() the_socket = self.DummySocket()
socket._sock = self.DummySocket() the_socket._sock = self.DummySocket()
self.client._disable_socket_timeout(socket) self.client._disable_socket_timeout(the_socket)
assert socket.timeout is None assert the_socket.timeout is None
assert socket._sock.timeout is None assert the_socket._sock.timeout is None
def test_disable_socket_timout_non_blocking(self): def test_disable_socket_timout_non_blocking(self):
"""Test that a non-blocking socket does not get set to blocking.""" """Test that a non-blocking socket does not get set to blocking."""
socket = self.DummySocket() the_socket = self.DummySocket()
socket._sock = self.DummySocket(0.0) the_socket._sock = self.DummySocket(0.0)
self.client._disable_socket_timeout(socket) self.client._disable_socket_timeout(the_socket)
assert socket.timeout is None assert the_socket.timeout is None
assert socket._sock.timeout == 0.0 assert the_socket._sock.timeout == 0.0

View File

@ -300,7 +300,7 @@ class LoadConfigTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, folder) self.addCleanup(shutil.rmtree, folder)
cfg_path = os.path.join(folder, ".dockercfg") cfg_path = os.path.join(folder, ".dockercfg")
auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii") auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
with open(cfg_path, "w") as f: with open(cfg_path, "wt", encoding="utf-8") as f:
f.write(f"auth = {auth_}\n") f.write(f"auth = {auth_}\n")
f.write("email = sakuya@scarlet.net") f.write("email = sakuya@scarlet.net")
@ -319,7 +319,7 @@ class LoadConfigTest(unittest.TestCase):
cfg_path = os.path.join(folder, ".dockercfg") cfg_path = os.path.join(folder, ".dockercfg")
auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii") auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
email = "sakuya@scarlet.net" email = "sakuya@scarlet.net"
with open(cfg_path, "w") as f: with open(cfg_path, "wt", encoding="utf-8") as f:
json.dump({auth.INDEX_URL: {"auth": auth_, "email": email}}, f) json.dump({auth.INDEX_URL: {"auth": auth_, "email": email}}, f)
cfg = auth.load_config(cfg_path) cfg = auth.load_config(cfg_path)
assert auth.resolve_authconfig(cfg) is not None assert auth.resolve_authconfig(cfg) is not None
@ -336,7 +336,7 @@ class LoadConfigTest(unittest.TestCase):
cfg_path = os.path.join(folder, "config.json") cfg_path = os.path.join(folder, "config.json")
auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii") auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
email = "sakuya@scarlet.net" email = "sakuya@scarlet.net"
with open(cfg_path, "w") as f: with open(cfg_path, "wt", encoding="utf-8") as f:
json.dump({"auths": {auth.INDEX_URL: {"auth": auth_, "email": email}}}, f) json.dump({"auths": {auth.INDEX_URL: {"auth": auth_, "email": email}}}, f)
cfg = auth.load_config(cfg_path) cfg = auth.load_config(cfg_path)
assert auth.resolve_authconfig(cfg) is not None assert auth.resolve_authconfig(cfg) is not None
@ -355,7 +355,7 @@ class LoadConfigTest(unittest.TestCase):
auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii") auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
config = {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}} config = {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}}
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config, f) json.dump(config, f)
cfg = auth.load_config(dockercfg_path).auths cfg = auth.load_config(dockercfg_path).auths
@ -376,7 +376,7 @@ class LoadConfigTest(unittest.TestCase):
auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii") auth_ = base64.b64encode(b"sakuya:izayoi").decode("ascii")
config = {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}} config = {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}}
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config, f) json.dump(config, f)
with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}): with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}):
@ -400,7 +400,7 @@ class LoadConfigTest(unittest.TestCase):
"auths": {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}} "auths": {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}}
} }
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config, f) json.dump(config, f)
with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}): with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}):
@ -423,7 +423,7 @@ class LoadConfigTest(unittest.TestCase):
"auths": {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}} "auths": {registry: {"auth": f"{auth_}", "email": "sakuya@scarlet.net"}}
} }
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config, f) json.dump(config, f)
with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}): with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}):
@ -440,7 +440,7 @@ class LoadConfigTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, folder) self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder, "config.json") dockercfg_path = os.path.join(folder, "config.json")
config = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"} config = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"}
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config, f) json.dump(config, f)
cfg = auth.load_config(dockercfg_path) cfg = auth.load_config(dockercfg_path)
@ -451,7 +451,7 @@ class LoadConfigTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, folder) self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder, "config.json") dockercfg_path = os.path.join(folder, "config.json")
config = {"auths": {"scarlet.net": {"sakuya": "izayoi"}}} config = {"auths": {"scarlet.net": {"sakuya": "izayoi"}}}
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config, f) json.dump(config, f)
cfg = auth.load_config(dockercfg_path) cfg = auth.load_config(dockercfg_path)
@ -465,7 +465,7 @@ class LoadConfigTest(unittest.TestCase):
dockercfg_path = os.path.join(folder, "config.json") dockercfg_path = os.path.join(folder, "config.json")
auth_entry = encode_auth({"username": "sakuya"}).decode("ascii") auth_entry = encode_auth({"username": "sakuya"}).decode("ascii")
config = {"auths": {registry: {"auth": auth_entry, "identitytoken": token}}} config = {"auths": {registry: {"auth": auth_entry, "identitytoken": token}}}
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config, f) json.dump(config, f)
cfg = auth.load_config(dockercfg_path) cfg = auth.load_config(dockercfg_path)
@ -803,7 +803,7 @@ class CredstoreTest(unittest.TestCase):
class InMemoryStore(Store): class InMemoryStore(Store):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs): # pylint: disable=super-init-not-called
self.__store = {} self.__store = {}
def get(self, server): def get(self, server):

View File

@ -123,8 +123,8 @@ class APIErrorTest(unittest.TestCase):
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
try: try:
create_api_error_from_http_exception(e) create_api_error_from_http_exception(e)
except APIError as e: except APIError as e2:
err = e err = e2
assert err.is_server_error() is True assert err.is_server_error() is True

View File

@ -37,7 +37,7 @@ def make_tree(dirs, files):
os.makedirs(os.path.join(base, path)) os.makedirs(os.path.join(base, path))
for path in files: for path in files:
with open(os.path.join(base, path), "w") as f: with open(os.path.join(base, path), "wt", encoding="utf-8") as f:
f.write("content") f.write("content")
return base return base
@ -440,7 +440,7 @@ class TarTest(unittest.TestCase):
base = tempfile.mkdtemp() base = tempfile.mkdtemp()
full_path = os.path.join(base, "foo") full_path = os.path.join(base, "foo")
self.addCleanup(shutil.rmtree, base) self.addCleanup(shutil.rmtree, base)
with open(full_path, "w") as f: with open(full_path, "wt", encoding="utf-8") as f:
f.write("content") f.write("content")
os.chmod(full_path, 0o222) os.chmod(full_path, 0o222)
with pytest.raises(IOError) as ei: with pytest.raises(IOError) as ei:
@ -452,7 +452,7 @@ class TarTest(unittest.TestCase):
def test_tar_with_file_symlinks(self): def test_tar_with_file_symlinks(self):
base = tempfile.mkdtemp() base = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, base) self.addCleanup(shutil.rmtree, base)
with open(os.path.join(base, "foo"), "w") as f: with open(os.path.join(base, "foo"), "wt", encoding="utf-8") as f:
f.write("content") f.write("content")
os.makedirs(os.path.join(base, "bar")) os.makedirs(os.path.join(base, "bar"))
os.symlink("../foo", os.path.join(base, "bar/foo")) os.symlink("../foo", os.path.join(base, "bar/foo"))
@ -500,7 +500,7 @@ class TarTest(unittest.TestCase):
base = tempfile.mkdtemp() base = tempfile.mkdtemp()
filename = os.path.join(base, "th.txt") filename = os.path.join(base, "th.txt")
self.addCleanup(shutil.rmtree, base) self.addCleanup(shutil.rmtree, base)
with open(filename, "w") as f: with open(filename, "wt", encoding="utf-8") as f:
f.write("Invisible Full Moon") f.write("Invisible Full Moon")
os.utime(filename, (12345, -3600.0)) os.utime(filename, (12345, -3600.0))
with tar(base) as archive: with tar(base) as archive:

View File

@ -96,7 +96,7 @@ class LoadConfigTest(unittest.TestCase):
"HttpHeaders": {"Name": "Spike", "Surname": "Spiegel"}, "HttpHeaders": {"Name": "Spike", "Surname": "Spiegel"},
} }
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config_data, f) json.dump(config_data, f)
cfg = config.load_general_config(dockercfg_path) cfg = config.load_general_config(dockercfg_path)
@ -108,7 +108,7 @@ class LoadConfigTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, folder) self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder, "config.json") dockercfg_path = os.path.join(folder, "config.json")
config_data = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"} config_data = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"}
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config_data, f) json.dump(config_data, f)
cfg = config.load_general_config(dockercfg_path) cfg = config.load_general_config(dockercfg_path)
@ -119,7 +119,7 @@ class LoadConfigTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, folder) self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder, "config.json") dockercfg_path = os.path.join(folder, "config.json")
config_data = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"} config_data = {"detachKeys": "ctrl-q, ctrl-u, ctrl-i"}
with open(dockercfg_path, "w") as f: with open(dockercfg_path, "wt", encoding="utf-8") as f:
json.dump(config_data, f) json.dump(config_data, f)
with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}): with mock.patch.dict(os.environ, {"DOCKER_CONFIG": folder}):

View File

@ -15,7 +15,7 @@ from ansible_collections.community.docker.plugins.modules.docker_container_copy_
@pytest.mark.parametrize( @pytest.mark.parametrize(
"input, expected", "value, expected",
[ [
("0777", 0o777), ("0777", 0o777),
("777", 0o777), ("777", 0o777),
@ -32,13 +32,13 @@ from ansible_collections.community.docker.plugins.modules.docker_container_copy_
("-1", -1), ("-1", -1),
], ],
) )
def test_parse_string(input, expected): def test_parse_string(value, expected):
assert parse_modern(input) == expected assert parse_modern(value) == expected
assert parse_octal_string_only(input) == expected assert parse_octal_string_only(value) == expected
@pytest.mark.parametrize( @pytest.mark.parametrize(
"input", "value",
[ [
0o777, 0o777,
0o755, 0o755,
@ -47,14 +47,14 @@ def test_parse_string(input, expected):
123456789012345678901234567890123456789012345678901234567890, 123456789012345678901234567890123456789012345678901234567890,
], ],
) )
def test_parse_int(input): def test_parse_int(value):
assert parse_modern(input) == input assert parse_modern(value) == value
with pytest.raises(TypeError, match=f"^must be an octal string, got {input}L?$"): with pytest.raises(TypeError, match=f"^must be an octal string, got {value}L?$"):
parse_octal_string_only(input) parse_octal_string_only(value)
@pytest.mark.parametrize( @pytest.mark.parametrize(
"input", "value",
[ [
1.0, 1.0,
755.5, 755.5,
@ -62,23 +62,23 @@ def test_parse_int(input):
{}, {},
], ],
) )
def test_parse_bad_type(input): def test_parse_bad_type(value):
with pytest.raises(TypeError, match="^must be an octal string or an integer, got "): with pytest.raises(TypeError, match="^must be an octal string or an integer, got "):
parse_modern(input) parse_modern(value)
with pytest.raises(TypeError, match="^must be an octal string, got "): with pytest.raises(TypeError, match="^must be an octal string, got "):
parse_octal_string_only(input) parse_octal_string_only(value)
@pytest.mark.parametrize( @pytest.mark.parametrize(
"input", "value",
[ [
"foo", "foo",
"8", "8",
"9", "9",
], ],
) )
def test_parse_bad_value(input): def test_parse_bad_value(value):
with pytest.raises(ValueError): with pytest.raises(ValueError):
parse_modern(input) parse_modern(value)
with pytest.raises(ValueError): with pytest.raises(ValueError):
parse_octal_string_only(input) parse_octal_string_only(value)

View File

@ -14,7 +14,7 @@ from ansible_collections.community.docker.plugins.modules.docker_image_build imp
@pytest.mark.parametrize( @pytest.mark.parametrize(
"input, expected", "value, expected",
[ [
("", ""), ("", ""),
(" ", '" "'), (" ", '" "'),
@ -23,5 +23,5 @@ from ansible_collections.community.docker.plugins.modules.docker_image_build imp
('\rhello, "hi" !\n', '"\rhello, ""hi"" !\n"'), ('\rhello, "hi" !\n', '"\rhello, ""hi"" !\n"'),
], ],
) )
def test__quote_csv(input, expected): def test__quote_csv(value, expected):
assert _quote_csv(input) == expected assert _quote_csv(value) == expected