Python code modernization, 8/n (#1179)

* Use to_text instead of to_native.

* Remove no longer needed pylint ignores.

* Remove another pylint ignore.

* Remove no longer needed ignore.

* Address redefined-outer-name.

* Address consider-using-with.
Felix Fontein 2025-10-25 02:36:04 +02:00 committed by GitHub
parent 6ad4bfcd40
commit be000755fc
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
30 changed files with 156 additions and 150 deletions
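The hunks below repeat a few patterns: `to_native()` calls become `to_text()` (both return `str` on Python 3), `subprocess.Popen` and `tarfile.open` objects are managed with `with` blocks so `consider-using-with` no longer needs to be disabled globally, and pytest fixtures get an explicit `name=` so the fixture function no longer shadows the test argument (`redefined-outer-name`). A minimal, self-contained sketch of these patterns, not taken from the commit; the function and fixture names here are made up for illustration:

```python
from __future__ import annotations

import subprocess

import pytest
from ansible.module_utils.common.text.converters import to_text


def run_cli(cmd: list[str]) -> str:
    # consider-using-with: let the context manager close the pipes and reap
    # the process, even if communicate() raises.
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
        stdout, dummy_stderr = proc.communicate()
    # to_text() instead of to_native(): on Python 3 both return str,
    # to_text() simply states the intent.
    return to_text(stdout, errors="surrogate_or_strict")


# redefined-outer-name: register the fixture under an explicit name so the
# fixture function's name no longer collides with the test argument.
@pytest.fixture(name="sample_config")
def sample_config_fixture() -> dict[str, str]:
    return {"cli": "docker"}


def test_sample_config(sample_config: dict[str, str]) -> None:
    assert sample_config["cli"] == "docker"
```

The diffs below show how these patterns land in the collection's plugins, module utils, and tests.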

View File

@@ -379,18 +379,12 @@ disable=raw-checker-failed,
 wrong-import-order,
 wrong-import-position,
 # To clean up:
-arguments-differ,
-consider-using-with,
 fixme,
 import-error, # TODO figure out why pylint cannot find the module
-no-member,
 no-name-in-module, # TODO figure out why pylint cannot find the module
-not-an-iterable, # TODO: needs better typing info
 protected-access,
-redefined-outer-name, # needed for test fixtures
 subprocess-popen-preexec-fn,
 unexpected-keyword-arg,
-unsupported-assignment-operation, # TODO: needs better typing info
 unused-argument,
 # Cannot remove yet due to inadequacy of rules
 inconsistent-return-statements, # doesn't notice that fail_json() does not return

View File

@@ -29,6 +29,7 @@ class ActionModule(ActionBase):
 result = super().run(tmp, task_vars)
 del tmp # tmp no longer has any effect
+# pylint: disable-next=no-member
 max_file_size_for_diff: int = C.MAX_FILE_SIZE_FOR_DIFF # type: ignore
 self._task.args["_max_file_size_for_diff"] = max_file_size_for_diff

View File

@@ -123,7 +123,7 @@ from shlex import quote
 from ansible.errors import AnsibleConnectionFailure, AnsibleError, AnsibleFileNotFound
 from ansible.module_utils.common.process import get_bin_path
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible.plugins.connection import BUFSIZE, ConnectionBase
 from ansible.utils.display import Display
@@ -188,7 +188,7 @@ class Connection(ConnectionBase):
 ) as p:
 cmd_output, err = p.communicate()
-return old_docker_cmd, to_native(cmd_output), err, p.returncode
+return old_docker_cmd, to_text(cmd_output), err, p.returncode
 def _new_docker_version(self) -> tuple[list[str], str, bytes, int]:
 # no result yet, must be newer Docker version
@@ -201,7 +201,7 @@ class Connection(ConnectionBase):
 new_docker_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
 ) as p:
 cmd_output, err = p.communicate()
-return new_docker_cmd, to_native(cmd_output), err, p.returncode
+return new_docker_cmd, to_text(cmd_output), err, p.returncode
 def _get_docker_version(self) -> str:
 cmd, cmd_output, err, returncode = self._old_docker_version()
@@ -213,7 +213,7 @@ class Connection(ConnectionBase):
 cmd, cmd_output, err, returncode = self._new_docker_version()
 if returncode:
 raise AnsibleError(
-f"Docker version check ({to_native(cmd)}) failed: {to_native(err)}"
+f"Docker version check ({to_text(cmd)}) failed: {to_text(err)}"
 )
 return self._sanitize_version(to_text(cmd_output, errors="surrogate_or_strict"))
@@ -427,7 +427,7 @@ class Connection(ConnectionBase):
 stdout, stderr = p.communicate()
 raise AnsibleError(
 "timeout waiting for privilege escalation password prompt:\n"
-+ to_native(become_output)
++ to_text(become_output)
 )
 chunks = b""
@@ -445,7 +445,7 @@ class Connection(ConnectionBase):
 stdout, stderr = p.communicate()
 raise AnsibleError(
 "privilege output closed while waiting for password prompt:\n"
-+ to_native(become_output)
++ to_text(become_output)
 )
 become_output += chunks
 finally:
@@ -503,7 +503,7 @@ class Connection(ConnectionBase):
 out_path = self._prefix_login_path(out_path)
 if not os.path.exists(to_bytes(in_path, errors="surrogate_or_strict")):
 raise AnsibleFileNotFound(
-f"file or module does not exist: {to_native(in_path)}"
+f"file or module does not exist: {to_text(in_path)}"
 )
 out_path = quote(out_path)
@@ -525,6 +525,7 @@ class Connection(ConnectionBase):
 )
 args = [to_bytes(i, errors="surrogate_or_strict") for i in args]
 try:
+# pylint: disable-next=consider-using-with
 p = subprocess.Popen(
 args, stdin=in_file, stdout=subprocess.PIPE, stderr=subprocess.PIPE
 )
@@ -536,7 +537,7 @@ class Connection(ConnectionBase):
 if p.returncode != 0:
 raise AnsibleError(
-f"failed to transfer file {to_native(in_path)} to {to_native(out_path)}:\n{to_native(stdout)}\n{to_native(stderr)}"
+f"failed to transfer file {to_text(in_path)} to {to_text(out_path)}:\n{to_text(stdout)}\n{to_text(stderr)}"
 )
 def fetch_file(self, in_path: str, out_path: str) -> None:
@@ -587,6 +588,7 @@ class Connection(ConnectionBase):
 to_bytes(actual_out_path, errors="surrogate_or_strict"), "wb"
 ) as out_file:
 try:
+# pylint: disable-next=consider-using-with
 pp = subprocess.Popen(
 args,
 stdin=subprocess.PIPE,

View File

@@ -110,7 +110,7 @@ import os.path
 import typing as t
 from ansible.errors import AnsibleConnectionFailure, AnsibleFileNotFound
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible.plugins.connection import ConnectionBase
 from ansible.utils.display import Display
@@ -335,13 +335,13 @@ class Connection(ConnectionBase):
 stdout, stderr = exec_socket_handler.consume()
 raise AnsibleConnectionFailure(
 "timeout waiting for privilege escalation password prompt:\n"
-+ to_native(become_output[0])
++ to_text(become_output[0])
 )
 if exec_socket_handler.is_eof():
 raise AnsibleConnectionFailure(
 "privilege output closed while waiting for password prompt:\n"
-+ to_native(become_output[0])
++ to_text(become_output[0])
 )
 if not self.become.check_success(become_output[0]):
@@ -437,9 +437,9 @@ class Connection(ConnectionBase):
 not_found_can_be_resource=True,
 )
 except DockerFileNotFound as exc:
-raise AnsibleFileNotFound(to_native(exc)) from exc
+raise AnsibleFileNotFound(to_text(exc)) from exc
 except DockerFileCopyError as exc:
-raise AnsibleConnectionFailure(to_native(exc)) from exc
+raise AnsibleConnectionFailure(to_text(exc)) from exc
 def fetch_file(self, in_path: str, out_path: str) -> None:
 """Fetch a file from container to local."""
@@ -468,9 +468,9 @@ class Connection(ConnectionBase):
 not_found_can_be_resource=True,
 )
 except DockerFileNotFound as exc:
-raise AnsibleFileNotFound(to_native(exc)) from exc
+raise AnsibleFileNotFound(to_text(exc)) from exc
 except DockerFileCopyError as exc:
-raise AnsibleConnectionFailure(to_native(exc)) from exc
+raise AnsibleConnectionFailure(to_text(exc)) from exc
 def close(self) -> None:
 """Terminate the connection. Nothing to do for Docker"""

View File

@@ -50,7 +50,7 @@ import typing as t
 import ansible.constants as C
 from ansible.errors import AnsibleError
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible.plugins.connection import ConnectionBase
 from ansible.utils.display import Display
 from ansible.utils.path import unfrackpath
@@ -92,6 +92,7 @@ class Connection(ConnectionBase):
 display.debug("in nsenter.exec_command()")
+# pylint: disable-next=no-member
 def_executable: str | None = C.DEFAULT_EXECUTABLE # type: ignore[attr-defined]
 executable = def_executable.split()[0] if def_executable else None
@@ -178,7 +179,7 @@ class Connection(ConnectionBase):
 stdout, stderr = p.communicate()
 raise AnsibleError(
 "timeout waiting for privilege escalation password prompt:\n"
-+ to_native(become_output)
++ to_text(become_output)
 )
 chunks = b""
@@ -196,7 +197,7 @@ class Connection(ConnectionBase):
 stdout, stderr = p.communicate()
 raise AnsibleError(
 "privilege output closed while waiting for password prompt:\n"
-+ to_native(become_output)
++ to_text(become_output)
 )
 become_output += chunks
 finally:
@@ -279,7 +280,7 @@ class Connection(ConnectionBase):
 out_file.write(out)
 except IOError as e:
 raise AnsibleError(
-f"failed to transfer file to {to_native(out_path)}: {e}"
+f"failed to transfer file to {to_text(out_path)}: {e}"
 ) from e
 def close(self) -> None:

View File

@@ -105,7 +105,7 @@ import typing as t
 from ansible.errors import AnsibleError
 from ansible.module_utils.common.process import get_bin_path
-from ansible.module_utils.common.text.converters import to_native, to_text
+from ansible.module_utils.common.text.converters import to_text
 from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable, Constructable
 from ansible.utils.display import Display
 from ansible_collections.community.library_inventory_filtering_v1.plugins.plugin_utils.inventory_filter import (
@@ -142,7 +142,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
 try:
 self.docker_machine_path = get_bin_path("docker-machine")
 except ValueError as e:
-raise AnsibleError(to_native(e)) from e
+raise AnsibleError(to_text(e)) from e
 command = [self.docker_machine_path]
 command.extend(args)

View File

@@ -652,6 +652,8 @@ class APIClient(_Session):
 def get_adapter(self, url: str) -> BaseAdapter:
 try:
+# pylint finds our Session stub instead of requests.Session:
+# pylint: disable-next=no-member
 return super().get_adapter(url)
 except _InvalidSchema as e:
 if self._custom_adapter:

View File

@@ -13,7 +13,7 @@ from __future__ import annotations
 import typing as t
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ._import_helper import HTTPError as _HTTPError
@@ -39,7 +39,7 @@ def create_api_error_from_http_exception(e: _HTTPError) -> t.NoReturn:
 try:
 explanation = response.json()["message"]
 except ValueError:
-explanation = to_native((response.content or "").strip())
+explanation = to_text((response.content or "").strip())
 cls = APIError
 if response.status_code == 404:
 if explanation and (

View File

@@ -16,6 +16,8 @@ from .._import_helper import HTTPAdapter as _HTTPAdapter
 class BaseHTTPAdapter(_HTTPAdapter):
 def close(self) -> None:
+# pylint finds our HTTPAdapter stub instead of requests.adapters.HTTPAdapter:
+# pylint: disable-next=no-member
 super().close()
 if hasattr(self, "pools"):
 self.pools.clear()

View File

@@ -58,7 +58,11 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
 We already take care of a normal poolmanager via init_poolmanager
 But we still need to take care of when there is a proxy poolmanager
+Note that this method is no longer called for newer requests versions.
 """
+# pylint finds our HTTPAdapter stub instead of requests.adapters.HTTPAdapter:
+# pylint: disable-next=no-member
 conn = super().get_connection(*args, **kwargs)
 if (
 self.assert_hostname is not None

View File

@@ -97,8 +97,10 @@ def create_archive(
 ) -> t.IO[bytes]:
 extra_files = extra_files or []
 if not fileobj:
+# pylint: disable-next=consider-using-with
 fileobj = tempfile.NamedTemporaryFile()
-t = tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj)
+with tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj) as tarf:
 if files is None:
 files = build_file_list(root)
 extra_names = set(e[0] for e in extra_files)
@@ -108,7 +110,7 @@
 continue
 full_path = os.path.join(root, path)
-i = t.gettarinfo(full_path, arcname=path)
+i = tarf.gettarinfo(full_path, arcname=path)
 if i is None:
 # This happens when we encounter a socket file. We can safely
 # ignore it and proceed.
@@ -126,20 +128,19 @@
 if i.isfile():
 try:
 with open(full_path, "rb") as f:
-t.addfile(i, f)
+tarf.addfile(i, f)
 except IOError as exc:
 raise IOError(f"Can not read file in context: {full_path}") from exc
 else:
 # Directories, FIFOs, symlinks... do not need to be read.
-t.addfile(i, None)
+tarf.addfile(i, None)
 for name, contents in extra_files:
 info = tarfile.TarInfo(name)
 contents_encoded = contents.encode("utf-8")
 info.size = len(contents_encoded)
-t.addfile(info, io.BytesIO(contents_encoded))
+tarf.addfile(info, io.BytesIO(contents_encoded))
-t.close()
 fileobj.seek(0)
 return fileobj
@@ -147,7 +148,7 @@ def create_archive(
 def mkbuildcontext(dockerfile: io.BytesIO | t.IO[bytes]) -> t.IO[bytes]:
 f = tempfile.NamedTemporaryFile() # pylint: disable=consider-using-with
 try:
-with tarfile.open(mode="w", fileobj=f) as t:
+with tarfile.open(mode="w", fileobj=f) as tarf:
 if isinstance(dockerfile, io.StringIO): # type: ignore
 raise TypeError("Please use io.BytesIO to create in-memory Dockerfiles")
 if isinstance(dockerfile, io.BytesIO):
@@ -155,8 +156,8 @@ def mkbuildcontext(dockerfile: io.BytesIO | t.IO[bytes]) -> t.IO[bytes]:
 dfinfo.size = len(dockerfile.getvalue())
 dockerfile.seek(0)
 else:
-dfinfo = t.gettarinfo(fileobj=dockerfile, arcname="Dockerfile")
-t.addfile(dfinfo, dockerfile)
+dfinfo = tarf.gettarinfo(fileobj=dockerfile, arcname="Dockerfile")
+tarf.addfile(dfinfo, dockerfile)
 f.seek(0)
 except Exception: # noqa: E722
 f.close()

View File

@@ -14,7 +14,7 @@ import typing as t
 from ansible.module_utils.basic import AnsibleModule, env_fallback
 from ansible.module_utils.common.process import get_bin_path
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._api.auth import (
 resolve_repository_name,
@@ -132,9 +132,7 @@ class AnsibleDockerClientBase:
 self.fail(
 "Cannot determine Docker Daemon information. Are you maybe using podman instead of docker?"
 )
-self.docker_api_version_str = to_native(
-self._version["Server"]["ApiVersion"]
-)
+self.docker_api_version_str = to_text(self._version["Server"]["ApiVersion"])
 self.docker_api_version = LooseVersion(self.docker_api_version_str)
 min_docker_api_version = min_docker_api_version or "1.25"
 if self.docker_api_version < LooseVersion(min_docker_api_version):
@@ -191,12 +189,12 @@ class AnsibleDockerClientBase:
 *args, check_rc=check_rc, data=data, cwd=cwd, environ_update=environ_update
 )
 if warn_on_stderr and stderr:
-self.warn(to_native(stderr))
+self.warn(to_text(stderr))
 try:
 data = json.loads(stdout)
 except Exception as exc: # pylint: disable=broad-exception-caught
 self.fail(
-f"Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_native(stdout)}"
+f"Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_text(stdout)}"
 )
 return rc, data, stderr
@@ -213,7 +211,7 @@ class AnsibleDockerClientBase:
 *args, check_rc=check_rc, data=data, cwd=cwd, environ_update=environ_update
 )
 if warn_on_stderr and stderr:
-self.warn(to_native(stderr))
+self.warn(to_text(stderr))
 result = []
 try:
 for line in stdout.splitlines():
@@ -222,7 +220,7 @@ class AnsibleDockerClientBase:
 result.append(json.loads(line))
 except Exception as exc: # pylint: disable=broad-exception-caught
 self.fail(
-f"Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_native(stdout)}"
+f"Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_text(stdout)}"
 )
 return rc, result, stderr
@@ -338,7 +336,7 @@ class AnsibleDockerClientBase:
 self.log(f"Image {name}:{tag} not found.")
 return None
 if rc != 0:
-self.fail(f"Error inspecting image {name}:{tag} - {to_native(stderr)}")
+self.fail(f"Error inspecting image {name}:{tag} - {to_text(stderr)}")
 return image[0]
 self.log(f"Image {name}:{tag} not found.")
@@ -367,11 +365,11 @@ class AnsibleDockerClientBase:
 rc, image, stderr = self.call_cli_json("image", "inspect", image_id)
 if not image:
 if not accept_missing_image:
-self.fail(f"Error inspecting image ID {image_id} - {to_native(stderr)}")
+self.fail(f"Error inspecting image ID {image_id} - {to_text(stderr)}")
 self.log(f"Image {image_id} not found.")
 return None
 if rc != 0:
-self.fail(f"Error inspecting image ID {image_id} - {to_native(stderr)}")
+self.fail(f"Error inspecting image ID {image_id} - {to_text(stderr)}")
 return image[0]

View File

@@ -19,7 +19,7 @@ from collections import namedtuple
 from shlex import quote
 from ansible.module_utils.basic import missing_required_lib
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._logfmt import (
 InvalidLogFmt as _InvalidLogFmt,
@@ -418,7 +418,7 @@ def parse_json_events(
 ResourceType.UNKNOWN,
 None,
 "Warning",
-to_native(line[len(b"Warning: ") :]),
+to_text(line[len(b"Warning: ") :]),
 )
 events.append(event)
 continue
@@ -557,7 +557,7 @@ def parse_events(
 if stderr_lines and stderr_lines[-1] == b"":
 del stderr_lines[-1]
 for index, line in enumerate(stderr_lines):
-line = to_native(line.strip())
+line = to_text(line.strip())
 if not line:
 continue
 warn_missing_dry_run_prefix = False
@@ -731,8 +731,8 @@ def update_failed(
 result["failed"] = True
 result["msg"] = "\n".join(errors)
 result["cmd"] = " ".join(quote(arg) for arg in [cli] + args)
-result["stdout"] = to_native(stdout)
-result["stderr"] = to_native(stderr)
+result["stdout"] = to_text(stdout)
+result["stderr"] = to_text(stderr)
 result["rc"] = rc
 return True
@@ -978,8 +978,8 @@ class BaseComposeManager(DockerBaseClass):
 ignore_build_events=ignore_build_events,
 )
 result["actions"] = result.get("actions", []) + extract_actions(events)
-result["stdout"] = combine_text_output(result.get("stdout"), to_native(stdout))
-result["stderr"] = combine_text_output(result.get("stderr"), to_native(stderr))
+result["stdout"] = combine_text_output(result.get("stdout"), to_text(stdout))
+result["stderr"] = combine_text_output(result.get("stderr"), to_text(stderr))
 def update_failed(
 self,

View File

@@ -18,7 +18,7 @@ import stat
 import tarfile
 import typing as t
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible_collections.community.docker.plugins.module_utils._api.errors import (
 APIError,
@@ -223,7 +223,7 @@ def put_file(
 ) -> None:
 """Transfer a file from local to Docker container."""
 if not os.path.exists(to_bytes(in_path, errors="surrogate_or_strict")):
-raise DockerFileNotFound(f"file or module does not exist: {to_native(in_path)}")
+raise DockerFileNotFound(f"file or module does not exist: {to_text(in_path)}")
 b_in_path = to_bytes(in_path, errors="surrogate_or_strict")

View File

@@ -790,17 +790,17 @@ def _preprocess_mounts(
 ) -> dict[str, t.Any]:
 last: dict[str, str] = {}
-def check_collision(t: str, name: str) -> None:
-if t in last:
-if name == last[t]:
+def check_collision(target: str, name: str) -> None:
+if target in last:
+if name == last[target]:
 module.fail_json(
-msg=f'The mount point "{t}" appears twice in the {name} option'
+msg=f'The mount point "{target}" appears twice in the {name} option'
 )
 else:
 module.fail_json(
-msg=f'The mount point "{t}" appears both in the {name} and {last[t]} option'
+msg=f'The mount point "{target}" appears both in the {name} and {last[target]} option'
 )
-last[t] = name
+last[target] = name
 if "mounts" in values:
 mounts = []

View File

@@ -10,7 +10,7 @@ from __future__ import annotations
 import base64
 import random
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
 def generate_insecure_key() -> bytes:
@@ -31,7 +31,7 @@ def scramble(value: str, key: bytes) -> str:
 b_value = to_bytes(value)
 k = key[0]
 b_value = bytes([k ^ b for b in b_value])
-return f"=S={to_native(base64.b64encode(b_value))}"
+return f"=S={to_text(base64.b64encode(b_value))}"
 def unscramble(value: str, key: bytes) -> str:

View File

@@ -171,7 +171,7 @@ import stat
 import traceback
 import typing as t
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
 from ansible.module_utils.common.validation import check_type_int
 from ansible_collections.community.docker.plugins.module_utils._api.errors import (
@@ -1041,7 +1041,7 @@ def copy_content_into_container(
 def parse_modern(mode: str | int) -> int:
 if isinstance(mode, str):
-return int(to_native(mode), 8)
+return int(to_text(mode), 8)
 if isinstance(mode, int):
 return mode
 raise TypeError(f"must be an octal string or an integer, got {mode!r}")
@@ -1049,7 +1049,7 @@ def parse_modern(mode: str | int) -> int:
 def parse_octal_string_only(mode: str) -> int:
 if isinstance(mode, str):
-return int(to_native(mode), 8)
+return int(to_text(mode), 8)
 raise TypeError(f"must be an octal string, got {mode!r}")
@@ -1185,7 +1185,7 @@ def main() -> None:
 except DockerUnexpectedError as exc:
 client.fail(f"Unexpected error: {exc}", exception=traceback.format_exc())
 except DockerFileCopyError as exc:
-client.fail(to_native(exc))
+client.fail(to_text(exc))
 except OSError as exc:
 client.fail(f"Unexpected error: {exc}", exception=traceback.format_exc())

View File

@@ -369,7 +369,7 @@ import os
 import traceback
 import typing as t
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible.module_utils.common.text.formatters import human_to_bytes
 from ansible_collections.community.docker.plugins.module_utils._api.auth import (
@@ -899,7 +899,7 @@ class ImageManager(DockerBaseClass):
 buildargs = {}
 if self.buildargs:
 for key, value in self.buildargs.items():
-buildargs[key] = to_native(value)
+buildargs[key] = to_text(value)
 container_limits = self.container_limits or {}
 for key in container_limits.keys():

View File

@@ -284,7 +284,7 @@ import os
 import traceback
 import typing as t
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible.module_utils.common.text.formatters import human_to_bytes
 from ansible_collections.community.docker.plugins.module_utils._api.utils.utils import (
@@ -545,12 +545,12 @@ class ImageBuilder(DockerBaseClass):
 if rc != 0:
 self.fail(
 f"Building {self.name}:{self.tag} failed",
-stdout=to_native(stdout),
-stderr=to_native(stderr),
+stdout=to_text(stdout),
+stderr=to_text(stderr),
 command=args,
 )
-results["stdout"] = to_native(stdout)
-results["stderr"] = to_native(stderr)
+results["stdout"] = to_text(stdout)
+results["stderr"] = to_text(stderr)
 results["image"] = self.client.find_image(self.name, self.tag) or {}
 results["command"] = args

View File

@@ -286,7 +286,7 @@ import time
 import traceback
 import typing as t
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._api.errors import (
 DockerException,
@@ -412,7 +412,7 @@ class DockerNetworkManager:
 for ipam_config in self.parameters.ipam_config:
 validate_cidr(ipam_config["subnet"])
 except ValueError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 if self.parameters.driver_options:
 self.parameters.driver_options = clean_dict_booleans_for_docker_api(

View File

@@ -143,7 +143,7 @@ except ImportError:
 # missing Docker SDK for Python handled in ansible.module_utils.docker.common
 pass
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._common import (
 RequestException,
@@ -237,11 +237,12 @@ class SwarmNodeManager(DockerBaseClass):
 node_spec["Labels"] = self.parameters.labels
 changed = True
 elif self.parameters.labels_state == "merge":
-node_spec["Labels"] = dict(node_info["Spec"]["Labels"] or {})
+labels: dict[str, str] = dict(node_info["Spec"]["Labels"] or {})
+node_spec["Labels"] = labels
 if self.parameters.labels is not None:
 for key, value in self.parameters.labels.items():
-if node_spec["Labels"].get(key) != value:
-node_spec["Labels"][key] = value
+if labels.get(key) != value:
+labels[key] = value
 changed = True
 if self.parameters.labels_to_remove is not None:
@@ -253,7 +254,7 @@ class SwarmNodeManager(DockerBaseClass):
 changed = True
 else:
 self.client.module.warn(
-f"Label '{to_native(key)}' listed both in 'labels' and 'labels_to_remove'. "
+f"Label '{to_text(key)}' listed both in 'labels' and 'labels_to_remove'. "
 "Keeping the assigned label value."
 )
 else:

View File

@@ -131,7 +131,7 @@ actions:
 import traceback
 import typing as t
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._api import auth
 from ansible_collections.community.docker.plugins.module_utils._api.errors import (
@@ -215,7 +215,7 @@ class DockerPluginManager:
 except NotFound:
 return None
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 def has_different_config(self) -> DifferenceTracker:
 """
@@ -293,7 +293,7 @@ class DockerPluginManager:
 "/plugins/{0}/set", self.preferred_name, data=data
 )
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 self.actions.append(f"Installed plugin {self.preferred_name}")
 self.changed = True
@@ -307,7 +307,7 @@ class DockerPluginManager:
 "/plugins/{0}", self.preferred_name, params={"force": force}
 )
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 self.actions.append(f"Removed plugin {self.preferred_name}")
 self.changed = True
@@ -323,7 +323,7 @@ class DockerPluginManager:
 "/plugins/{0}/set", self.preferred_name, data=data
 )
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 self.actions.append(f"Updated plugin {self.preferred_name} settings")
 self.changed = True
 else:
@@ -361,7 +361,7 @@ class DockerPluginManager:
 params={"timeout": timeout},
 )
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 self.actions.append(f"Enabled plugin {self.preferred_name}")
 self.changed = True
 else:
@@ -374,7 +374,7 @@ class DockerPluginManager:
 params={"timeout": timeout},
 )
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 self.actions.append(f"Enabled plugin {self.preferred_name}")
 self.changed = True
@@ -387,7 +387,7 @@ class DockerPluginManager:
 "/plugins/{0}/disable", self.preferred_name
 )
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 self.actions.append(f"Disable plugin {self.preferred_name}")
 self.changed = True
 else:

View File

@@ -161,7 +161,7 @@ import traceback
 import typing as t
 from time import sleep
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._common_cli import (
 AnsibleModuleDockerClient,
@@ -190,9 +190,9 @@ def docker_stack_services(
 dummy_rc, out, err = client.call_cli(
 "stack", "services", stack_name, "--format", "{{.Name}}"
 )
-if to_native(err) == f"Nothing found in stack: {stack_name}\n":
+if to_text(err) == f"Nothing found in stack: {stack_name}\n":
 return []
-return to_native(out).strip().split("\n")
+return to_text(out).strip().split("\n")
 def docker_service_inspect(
@@ -221,7 +221,7 @@ def docker_stack_deploy(
 command += ["--compose-file", compose_file]
 command += [stack_name]
 rc, out, err = client.call_cli(*command)
-return rc, to_native(out), to_native(err)
+return rc, to_text(out), to_text(err)
 def docker_stack_inspect(
@@ -244,11 +244,11 @@ def docker_stack_rm(
 command += ["--detach=false"]
 rc, out, err = client.call_cli(*command)
-while to_native(err) != f"Nothing found in stack: {stack_name}\n" and retries > 0:
+while to_text(err) != f"Nothing found in stack: {stack_name}\n" and retries > 0:
 sleep(interval)
 retries = retries - 1
 rc, out, err = client.call_cli(*command)
-return rc, to_native(out), to_native(err)
+return rc, to_text(out), to_text(err)
 def main() -> None:

View File

@@ -77,7 +77,7 @@ EXAMPLES = r"""
 import json
 import traceback
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._common_cli import (
 AnsibleModuleDockerClient,
@@ -99,7 +99,7 @@ def main() -> None:
 changed=False,
 rc=rc,
 stdout="\n".join([json.dumps(entry) for entry in ret]),
-stderr=to_native(stderr).strip(),
+stderr=to_text(stderr).strip(),
 results=ret,
 )
 except DockerException as e:

View File

@@ -85,7 +85,7 @@ EXAMPLES = r"""
 import json
 import traceback
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._common_cli import (
 AnsibleModuleDockerClient,
@@ -108,7 +108,7 @@ def main() -> None:
 changed=False,
 rc=rc,
 stdout="\n".join([json.dumps(entry) for entry in ret]),
-stderr=to_native(stderr).strip(),
+stderr=to_text(stderr).strip(),
 results=ret,
 )
 except DockerException as e:

View File

@@ -120,7 +120,7 @@ volume:
 import traceback
 import typing as t
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils.common.text.converters import to_text
 from ansible_collections.community.docker.plugins.module_utils._api.errors import (
 APIError,
@@ -185,7 +185,7 @@ class DockerVolumeManager:
 try:
 volumes = self.client.get_json("/volumes")
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 if volumes["Volumes"] is None:
 return None
@@ -259,7 +259,7 @@ class DockerVolumeManager:
 "/volumes/{0}", resp["Name"]
 )
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 self.actions.append(
 f"Created volume {self.parameters.volume_name} with driver {self.parameters.driver}"
@@ -272,7 +272,7 @@ class DockerVolumeManager:
 try:
 self.client.delete_call("/volumes/{0}", self.parameters.volume_name)
 except APIError as e:
-self.client.fail(to_native(e))
+self.client.fail(to_text(e))
 self.actions.append(f"Removed volume {self.parameters.volume_name}")
 self.results["changed"] = True

View File

@@ -24,14 +24,14 @@ if t.TYPE_CHECKING:
 from collections.abc import Callable
-@pytest.fixture(scope="module")
-def templar() -> Templar:
+@pytest.fixture(scope="module", name="templar")
+def templar_fixture() -> Templar:
 dataloader = create_autospec(DataLoader, instance=True)
 return Templar(loader=dataloader)
-@pytest.fixture(scope="module")
-def inventory(templar: Templar) -> InventoryModule:
+@pytest.fixture(scope="module", name="inventory")
+def inventory_fixture(templar: Templar) -> InventoryModule:
 r = InventoryModule()
 r.inventory = InventoryData()
 r.templar = templar

View File

@@ -52,7 +52,7 @@ if t.TYPE_CHECKING:
 DEFAULT_TIMEOUT_SECONDS = constants.DEFAULT_TIMEOUT_SECONDS
-def response(
+def create_response(
 status_code: int = 200,
 content: bytes | dict[str, t.Any] | list[dict[str, t.Any]] = b"",
 headers: dict[str, str] | None = None,
@@ -95,7 +95,7 @@ def fake_resp(
 if not key:
 raise NotImplementedError(f"{method} {url}")
 status_code, content = fake_api.fake_responses[key]()
-return response(status_code=status_code, content=content)
+return create_response(status_code=status_code, content=content)
 fake_request = mock.Mock(side_effect=fake_resp)
@@ -328,26 +328,26 @@ class DockerApiTest(BaseAPIClientTest):
 # pass `decode=False` to the helper
 raw_resp._fp.seek(0)
-resp = response(status_code=status_code, content=content, raw=raw_resp)
+resp = create_response(status_code=status_code, content=content, raw=raw_resp)
 result = next(self.client._stream_helper(resp))
 assert result == content_str
 # pass `decode=True` to the helper
 raw_resp._fp.seek(0)
-resp = response(status_code=status_code, content=content, raw=raw_resp)
+resp = create_response(status_code=status_code, content=content, raw=raw_resp)
 result = next(self.client._stream_helper(resp, decode=True))
 assert result == content
 # non-chunked response, pass `decode=False` to the helper
 setattr(raw_resp._fp, "chunked", False)
 raw_resp._fp.seek(0)
-resp = response(status_code=status_code, content=content, raw=raw_resp)
+resp = create_response(status_code=status_code, content=content, raw=raw_resp)
 result = next(self.client._stream_helper(resp))
 assert result == content_str.decode("utf-8") # type: ignore
 # non-chunked response, pass `decode=True` to the helper
 raw_resp._fp.seek(0)
-resp = response(status_code=status_code, content=content, raw=raw_resp)
+resp = create_response(status_code=status_code, content=content, raw=raw_resp)
 result = next(self.client._stream_helper(resp, decode=True))
 assert result == content

View File

@@ -22,8 +22,8 @@ from ..test_support.docker_image_archive_stubbing import (
 )
-@pytest.fixture
-def tar_file_name(tmpdir: t.Any) -> str:
+@pytest.fixture(name="tar_file_name")
+def tar_file_name_fixture(tmpdir: t.Any) -> str:
 """
 Return the name of a non-existing tar file in an existing temporary directory.
 """

View File

@@ -37,8 +37,8 @@ def capture_logging(messages: list[str]) -> Callable[[str], None]:
 return capture
-@pytest.fixture
-def tar_file_name(tmpdir: t.Any) -> str:
+@pytest.fixture(name="tar_file_name")
+def tar_file_name_fixture(tmpdir: t.Any) -> str:
 """
 Return the name of a non-existing tar file in an existing temporary directory.
 """