Mirror of https://github.com/ansible-collections/community.docker.git, synced 2025-12-15 11:32:05 +00:00
Python code modernization, 8/n (#1179)
* Use to_text instead of to_native.
* Remove no longer needed pylint ignores.
* Remove another pylint ignore.
* Remove no longer needed ignore.
* Address redefined-outer-name.
* Address consider-using-with.
This commit is contained in:
parent 6ad4bfcd40
commit be000755fc
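The bulk of the diff below swaps `to_native()` for `to_text()` from `ansible.module_utils.common.text.converters`; on Python 3, `to_native` is simply an alias for `to_text` (both return `str`), so the change is behavior-preserving while making the intent explicit. A minimal sketch of what the converter does with typical subprocess output (the example values are illustrative, not from the collection):

```python
from ansible.module_utils.common.text.converters import to_text

stderr = b"Nothing found in stack: mystack\n"  # subprocess output arrives as bytes
message = to_text(stderr, errors="surrogate_or_strict")  # decodes to str on Python 3
assert message == "Nothing found in stack: mystack\n"
```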
@@ -379,18 +379,12 @@ disable=raw-checker-failed,
wrong-import-order,
wrong-import-position,
# To clean up:
arguments-differ,
consider-using-with,
fixme,
import-error, # TODO figure out why pylint cannot find the module
no-member,
no-name-in-module, # TODO figure out why pylint cannot find the module
not-an-iterable, # TODO: needs better typing info
protected-access,
redefined-outer-name, # needed for test fixtures
subprocess-popen-preexec-fn,
unexpected-keyword-arg,
unsupported-assignment-operation, # TODO: needs better typing info
unused-argument,
# Cannot remove yet due to inadequacy of rules
inconsistent-return-statements, # doesn't notice that fail_json() does not return

@@ -29,6 +29,7 @@ class ActionModule(ActionBase):
result = super().run(tmp, task_vars)
del tmp # tmp no longer has any effect

# pylint: disable-next=no-member
max_file_size_for_diff: int = C.MAX_FILE_SIZE_FOR_DIFF # type: ignore
self._task.args["_max_file_size_for_diff"] = max_file_size_for_diff
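The hunk above adds a targeted `# pylint: disable-next=no-member` right before the line that trips the check, which is what allows the corresponding entries to be dropped from the global `disable=` list in the pylint configuration shown in the first hunk. A minimal sketch of the pattern (the constant name is taken from the hunk; the wrapper function is illustrative only):

```python
import ansible.constants as C


def max_diff_size() -> int:
    # Scoped suppression: only the next line is exempt from the named check,
    # so no repository-wide ignore is needed for it.
    # pylint: disable-next=no-member
    return C.MAX_FILE_SIZE_FOR_DIFF  # type: ignore
```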
@@ -123,7 +123,7 @@ from shlex import quote

from ansible.errors import AnsibleConnectionFailure, AnsibleError, AnsibleFileNotFound
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.plugins.connection import BUFSIZE, ConnectionBase
from ansible.utils.display import Display

@@ -188,7 +188,7 @@ class Connection(ConnectionBase):
) as p:
cmd_output, err = p.communicate()

return old_docker_cmd, to_native(cmd_output), err, p.returncode
return old_docker_cmd, to_text(cmd_output), err, p.returncode

def _new_docker_version(self) -> tuple[list[str], str, bytes, int]:
# no result yet, must be newer Docker version

@@ -201,7 +201,7 @@ class Connection(ConnectionBase):
new_docker_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
) as p:
cmd_output, err = p.communicate()
return new_docker_cmd, to_native(cmd_output), err, p.returncode
return new_docker_cmd, to_text(cmd_output), err, p.returncode

def _get_docker_version(self) -> str:
cmd, cmd_output, err, returncode = self._old_docker_version()

@@ -213,7 +213,7 @@ class Connection(ConnectionBase):
cmd, cmd_output, err, returncode = self._new_docker_version()
if returncode:
raise AnsibleError(
f"Docker version check ({to_native(cmd)}) failed: {to_native(err)}"
f"Docker version check ({to_text(cmd)}) failed: {to_text(err)}"
)

return self._sanitize_version(to_text(cmd_output, errors="surrogate_or_strict"))

@@ -427,7 +427,7 @@ class Connection(ConnectionBase):
stdout, stderr = p.communicate()
raise AnsibleError(
"timeout waiting for privilege escalation password prompt:\n"
+ to_native(become_output)
+ to_text(become_output)
)

chunks = b""

@@ -445,7 +445,7 @@ class Connection(ConnectionBase):
stdout, stderr = p.communicate()
raise AnsibleError(
"privilege output closed while waiting for password prompt:\n"
+ to_native(become_output)
+ to_text(become_output)
)
become_output += chunks
finally:

@@ -503,7 +503,7 @@ class Connection(ConnectionBase):
out_path = self._prefix_login_path(out_path)
if not os.path.exists(to_bytes(in_path, errors="surrogate_or_strict")):
raise AnsibleFileNotFound(
f"file or module does not exist: {to_native(in_path)}"
f"file or module does not exist: {to_text(in_path)}"
)

out_path = quote(out_path)

@@ -525,6 +525,7 @@ class Connection(ConnectionBase):
)
args = [to_bytes(i, errors="surrogate_or_strict") for i in args]
try:
# pylint: disable-next=consider-using-with
p = subprocess.Popen(
args, stdin=in_file, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)

@@ -536,7 +537,7 @@ class Connection(ConnectionBase):

if p.returncode != 0:
raise AnsibleError(
f"failed to transfer file {to_native(in_path)} to {to_native(out_path)}:\n{to_native(stdout)}\n{to_native(stderr)}"
f"failed to transfer file {to_text(in_path)} to {to_text(out_path)}:\n{to_text(stdout)}\n{to_text(stderr)}"
)

def fetch_file(self, in_path: str, out_path: str) -> None:

@@ -587,6 +588,7 @@ class Connection(ConnectionBase):
to_bytes(actual_out_path, errors="surrogate_or_strict"), "wb"
) as out_file:
try:
# pylint: disable-next=consider-using-with
pp = subprocess.Popen(
args,
stdin=subprocess.PIPE,
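In this connection plugin, `subprocess.Popen` is either used as a context manager (the `) as p:` lines above) or kept as a plain call behind a targeted `# pylint: disable-next=consider-using-with` where the process object must outlive the block. A minimal sketch of the context-manager form used for the Docker version probes (the command list and text conversion follow the diff; the helper name and return shape are illustrative):

```python
import subprocess

from ansible.module_utils.common.text.converters import to_text


def probe_docker_version(docker_cmd: list[str]) -> tuple[str, bytes, int]:
    # Using Popen as a context manager satisfies pylint's consider-using-with
    # and guarantees the pipes are closed even if communicate() raises.
    with subprocess.Popen(
        docker_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ) as p:
        cmd_output, err = p.communicate()
    return to_text(cmd_output), err, p.returncode
```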
@@ -110,7 +110,7 @@ import os.path
import typing as t

from ansible.errors import AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.plugins.connection import ConnectionBase
from ansible.utils.display import Display

@@ -335,13 +335,13 @@ class Connection(ConnectionBase):
stdout, stderr = exec_socket_handler.consume()
raise AnsibleConnectionFailure(
"timeout waiting for privilege escalation password prompt:\n"
+ to_native(become_output[0])
+ to_text(become_output[0])
)

if exec_socket_handler.is_eof():
raise AnsibleConnectionFailure(
"privilege output closed while waiting for password prompt:\n"
+ to_native(become_output[0])
+ to_text(become_output[0])
)

if not self.become.check_success(become_output[0]):

@@ -437,9 +437,9 @@ class Connection(ConnectionBase):
not_found_can_be_resource=True,
)
except DockerFileNotFound as exc:
raise AnsibleFileNotFound(to_native(exc)) from exc
raise AnsibleFileNotFound(to_text(exc)) from exc
except DockerFileCopyError as exc:
raise AnsibleConnectionFailure(to_native(exc)) from exc
raise AnsibleConnectionFailure(to_text(exc)) from exc

def fetch_file(self, in_path: str, out_path: str) -> None:
"""Fetch a file from container to local."""

@@ -468,9 +468,9 @@ class Connection(ConnectionBase):
not_found_can_be_resource=True,
)
except DockerFileNotFound as exc:
raise AnsibleFileNotFound(to_native(exc)) from exc
raise AnsibleFileNotFound(to_text(exc)) from exc
except DockerFileCopyError as exc:
raise AnsibleConnectionFailure(to_native(exc)) from exc
raise AnsibleConnectionFailure(to_text(exc)) from exc

def close(self) -> None:
"""Terminate the connection. Nothing to do for Docker"""

@@ -50,7 +50,7 @@ import typing as t

import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.plugins.connection import ConnectionBase
from ansible.utils.display import Display
from ansible.utils.path import unfrackpath

@@ -92,6 +92,7 @@ class Connection(ConnectionBase):

display.debug("in nsenter.exec_command()")

# pylint: disable-next=no-member
def_executable: str | None = C.DEFAULT_EXECUTABLE # type: ignore[attr-defined]
executable = def_executable.split()[0] if def_executable else None

@@ -178,7 +179,7 @@ class Connection(ConnectionBase):
stdout, stderr = p.communicate()
raise AnsibleError(
"timeout waiting for privilege escalation password prompt:\n"
+ to_native(become_output)
+ to_text(become_output)
)

chunks = b""

@@ -196,7 +197,7 @@ class Connection(ConnectionBase):
stdout, stderr = p.communicate()
raise AnsibleError(
"privilege output closed while waiting for password prompt:\n"
+ to_native(become_output)
+ to_text(become_output)
)
become_output += chunks
finally:

@@ -279,7 +280,7 @@ class Connection(ConnectionBase):
out_file.write(out)
except IOError as e:
raise AnsibleError(
f"failed to transfer file to {to_native(out_path)}: {e}"
f"failed to transfer file to {to_text(out_path)}: {e}"
) from e

def close(self) -> None:
@@ -105,7 +105,7 @@ import typing as t

from ansible.errors import AnsibleError
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.module_utils.common.text.converters import to_text
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable, Constructable
from ansible.utils.display import Display
from ansible_collections.community.library_inventory_filtering_v1.plugins.plugin_utils.inventory_filter import (

@@ -142,7 +142,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
try:
self.docker_machine_path = get_bin_path("docker-machine")
except ValueError as e:
raise AnsibleError(to_native(e)) from e
raise AnsibleError(to_text(e)) from e

command = [self.docker_machine_path]
command.extend(args)

@@ -652,6 +652,8 @@ class APIClient(_Session):

def get_adapter(self, url: str) -> BaseAdapter:
try:
# pylint finds our Session stub instead of requests.Session:
# pylint: disable-next=no-member
return super().get_adapter(url)
except _InvalidSchema as e:
if self._custom_adapter:

@@ -13,7 +13,7 @@ from __future__ import annotations

import typing as t

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ._import_helper import HTTPError as _HTTPError

@@ -39,7 +39,7 @@ def create_api_error_from_http_exception(e: _HTTPError) -> t.NoReturn:
try:
explanation = response.json()["message"]
except ValueError:
explanation = to_native((response.content or "").strip())
explanation = to_text((response.content or "").strip())
cls = APIError
if response.status_code == 404:
if explanation and (

@@ -16,6 +16,8 @@ from .._import_helper import HTTPAdapter as _HTTPAdapter

class BaseHTTPAdapter(_HTTPAdapter):
def close(self) -> None:
# pylint finds our HTTPAdapter stub instead of requests.adapters.HTTPAdapter:
# pylint: disable-next=no-member
super().close()
if hasattr(self, "pools"):
self.pools.clear()

@@ -58,7 +58,11 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
We already take care of a normal poolmanager via init_poolmanager

But we still need to take care of when there is a proxy poolmanager

Note that this method is no longer called for newer requests versions.
"""
# pylint finds our HTTPAdapter stub instead of requests.adapters.HTTPAdapter:
# pylint: disable-next=no-member
conn = super().get_connection(*args, **kwargs)
if (
self.assert_hostname is not None
@@ -97,8 +97,10 @@ def create_archive(
) -> t.IO[bytes]:
extra_files = extra_files or []
if not fileobj:
# pylint: disable-next=consider-using-with
fileobj = tempfile.NamedTemporaryFile()
t = tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj)

with tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj) as tarf:
if files is None:
files = build_file_list(root)
extra_names = set(e[0] for e in extra_files)

@@ -108,7 +110,7 @@ def create_archive(
continue
full_path = os.path.join(root, path)

i = t.gettarinfo(full_path, arcname=path)
i = tarf.gettarinfo(full_path, arcname=path)
if i is None:
# This happens when we encounter a socket file. We can safely
# ignore it and proceed.

@@ -126,20 +128,19 @@ def create_archive(
if i.isfile():
try:
with open(full_path, "rb") as f:
t.addfile(i, f)
tarf.addfile(i, f)
except IOError as exc:
raise IOError(f"Can not read file in context: {full_path}") from exc
else:
# Directories, FIFOs, symlinks... do not need to be read.
t.addfile(i, None)
tarf.addfile(i, None)

for name, contents in extra_files:
info = tarfile.TarInfo(name)
contents_encoded = contents.encode("utf-8")
info.size = len(contents_encoded)
t.addfile(info, io.BytesIO(contents_encoded))
tarf.addfile(info, io.BytesIO(contents_encoded))

t.close()
fileobj.seek(0)
return fileobj

@@ -147,7 +148,7 @@ def create_archive(

def mkbuildcontext(dockerfile: io.BytesIO | t.IO[bytes]) -> t.IO[bytes]:
f = tempfile.NamedTemporaryFile() # pylint: disable=consider-using-with
try:
with tarfile.open(mode="w", fileobj=f) as t:
with tarfile.open(mode="w", fileobj=f) as tarf:
if isinstance(dockerfile, io.StringIO): # type: ignore
raise TypeError("Please use io.BytesIO to create in-memory Dockerfiles")
if isinstance(dockerfile, io.BytesIO):

@@ -155,8 +156,8 @@ def mkbuildcontext(dockerfile: io.BytesIO | t.IO[bytes]) -> t.IO[bytes]:
dfinfo.size = len(dockerfile.getvalue())
dockerfile.seek(0)
else:
dfinfo = t.gettarinfo(fileobj=dockerfile, arcname="Dockerfile")
t.addfile(dfinfo, dockerfile)
dfinfo = tarf.gettarinfo(fileobj=dockerfile, arcname="Dockerfile")
tarf.addfile(dfinfo, dockerfile)
f.seek(0)
except Exception: # noqa: E722
f.close()
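The archive helpers above move from an explicit `tarfile.open(...)` / `t.close()` pair to a `with` block and, in passing, rename the archive handle to `tarf` so it no longer shadows the `typing as t` alias. A minimal sketch of the same pattern (the extra-files handling mirrors the diff; the function name and argument shape are illustrative):

```python
import io
import tarfile
import tempfile
import typing as t


def archive_extra_files(extra_files: list[tuple[str, str]]) -> t.IO[bytes]:
    # pylint: disable-next=consider-using-with
    fileobj = tempfile.NamedTemporaryFile()  # the caller is responsible for closing this
    # The context manager replaces the manual t.close() call; naming the handle
    # "tarf" avoids clashing with the "typing as t" import above.
    with tarfile.open(mode="w", fileobj=fileobj) as tarf:
        for name, contents in extra_files:
            info = tarfile.TarInfo(name)
            data = contents.encode("utf-8")
            info.size = len(data)
            tarf.addfile(info, io.BytesIO(data))
    fileobj.seek(0)
    return fileobj
```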
@@ -14,7 +14,7 @@ import typing as t

from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._api.auth import (
resolve_repository_name,

@@ -132,9 +132,7 @@ class AnsibleDockerClientBase:
self.fail(
"Cannot determine Docker Daemon information. Are you maybe using podman instead of docker?"
)
self.docker_api_version_str = to_native(
self._version["Server"]["ApiVersion"]
)
self.docker_api_version_str = to_text(self._version["Server"]["ApiVersion"])
self.docker_api_version = LooseVersion(self.docker_api_version_str)
min_docker_api_version = min_docker_api_version or "1.25"
if self.docker_api_version < LooseVersion(min_docker_api_version):

@@ -191,12 +189,12 @@ class AnsibleDockerClientBase:
*args, check_rc=check_rc, data=data, cwd=cwd, environ_update=environ_update
)
if warn_on_stderr and stderr:
self.warn(to_native(stderr))
self.warn(to_text(stderr))
try:
data = json.loads(stdout)
except Exception as exc: # pylint: disable=broad-exception-caught
self.fail(
f"Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_native(stdout)}"
f"Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_text(stdout)}"
)
return rc, data, stderr

@@ -213,7 +211,7 @@ class AnsibleDockerClientBase:
*args, check_rc=check_rc, data=data, cwd=cwd, environ_update=environ_update
)
if warn_on_stderr and stderr:
self.warn(to_native(stderr))
self.warn(to_text(stderr))
result = []
try:
for line in stdout.splitlines():

@@ -222,7 +220,7 @@ class AnsibleDockerClientBase:
result.append(json.loads(line))
except Exception as exc: # pylint: disable=broad-exception-caught
self.fail(
f"Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_native(stdout)}"
f"Error while parsing JSON output of {self._compose_cmd_str(args)}: {exc}\nJSON output: {to_text(stdout)}"
)
return rc, result, stderr

@@ -338,7 +336,7 @@ class AnsibleDockerClientBase:
self.log(f"Image {name}:{tag} not found.")
return None
if rc != 0:
self.fail(f"Error inspecting image {name}:{tag} - {to_native(stderr)}")
self.fail(f"Error inspecting image {name}:{tag} - {to_text(stderr)}")
return image[0]

self.log(f"Image {name}:{tag} not found.")

@@ -367,11 +365,11 @@ class AnsibleDockerClientBase:
rc, image, stderr = self.call_cli_json("image", "inspect", image_id)
if not image:
if not accept_missing_image:
self.fail(f"Error inspecting image ID {image_id} - {to_native(stderr)}")
self.fail(f"Error inspecting image ID {image_id} - {to_text(stderr)}")
self.log(f"Image {image_id} not found.")
return None
if rc != 0:
self.fail(f"Error inspecting image ID {image_id} - {to_native(stderr)}")
self.fail(f"Error inspecting image ID {image_id} - {to_text(stderr)}")
return image[0]
@@ -19,7 +19,7 @@ from collections import namedtuple
from shlex import quote

from ansible.module_utils.basic import missing_required_lib
from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._logfmt import (
InvalidLogFmt as _InvalidLogFmt,

@@ -418,7 +418,7 @@ def parse_json_events(
ResourceType.UNKNOWN,
None,
"Warning",
to_native(line[len(b"Warning: ") :]),
to_text(line[len(b"Warning: ") :]),
)
events.append(event)
continue

@@ -557,7 +557,7 @@ def parse_events(
if stderr_lines and stderr_lines[-1] == b"":
del stderr_lines[-1]
for index, line in enumerate(stderr_lines):
line = to_native(line.strip())
line = to_text(line.strip())
if not line:
continue
warn_missing_dry_run_prefix = False

@@ -731,8 +731,8 @@ def update_failed(
result["failed"] = True
result["msg"] = "\n".join(errors)
result["cmd"] = " ".join(quote(arg) for arg in [cli] + args)
result["stdout"] = to_native(stdout)
result["stderr"] = to_native(stderr)
result["stdout"] = to_text(stdout)
result["stderr"] = to_text(stderr)
result["rc"] = rc
return True

@@ -978,8 +978,8 @@ class BaseComposeManager(DockerBaseClass):
ignore_build_events=ignore_build_events,
)
result["actions"] = result.get("actions", []) + extract_actions(events)
result["stdout"] = combine_text_output(result.get("stdout"), to_native(stdout))
result["stderr"] = combine_text_output(result.get("stderr"), to_native(stderr))
result["stdout"] = combine_text_output(result.get("stdout"), to_text(stdout))
result["stderr"] = combine_text_output(result.get("stderr"), to_text(stderr))

def update_failed(
self,

@@ -18,7 +18,7 @@ import stat
import tarfile
import typing as t

from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.text.converters import to_bytes, to_text

from ansible_collections.community.docker.plugins.module_utils._api.errors import (
APIError,

@@ -223,7 +223,7 @@ def put_file(
) -> None:
"""Transfer a file from local to Docker container."""
if not os.path.exists(to_bytes(in_path, errors="surrogate_or_strict")):
raise DockerFileNotFound(f"file or module does not exist: {to_native(in_path)}")
raise DockerFileNotFound(f"file or module does not exist: {to_text(in_path)}")

b_in_path = to_bytes(in_path, errors="surrogate_or_strict")
@@ -790,17 +790,17 @@ def _preprocess_mounts(
) -> dict[str, t.Any]:
last: dict[str, str] = {}

def check_collision(t: str, name: str) -> None:
if t in last:
if name == last[t]:
def check_collision(target: str, name: str) -> None:
if target in last:
if name == last[target]:
module.fail_json(
msg=f'The mount point "{t}" appears twice in the {name} option'
msg=f'The mount point "{target}" appears twice in the {name} option'
)
else:
module.fail_json(
msg=f'The mount point "{t}" appears both in the {name} and {last[t]} option'
msg=f'The mount point "{target}" appears both in the {name} and {last[target]} option'
)
last[t] = name
last[target] = name

if "mounts" in values:
mounts = []
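Renaming the helper's parameter from `t` to `target` clears the redefined-outer-name warning, since `t` is already taken by the module-level `import typing as t` (visible in the `t.Any` annotation above). A minimal standalone sketch of the renamed helper (the `last` dict and the message wording follow the diff; the `fail` callback stands in for `module.fail_json` and is illustrative):

```python
import typing as t  # the outer name that the old parameter "t" used to shadow


def make_collision_checker(fail: t.Callable[[str], None]) -> t.Callable[[str, str], None]:
    last: dict[str, str] = {}

    def check_collision(target: str, name: str) -> None:
        # "target" no longer shadows the "typing as t" alias above.
        if target in last:
            fail(f'The mount point "{target}" appears both in the {name} and {last[target]} option')
        last[target] = name

    return check_collision
```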
@@ -10,7 +10,7 @@ from __future__ import annotations
import base64
import random

from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.text.converters import to_bytes, to_text


def generate_insecure_key() -> bytes:

@@ -31,7 +31,7 @@ def scramble(value: str, key: bytes) -> str:
b_value = to_bytes(value)
k = key[0]
b_value = bytes([k ^ b for b in b_value])
return f"=S={to_native(base64.b64encode(b_value))}"
return f"=S={to_text(base64.b64encode(b_value))}"


def unscramble(value: str, key: bytes) -> str:
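In the scramble helper the conversion matters because `base64.b64encode()` returns `bytes`; wrapping it in `to_text()` keeps the f-string from rendering a `b'...'` literal inside the `=S=`-prefixed result. A minimal sketch of the round trip (the `scramble` body follows the diff; `unscramble_sketch` is an illustrative inverse, not the module's code):

```python
import base64

from ansible.module_utils.common.text.converters import to_bytes, to_text


def scramble(value: str, key: bytes) -> str:
    # XOR every byte with the first key byte, then base64-encode the result.
    b_value = to_bytes(value)
    k = key[0]
    b_value = bytes([k ^ b for b in b_value])
    # b64encode() returns bytes; to_text() yields a clean str for the f-string.
    return f"=S={to_text(base64.b64encode(b_value))}"


def unscramble_sketch(value: str, key: bytes) -> str:
    # Illustrative inverse of scramble(): strip the prefix, decode, XOR again.
    b_value = base64.b64decode(value[len("=S=") :])
    k = key[0]
    return to_text(bytes([k ^ b for b in b_value]))
```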
@@ -171,7 +171,7 @@ import stat
import traceback
import typing as t

from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.module_utils.common.validation import check_type_int

from ansible_collections.community.docker.plugins.module_utils._api.errors import (

@@ -1041,7 +1041,7 @@ def copy_content_into_container(

def parse_modern(mode: str | int) -> int:
if isinstance(mode, str):
return int(to_native(mode), 8)
return int(to_text(mode), 8)
if isinstance(mode, int):
return mode
raise TypeError(f"must be an octal string or an integer, got {mode!r}")

@@ -1049,7 +1049,7 @@ def parse_modern(mode: str | int) -> int:

def parse_octal_string_only(mode: str) -> int:
if isinstance(mode, str):
return int(to_native(mode), 8)
return int(to_text(mode), 8)
raise TypeError(f"must be an octal string, got {mode!r}")


@@ -1185,7 +1185,7 @@ def main() -> None:
except DockerUnexpectedError as exc:
client.fail(f"Unexpected error: {exc}", exception=traceback.format_exc())
except DockerFileCopyError as exc:
client.fail(to_native(exc))
client.fail(to_text(exc))
except OSError as exc:
client.fail(f"Unexpected error: {exc}", exception=traceback.format_exc())


@@ -369,7 +369,7 @@ import os
import traceback
import typing as t

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.common.text.formatters import human_to_bytes

from ansible_collections.community.docker.plugins.module_utils._api.auth import (

@@ -899,7 +899,7 @@ class ImageManager(DockerBaseClass):
buildargs = {}
if self.buildargs:
for key, value in self.buildargs.items():
buildargs[key] = to_native(value)
buildargs[key] = to_text(value)

container_limits = self.container_limits or {}
for key in container_limits.keys():

@@ -284,7 +284,7 @@ import os
import traceback
import typing as t

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.common.text.formatters import human_to_bytes

from ansible_collections.community.docker.plugins.module_utils._api.utils.utils import (

@@ -545,12 +545,12 @@ class ImageBuilder(DockerBaseClass):
if rc != 0:
self.fail(
f"Building {self.name}:{self.tag} failed",
stdout=to_native(stdout),
stderr=to_native(stderr),
stdout=to_text(stdout),
stderr=to_text(stderr),
command=args,
)
results["stdout"] = to_native(stdout)
results["stderr"] = to_native(stderr)
results["stdout"] = to_text(stdout)
results["stderr"] = to_text(stderr)
results["image"] = self.client.find_image(self.name, self.tag) or {}
results["command"] = args
@@ -286,7 +286,7 @@ import time
import traceback
import typing as t

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._api.errors import (
DockerException,

@@ -412,7 +412,7 @@ class DockerNetworkManager:
for ipam_config in self.parameters.ipam_config:
validate_cidr(ipam_config["subnet"])
except ValueError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))

if self.parameters.driver_options:
self.parameters.driver_options = clean_dict_booleans_for_docker_api(

@@ -143,7 +143,7 @@ except ImportError:
# missing Docker SDK for Python handled in ansible.module_utils.docker.common
pass

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._common import (
RequestException,

@@ -237,11 +237,12 @@ class SwarmNodeManager(DockerBaseClass):
node_spec["Labels"] = self.parameters.labels
changed = True
elif self.parameters.labels_state == "merge":
node_spec["Labels"] = dict(node_info["Spec"]["Labels"] or {})
labels: dict[str, str] = dict(node_info["Spec"]["Labels"] or {})
node_spec["Labels"] = labels
if self.parameters.labels is not None:
for key, value in self.parameters.labels.items():
if node_spec["Labels"].get(key) != value:
node_spec["Labels"][key] = value
if labels.get(key) != value:
labels[key] = value
changed = True

if self.parameters.labels_to_remove is not None:

@@ -253,7 +254,7 @@ class SwarmNodeManager(DockerBaseClass):
changed = True
else:
self.client.module.warn(
f"Label '{to_native(key)}' listed both in 'labels' and 'labels_to_remove'. "
f"Label '{to_text(key)}' listed both in 'labels' and 'labels_to_remove'. "
"Keeping the assigned label value."
)
else:

@@ -131,7 +131,7 @@ actions:
import traceback
import typing as t

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._api import auth
from ansible_collections.community.docker.plugins.module_utils._api.errors import (

@@ -215,7 +215,7 @@ class DockerPluginManager:
except NotFound:
return None
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))

def has_different_config(self) -> DifferenceTracker:
"""

@@ -293,7 +293,7 @@ class DockerPluginManager:
"/plugins/{0}/set", self.preferred_name, data=data
)
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))

self.actions.append(f"Installed plugin {self.preferred_name}")
self.changed = True

@@ -307,7 +307,7 @@ class DockerPluginManager:
"/plugins/{0}", self.preferred_name, params={"force": force}
)
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))

self.actions.append(f"Removed plugin {self.preferred_name}")
self.changed = True

@@ -323,7 +323,7 @@ class DockerPluginManager:
"/plugins/{0}/set", self.preferred_name, data=data
)
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))
self.actions.append(f"Updated plugin {self.preferred_name} settings")
self.changed = True
else:

@@ -361,7 +361,7 @@ class DockerPluginManager:
params={"timeout": timeout},
)
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))
self.actions.append(f"Enabled plugin {self.preferred_name}")
self.changed = True
else:

@@ -374,7 +374,7 @@ class DockerPluginManager:
params={"timeout": timeout},
)
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))
self.actions.append(f"Enabled plugin {self.preferred_name}")
self.changed = True

@@ -387,7 +387,7 @@ class DockerPluginManager:
"/plugins/{0}/disable", self.preferred_name
)
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))
self.actions.append(f"Disable plugin {self.preferred_name}")
self.changed = True
else:
@@ -161,7 +161,7 @@ import traceback
import typing as t
from time import sleep

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._common_cli import (
AnsibleModuleDockerClient,

@@ -190,9 +190,9 @@ def docker_stack_services(
dummy_rc, out, err = client.call_cli(
"stack", "services", stack_name, "--format", "{{.Name}}"
)
if to_native(err) == f"Nothing found in stack: {stack_name}\n":
if to_text(err) == f"Nothing found in stack: {stack_name}\n":
return []
return to_native(out).strip().split("\n")
return to_text(out).strip().split("\n")


def docker_service_inspect(

@@ -221,7 +221,7 @@ def docker_stack_deploy(
command += ["--compose-file", compose_file]
command += [stack_name]
rc, out, err = client.call_cli(*command)
return rc, to_native(out), to_native(err)
return rc, to_text(out), to_text(err)


def docker_stack_inspect(

@@ -244,11 +244,11 @@ def docker_stack_rm(
command += ["--detach=false"]
rc, out, err = client.call_cli(*command)

while to_native(err) != f"Nothing found in stack: {stack_name}\n" and retries > 0:
while to_text(err) != f"Nothing found in stack: {stack_name}\n" and retries > 0:
sleep(interval)
retries = retries - 1
rc, out, err = client.call_cli(*command)
return rc, to_native(out), to_native(err)
return rc, to_text(out), to_text(err)


def main() -> None:

@@ -77,7 +77,7 @@ EXAMPLES = r"""
import json
import traceback

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._common_cli import (
AnsibleModuleDockerClient,

@@ -99,7 +99,7 @@ def main() -> None:
changed=False,
rc=rc,
stdout="\n".join([json.dumps(entry) for entry in ret]),
stderr=to_native(stderr).strip(),
stderr=to_text(stderr).strip(),
results=ret,
)
except DockerException as e:

@@ -85,7 +85,7 @@ EXAMPLES = r"""
import json
import traceback

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._common_cli import (
AnsibleModuleDockerClient,

@@ -108,7 +108,7 @@ def main() -> None:
changed=False,
rc=rc,
stdout="\n".join([json.dumps(entry) for entry in ret]),
stderr=to_native(stderr).strip(),
stderr=to_text(stderr).strip(),
results=ret,
)
except DockerException as e:

@@ -120,7 +120,7 @@ volume:
import traceback
import typing as t

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common.text.converters import to_text

from ansible_collections.community.docker.plugins.module_utils._api.errors import (
APIError,

@@ -185,7 +185,7 @@ class DockerVolumeManager:
try:
volumes = self.client.get_json("/volumes")
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))

if volumes["Volumes"] is None:
return None

@@ -259,7 +259,7 @@ class DockerVolumeManager:
"/volumes/{0}", resp["Name"]
)
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))

self.actions.append(
f"Created volume {self.parameters.volume_name} with driver {self.parameters.driver}"

@@ -272,7 +272,7 @@ class DockerVolumeManager:
try:
self.client.delete_call("/volumes/{0}", self.parameters.volume_name)
except APIError as e:
self.client.fail(to_native(e))
self.client.fail(to_text(e))

self.actions.append(f"Removed volume {self.parameters.volume_name}")
self.results["changed"] = True

@@ -24,14 +24,14 @@ if t.TYPE_CHECKING:
from collections.abc import Callable


@pytest.fixture(scope="module")
def templar() -> Templar:
@pytest.fixture(scope="module", name="templar")
def templar_fixture() -> Templar:
dataloader = create_autospec(DataLoader, instance=True)
return Templar(loader=dataloader)


@pytest.fixture(scope="module")
def inventory(templar: Templar) -> InventoryModule:
@pytest.fixture(scope="module", name="inventory")
def inventory_fixture(templar: Templar) -> InventoryModule:
r = InventoryModule()
r.inventory = InventoryData()
r.templar = templar
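The fixture hunks address redefined-outer-name by giving each fixture function a distinct name and exposing the original name through `pytest.fixture(name=...)`, so test parameters no longer shadow a module-level function. A minimal standalone sketch of the pattern (the names here are illustrative, not from the collection's test suite):

```python
import pytest


@pytest.fixture(name="client")
def client_fixture() -> dict[str, str]:
    # The fixture function is called client_fixture, but tests still request
    # it as "client"; no test argument shadows a same-named outer function.
    return {"base_url": "unix://var/run/docker.sock"}


def test_client_has_base_url(client: dict[str, str]) -> None:
    assert client["base_url"].startswith("unix://")
```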
@@ -52,7 +52,7 @@ if t.TYPE_CHECKING:
DEFAULT_TIMEOUT_SECONDS = constants.DEFAULT_TIMEOUT_SECONDS


def response(
def create_response(
status_code: int = 200,
content: bytes | dict[str, t.Any] | list[dict[str, t.Any]] = b"",
headers: dict[str, str] | None = None,

@@ -95,7 +95,7 @@ def fake_resp(
if not key:
raise NotImplementedError(f"{method} {url}")
status_code, content = fake_api.fake_responses[key]()
return response(status_code=status_code, content=content)
return create_response(status_code=status_code, content=content)


fake_request = mock.Mock(side_effect=fake_resp)

@@ -328,26 +328,26 @@ class DockerApiTest(BaseAPIClientTest):

# pass `decode=False` to the helper
raw_resp._fp.seek(0)
resp = response(status_code=status_code, content=content, raw=raw_resp)
resp = create_response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp))
assert result == content_str

# pass `decode=True` to the helper
raw_resp._fp.seek(0)
resp = response(status_code=status_code, content=content, raw=raw_resp)
resp = create_response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp, decode=True))
assert result == content

# non-chunked response, pass `decode=False` to the helper
setattr(raw_resp._fp, "chunked", False)
raw_resp._fp.seek(0)
resp = response(status_code=status_code, content=content, raw=raw_resp)
resp = create_response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp))
assert result == content_str.decode("utf-8") # type: ignore

# non-chunked response, pass `decode=True` to the helper
raw_resp._fp.seek(0)
resp = response(status_code=status_code, content=content, raw=raw_resp)
resp = create_response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp, decode=True))
assert result == content

@@ -22,8 +22,8 @@ from ..test_support.docker_image_archive_stubbing import (
)


@pytest.fixture
def tar_file_name(tmpdir: t.Any) -> str:
@pytest.fixture(name="tar_file_name")
def tar_file_name_fixture(tmpdir: t.Any) -> str:
"""
Return the name of a non-existing tar file in an existing temporary directory.
"""

@@ -37,8 +37,8 @@ def capture_logging(messages: list[str]) -> Callable[[str], None]:
return capture


@pytest.fixture
def tar_file_name(tmpdir: t.Any) -> str:
@pytest.fixture(name="tar_file_name")
def tar_file_name_fixture(tmpdir: t.Any) -> str:
"""
Return the name of a non-existing tar file in an existing temporary directory.
"""
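The last test hunks rename a module-level helper from `response` to `create_response` so that test-local variables or parameters called `response` no longer trigger redefined-outer-name. A minimal sketch of that kind of rename (the `requests.Response` construction is a plausible stand-in for the helper's body, not copied from the test suite):

```python
import requests


def create_response(status_code: int = 200, content: bytes = b"") -> requests.Response:
    # Renamed from "response" so test-local variables called "response"
    # do not shadow this module-level helper.
    res = requests.Response()
    res.status_code = status_code
    res._content = content  # test-only shortcut; Response normally fills this itself
    return res


def test_ok_status() -> None:
    response = create_response(status_code=200, content=b"{}")  # no shadowing now
    assert response.status_code == 200
```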