Cleanup with ruff check (#1182)

* Implement improvements suggested by ruff check.

* Add ruff check to CI.
This commit is contained in:
Felix Fontein 2025-10-28 06:58:15 +01:00 committed by GitHub
parent 3bade286f8
commit dbc7b0ec18
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
40 changed files with 247 additions and 232 deletions

View File

@ -19,6 +19,8 @@ stable_branches = [ "stable-*" ]
run_isort = true run_isort = true
isort_config = ".isort.cfg" isort_config = ".isort.cfg"
run_black = true run_black = true
run_ruff_check = true
ruff_check_config = "ruff.toml"
run_flake8 = true run_flake8 = true
flake8_config = ".flake8" flake8_config = ".flake8"
run_pylint = true run_pylint = true

View File

@ -698,9 +698,7 @@ class APIClient(_Session):
if auth.INDEX_URL not in auth_data and auth.INDEX_NAME in auth_data: if auth.INDEX_URL not in auth_data and auth.INDEX_NAME in auth_data:
auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {}) auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
log.debug( log.debug("Sending auth config (%s)", ", ".join(repr(k) for k in auth_data))
"Sending auth config (%s)", ", ".join(repr(k) for k in auth_data.keys())
)
if auth_data: if auth_data:
headers["X-Registry-Config"] = auth.encode_header(auth_data) headers["X-Registry-Config"] = auth.encode_header(auth_data)

View File

@ -292,7 +292,7 @@ class AuthConfig(dict):
log.debug("No entry found") log.debug("No entry found")
return None return None
except StoreError as e: except StoreError as e:
raise errors.DockerException(f"Credentials store error: {e}") raise errors.DockerException(f"Credentials store error: {e}") from e
def _get_store_instance(self, name: str) -> Store: def _get_store_instance(self, name: str) -> Store:
if name not in self._stores: if name not in self._stores:
@ -310,7 +310,7 @@ class AuthConfig(dict):
if self.creds_store: if self.creds_store:
# Retrieve all credentials from the default store # Retrieve all credentials from the default store
store = self._get_store_instance(self.creds_store) store = self._get_store_instance(self.creds_store)
for k in store.list().keys(): for k in store.list():
auth_data[k] = self._resolve_authconfig_credstore(k, self.creds_store) auth_data[k] = self._resolve_authconfig_credstore(k, self.creds_store)
auth_data[convert_to_hostname(k)] = auth_data[k] auth_data[convert_to_hostname(k)] = auth_data[k]

View File

@ -102,8 +102,7 @@ def get_tls_dir(name: str | None = None, endpoint: str = "") -> str:
def get_context_host(path: str | None = None, tls: bool = False) -> str: def get_context_host(path: str | None = None, tls: bool = False) -> str:
host = parse_host(path, IS_WINDOWS_PLATFORM, tls) host = parse_host(path, IS_WINDOWS_PLATFORM, tls)
if host == DEFAULT_UNIX_SOCKET: if host == DEFAULT_UNIX_SOCKET and host.startswith("http+"):
# remove http+ from default docker socket url # remove http+ from default docker socket url
if host.startswith("http+"):
host = host[5:] host = host[5:]
return host return host

View File

@ -90,13 +90,13 @@ class Store:
env=env, env=env,
) )
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
raise errors.process_store_error(e, self.program) raise errors.process_store_error(e, self.program) from e
except OSError as e: except OSError as e:
if e.errno == errno.ENOENT: if e.errno == errno.ENOENT:
raise errors.StoreError( raise errors.StoreError(
f"{self.program} not installed or not available in PATH" f"{self.program} not installed or not available in PATH"
) ) from e
raise errors.StoreError( raise errors.StoreError(
f'Unexpected OS error "{e.strerror}", errno={e.errno}' f'Unexpected OS error "{e.strerror}", errno={e.errno}'
) ) from e
return output return output

View File

@ -98,7 +98,7 @@ def create_archive(
extra_files = extra_files or [] extra_files = extra_files or []
if not fileobj: if not fileobj:
# pylint: disable-next=consider-using-with # pylint: disable-next=consider-using-with
fileobj = tempfile.NamedTemporaryFile() fileobj = tempfile.NamedTemporaryFile() # noqa: SIM115
with tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj) as tarf: with tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj) as tarf:
if files is None: if files is None:
@ -146,7 +146,8 @@ def create_archive(
def mkbuildcontext(dockerfile: io.BytesIO | t.IO[bytes]) -> t.IO[bytes]: def mkbuildcontext(dockerfile: io.BytesIO | t.IO[bytes]) -> t.IO[bytes]:
f = tempfile.NamedTemporaryFile() # pylint: disable=consider-using-with # pylint: disable-next=consider-using-with
f = tempfile.NamedTemporaryFile() # noqa: SIM115
try: try:
with tarfile.open(mode="w", fileobj=f) as tarf: with tarfile.open(mode="w", fileobj=f) as tarf:
if isinstance(dockerfile, io.StringIO): # type: ignore if isinstance(dockerfile, io.StringIO): # type: ignore
@ -195,8 +196,11 @@ class PatternMatcher:
for pattern in self.patterns: for pattern in self.patterns:
negative = pattern.exclusion negative = pattern.exclusion
match = pattern.match(filepath) match = pattern.match(filepath)
if not match and parent_path != "": if (
if len(pattern.dirs) <= len(parent_path_dirs): not match
and parent_path != ""
and len(pattern.dirs) <= len(parent_path_dirs)
):
match = pattern.match( match = pattern.match(
os.path.sep.join(parent_path_dirs[: len(pattern.dirs)]) os.path.sep.join(parent_path_dirs[: len(pattern.dirs)])
) )

View File

@ -22,7 +22,7 @@ from ..transport.npipesocket import NpipeSocket
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
from collections.abc import Iterable, Sequence from collections.abc import Sequence
from ..._socket_helper import SocketLike from ..._socket_helper import SocketLike
@ -59,8 +59,8 @@ def read(socket: SocketLike, n: int = 4096) -> bytes | None:
try: try:
if hasattr(socket, "recv"): if hasattr(socket, "recv"):
return socket.recv(n) return socket.recv(n)
if isinstance(socket, getattr(pysocket, "SocketIO")): if isinstance(socket, pysocket.SocketIO): # type: ignore
return socket.read(n) return socket.read(n) # type: ignore[unreachable]
return os.read(socket.fileno(), n) return os.read(socket.fileno(), n)
except EnvironmentError as e: except EnvironmentError as e:
if e.errno not in recoverable_errors: if e.errno not in recoverable_errors:

View File

@ -36,7 +36,6 @@ from ..tls import TLSConfig
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import ssl
from collections.abc import Mapping, Sequence from collections.abc import Mapping, Sequence
@ -298,7 +297,7 @@ def parse_host(addr: str | None, is_win32: bool = False, tls: bool = False) -> s
if proto == "unix" and parsed_url.hostname is not None: if proto == "unix" and parsed_url.hostname is not None:
# For legacy reasons, we consider unix://path # For legacy reasons, we consider unix://path
# to be valid and equivalent to unix:///path # to be valid and equivalent to unix:///path
path = "/".join((parsed_url.hostname, path)) path = f"{parsed_url.hostname}/{path}"
netloc = parsed_url.netloc netloc = parsed_url.netloc
if proto in ("tcp", "ssh"): if proto in ("tcp", "ssh"):
@ -429,8 +428,7 @@ def parse_bytes(s: int | float | str) -> int | float:
if len(s) == 0: if len(s) == 0:
return 0 return 0
if s[-2:-1].isalpha() and s[-1].isalpha(): if s[-2:-1].isalpha() and s[-1].isalpha() and (s[-1] == "b" or s[-1] == "B"):
if s[-1] == "b" or s[-1] == "B":
s = s[:-1] s = s[:-1]
units = BYTE_UNITS units = BYTE_UNITS
suffix = s[-1].lower() suffix = s[-1].lower()

View File

@ -718,8 +718,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
) -> None: ) -> None:
self.option_minimal_versions: dict[str, dict[str, t.Any]] = {} self.option_minimal_versions: dict[str, dict[str, t.Any]] = {}
for option in self.module.argument_spec: for option in self.module.argument_spec:
if ignore_params is not None: if ignore_params is not None and option in ignore_params:
if option in ignore_params:
continue continue
self.option_minimal_versions[option] = {} self.option_minimal_versions[option] = {}
self.option_minimal_versions.update(option_minimal_versions) self.option_minimal_versions.update(option_minimal_versions)

View File

@ -654,8 +654,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
) -> None: ) -> None:
self.option_minimal_versions: dict[str, dict[str, t.Any]] = {} self.option_minimal_versions: dict[str, dict[str, t.Any]] = {}
for option in self.module.argument_spec: for option in self.module.argument_spec:
if ignore_params is not None: if ignore_params is not None and option in ignore_params:
if option in ignore_params:
continue continue
self.option_minimal_versions[option] = {} self.option_minimal_versions[option] = {}
self.option_minimal_versions.update(option_minimal_versions) self.option_minimal_versions.update(option_minimal_versions)

View File

@ -690,9 +690,7 @@ def emit_warnings(
def is_failed(events: Sequence[Event], rc: int) -> bool: def is_failed(events: Sequence[Event], rc: int) -> bool:
if rc: return bool(rc)
return True
return False
def update_failed( def update_failed(

View File

@ -479,8 +479,7 @@ def fetch_file(
reader = tar.extractfile(member) reader = tar.extractfile(member)
if reader: if reader:
with reader as in_f: with reader as in_f, open(b_out_path, "wb") as out_f:
with open(b_out_path, "wb") as out_f:
shutil.copyfileobj(in_f, out_f) shutil.copyfileobj(in_f, out_f)
return in_path return in_path

View File

@ -890,9 +890,10 @@ def _preprocess_mounts(
check_collision(container, "volumes") check_collision(container, "volumes")
new_vols.append(f"{host}:{container}:{mode}") new_vols.append(f"{host}:{container}:{mode}")
continue continue
if len(parts) == 2: if (
if not _is_volume_permissions(parts[1]) and re.match( len(parts) == 2
r"[.~]", parts[0] and not _is_volume_permissions(parts[1])
and re.match(r"[.~]", parts[0])
): ):
host = os.path.abspath(os.path.expanduser(parts[0])) host = os.path.abspath(os.path.expanduser(parts[0]))
check_collision(parts[1], "volumes") check_collision(parts[1], "volumes")

View File

@ -219,12 +219,11 @@ class DockerAPIEngineDriver(EngineDriver[AnsibleDockerClient]):
return False return False
def is_container_running(self, container: dict[str, t.Any]) -> bool: def is_container_running(self, container: dict[str, t.Any]) -> bool:
if container.get("State"): return bool(
if container["State"].get("Running") and not container["State"].get( container.get("State")
"Ghost", False and container["State"].get("Running")
): and not container["State"].get("Ghost", False)
return True )
return False
def is_container_paused(self, container: dict[str, t.Any]) -> bool: def is_container_paused(self, container: dict[str, t.Any]) -> bool:
if container.get("State"): if container.get("State"):
@ -1706,8 +1705,7 @@ def _get_expected_values_mounts(
parts = vol.split(":") parts = vol.split(":")
if len(parts) == 3: if len(parts) == 3:
continue continue
if len(parts) == 2: if len(parts) == 2 and not _is_volume_permissions(parts[1]):
if not _is_volume_permissions(parts[1]):
continue continue
expected_vols[vol] = {} expected_vols[vol] = {}
if expected_vols: if expected_vols:
@ -1805,8 +1803,7 @@ def _set_values_mounts(
parts = volume.split(":") parts = volume.split(":")
if len(parts) == 3: if len(parts) == 3:
continue continue
if len(parts) == 2: if len(parts) == 2 and not _is_volume_permissions(parts[1]):
if not _is_volume_permissions(parts[1]):
continue continue
volumes[volume] = {} volumes[volume] = {}
data["Volumes"] = volumes data["Volumes"] = volumes

View File

@ -217,10 +217,12 @@ class ContainerManager(DockerBaseClass, t.Generic[Client]):
"The wildcard can only be used with comparison modes 'strict' and 'ignore'!" "The wildcard can only be used with comparison modes 'strict' and 'ignore'!"
) )
for option in self.all_options.values(): for option in self.all_options.values():
if option.name == "networks":
# `networks` is special: only update if # `networks` is special: only update if
# some value is actually specified # some value is actually specified
if self.module.params["networks"] is None: if (
option.name == "networks"
and self.module.params["networks"] is None
):
continue continue
option.comparison = value option.comparison = value
# Now process all other comparisons. # Now process all other comparisons.
@ -679,9 +681,13 @@ class ContainerManager(DockerBaseClass, t.Generic[Client]):
def _image_is_different( def _image_is_different(
self, image: dict[str, t.Any] | None, container: Container self, image: dict[str, t.Any] | None, container: Container
) -> bool: ) -> bool:
if image and image.get("Id"): if (
if container and container.image: image
if image.get("Id") != container.image: and image.get("Id")
and container
and container.image
and image.get("Id") != container.image
):
self.diff_tracker.add( self.diff_tracker.add(
"image", parameter=image.get("Id"), active=container.image "image", parameter=image.get("Id"), active=container.image
) )
@ -927,8 +933,7 @@ class ContainerManager(DockerBaseClass, t.Generic[Client]):
"ipv6_address" "ipv6_address"
] != network_info_ipam.get("IPv6Address"): ] != network_info_ipam.get("IPv6Address"):
diff = True diff = True
if network.get("aliases"): if network.get("aliases") and not compare_generic(
if not compare_generic(
network["aliases"], network["aliases"],
network_info.get("Aliases"), network_info.get("Aliases"),
"allow_more_present", "allow_more_present",

View File

@ -73,7 +73,7 @@ class DockerSocketHandlerBase:
def __exit__( def __exit__(
self, self,
type_: t.Type[BaseException] | None, type_: type[BaseException] | None,
value: BaseException | None, value: BaseException | None,
tb: TracebackType | None, tb: TracebackType | None,
) -> None: ) -> None:
@ -199,8 +199,7 @@ class DockerSocketHandlerBase:
if event & selectors.EVENT_WRITE != 0: if event & selectors.EVENT_WRITE != 0:
self._write() self._write()
result = len(events) result = len(events)
if self._paramiko_read_workaround and len(self._write_buffer) > 0: if self._paramiko_read_workaround and len(self._write_buffer) > 0 and self._sock.send_ready(): # type: ignore
if self._sock.send_ready(): # type: ignore
self._write() self._write()
result += 1 result += 1
return result > 0 return result > 0

View File

@ -64,8 +64,8 @@ def shutdown_writing(
# probably: "TypeError: shutdown() takes 1 positional argument but 2 were given" # probably: "TypeError: shutdown() takes 1 positional argument but 2 were given"
log(f"Shutting down for writing not possible; trying shutdown instead: {e}") log(f"Shutting down for writing not possible; trying shutdown instead: {e}")
sock.shutdown() # type: ignore sock.shutdown() # type: ignore
elif isinstance(sock, getattr(pysocket, "SocketIO")): elif isinstance(sock, pysocket.SocketIO): # type: ignore
sock._sock.shutdown(pysocket.SHUT_WR) sock._sock.shutdown(pysocket.SHUT_WR) # type: ignore[unreachable]
else: else:
log("No idea how to signal end of writing") log("No idea how to signal end of writing")

View File

@ -115,9 +115,7 @@ class AnsibleDockerSwarmClient(AnsibleDockerClient):
:return: True if node is Swarm Worker, False otherwise :return: True if node is Swarm Worker, False otherwise
""" """
if self.check_if_swarm_node() and not self.check_if_swarm_manager(): return bool(self.check_if_swarm_node() and not self.check_if_swarm_manager())
return True
return False
def check_if_swarm_node_is_down( def check_if_swarm_node_is_down(
self, node_id: str | None = None, repeat_check: int = 1 self, node_id: str | None = None, repeat_check: int = 1
@ -181,8 +179,7 @@ class AnsibleDockerSwarmClient(AnsibleDockerClient):
self.fail( self.fail(
"Cannot inspect node: To inspect node execute module on Swarm Manager" "Cannot inspect node: To inspect node execute module on Swarm Manager"
) )
if exc.status_code == 404: if exc.status_code == 404 and skip_missing:
if skip_missing:
return None return None
self.fail(f"Error while reading from Swarm manager: {exc}") self.fail(f"Error while reading from Swarm manager: {exc}")
except Exception as exc: # pylint: disable=broad-exception-caught except Exception as exc: # pylint: disable=broad-exception-caught
@ -191,8 +188,7 @@ class AnsibleDockerSwarmClient(AnsibleDockerClient):
json_str = json.dumps(node_info, ensure_ascii=False) json_str = json.dumps(node_info, ensure_ascii=False)
node_info = json.loads(json_str) node_info = json.loads(json_str)
if "ManagerStatus" in node_info: if "ManagerStatus" in node_info and node_info["ManagerStatus"].get("Leader"):
if node_info["ManagerStatus"].get("Leader"):
# This is workaround of bug in Docker when in some cases the Leader IP is 0.0.0.0 # This is workaround of bug in Docker when in some cases the Leader IP is 0.0.0.0
# Check moby/moby#35437 for details # Check moby/moby#35437 for details
count_colons = node_info["ManagerStatus"]["Addr"].count(":") count_colons = node_info["ManagerStatus"]["Addr"].count(":")

View File

@ -27,7 +27,7 @@ if t.TYPE_CHECKING:
from ._common_api import AnsibleDockerClientBase as CAPIADCB from ._common_api import AnsibleDockerClientBase as CAPIADCB
from ._common_cli import AnsibleDockerClientBase as CCLIADCB from ._common_cli import AnsibleDockerClientBase as CCLIADCB
Client = t.Union[CADCB, CAPIADCB, CCLIADCB] Client = t.Union[CADCB, CAPIADCB, CCLIADCB] # noqa: UP007
DEFAULT_DOCKER_HOST = "unix:///var/run/docker.sock" DEFAULT_DOCKER_HOST = "unix:///var/run/docker.sock"
@ -94,9 +94,7 @@ BYTE_SUFFIXES = ["B", "KB", "MB", "GB", "TB", "PB"]
def is_image_name_id(name: str) -> bool: def is_image_name_id(name: str) -> bool:
"""Check whether the given image name is in fact an image ID (hash).""" """Check whether the given image name is in fact an image ID (hash)."""
if re.match("^sha256:[0-9a-fA-F]{64}$", name): return bool(re.match("^sha256:[0-9a-fA-F]{64}$", name))
return True
return False
def is_valid_tag(tag: str, allow_empty: bool = False) -> bool: def is_valid_tag(tag: str, allow_empty: bool = False) -> bool:

View File

@ -585,10 +585,10 @@ class ServicesManager(BaseComposeManager):
return args return args
def _are_containers_stopped(self) -> bool: def _are_containers_stopped(self) -> bool:
for container in self.list_containers_raw(): return all(
if container["State"] not in ("created", "exited", "stopped", "killed"): container["State"] in ("created", "exited", "stopped", "killed")
return False for container in self.list_containers_raw()
return True )
def cmd_stop(self) -> dict[str, t.Any]: def cmd_stop(self) -> dict[str, t.Any]:
# Since 'docker compose stop' **always** claims it is stopping containers, even if they are already # Since 'docker compose stop' **always** claims it is stopping containers, even if they are already

View File

@ -287,6 +287,8 @@ def are_fileobjs_equal_read_first(
def is_container_file_not_regular_file(container_stat: dict[str, t.Any]) -> bool: def is_container_file_not_regular_file(container_stat: dict[str, t.Any]) -> bool:
return any(
container_stat["mode"] & 1 << bit != 0
for bit in ( for bit in (
# https://pkg.go.dev/io/fs#FileMode # https://pkg.go.dev/io/fs#FileMode
32 - 1, # ModeDir 32 - 1, # ModeDir
@ -297,10 +299,8 @@ def is_container_file_not_regular_file(container_stat: dict[str, t.Any]) -> bool
32 - 8, # ModeSocket 32 - 8, # ModeSocket
32 - 11, # ModeCharDevice 32 - 11, # ModeCharDevice
32 - 13, # ModeIrregular 32 - 13, # ModeIrregular
): )
if container_stat["mode"] & (1 << bit) != 0: )
return True
return False
def get_container_file_mode(container_stat: dict[str, t.Any]) -> int: def get_container_file_mode(container_stat: dict[str, t.Any]) -> int:
@ -420,7 +420,7 @@ def retrieve_diff(
def is_binary(content: bytes) -> bool: def is_binary(content: bytes) -> bool:
if b"\x00" in content: if b"\x00" in content: # noqa: SIM103
return True return True
# TODO: better detection # TODO: better detection
# (ansible-core also just checks for 0x00, and even just sticks to the first 8k, so this is not too bad...) # (ansible-core also just checks for 0x00, and even just sticks to the first 8k, so this is not too bad...)
@ -695,8 +695,7 @@ def is_file_idempotent(
mf = tar.extractfile(member) mf = tar.extractfile(member)
if mf is None: if mf is None:
raise AssertionError("Member should be present for regular file") raise AssertionError("Member should be present for regular file")
with mf as tar_f: with mf as tar_f, open(managed_path, "rb") as local_f:
with open(managed_path, "rb") as local_f:
is_equal = are_fileobjs_equal_with_diff_of_first( is_equal = are_fileobjs_equal_with_diff_of_first(
tar_f, local_f, member.size, diff, max_file_size_for_diff, in_path tar_f, local_f, member.size, diff, max_file_size_for_diff, in_path
) )

View File

@ -902,7 +902,7 @@ class ImageManager(DockerBaseClass):
buildargs[key] = to_text(value) buildargs[key] = to_text(value)
container_limits = self.container_limits or {} container_limits = self.container_limits or {}
for key in container_limits.keys(): for key in container_limits:
if key not in CONTAINER_LIMITS_KEYS: if key not in CONTAINER_LIMITS_KEYS:
raise DockerException(f"Invalid container_limits key {key}") raise DockerException(f"Invalid container_limits key {key}")
@ -1207,9 +1207,9 @@ def main() -> None:
if not is_valid_tag(client.module.params["tag"], allow_empty=True): if not is_valid_tag(client.module.params["tag"], allow_empty=True):
client.fail(f'"{client.module.params["tag"]}" is not a valid docker tag!') client.fail(f'"{client.module.params["tag"]}" is not a valid docker tag!')
if client.module.params["source"] == "build": if client.module.params["source"] == "build" and (
if not client.module.params["build"] or not client.module.params["build"].get( not client.module.params["build"]
"path" or not client.module.params["build"].get("path")
): ):
client.fail( client.fail(
'If "source" is set to "build", the "build.path" option must be specified.' 'If "source" is set to "build", the "build.path" option must be specified.'

View File

@ -368,13 +368,17 @@ class ImageBuilder(DockerBaseClass):
if self.secrets: if self.secrets:
for secret in self.secrets: for secret in self.secrets:
if secret["type"] in ("env", "value"): if secret["type"] in ("env", "value") and LooseVersion(
if LooseVersion(buildx_version) < LooseVersion("0.6.0"): buildx_version
) < LooseVersion("0.6.0"):
self.fail( self.fail(
f"The Docker buildx plugin has version {buildx_version}, but 0.6.0 is needed for secrets of type=env and type=value" f"The Docker buildx plugin has version {buildx_version}, but 0.6.0 is needed for secrets of type=env and type=value"
) )
if self.outputs and len(self.outputs) > 1: if (
if LooseVersion(buildx_version) < LooseVersion("0.13.0"): self.outputs
and len(self.outputs) > 1
and LooseVersion(buildx_version) < LooseVersion("0.13.0")
):
self.fail( self.fail(
f"The Docker buildx plugin has version {buildx_version}, but 0.13.0 is needed to specify more than one output" f"The Docker buildx plugin has version {buildx_version}, but 0.13.0 is needed to specify more than one output"
) )
@ -530,8 +534,7 @@ class ImageBuilder(DockerBaseClass):
"image": image or {}, "image": image or {},
} }
if image: if image and self.rebuild == "never":
if self.rebuild == "never":
return results return results
results["changed"] = True results["changed"] = True

View File

@ -478,17 +478,15 @@ class DockerNetworkManager:
) )
else: else:
for key, value in self.parameters.driver_options.items(): for key, value in self.parameters.driver_options.items():
if not (key in net["Options"]) or value != net["Options"][key]: if key not in net["Options"] or value != net["Options"][key]:
differences.add( differences.add(
f"driver_options.{key}", f"driver_options.{key}",
parameter=value, parameter=value,
active=net["Options"].get(key), active=net["Options"].get(key),
) )
if self.parameters.ipam_driver: if self.parameters.ipam_driver and (
if ( not net.get("IPAM") or net["IPAM"]["Driver"] != self.parameters.ipam_driver
not net.get("IPAM")
or net["IPAM"]["Driver"] != self.parameters.ipam_driver
): ):
differences.add( differences.add(
"ipam_driver", "ipam_driver",
@ -597,7 +595,7 @@ class DockerNetworkManager:
) )
else: else:
for key, value in self.parameters.labels.items(): for key, value in self.parameters.labels.items():
if not (key in net["Labels"]) or value != net["Labels"][key]: if key not in net["Labels"] or value != net["Labels"][key]:
differences.add( differences.add(
f"labels.{key}", f"labels.{key}",
parameter=value, parameter=value,

View File

@ -216,14 +216,14 @@ class SwarmNodeManager(DockerBaseClass):
if self.parameters.role is None: if self.parameters.role is None:
node_spec["Role"] = node_info["Spec"]["Role"] node_spec["Role"] = node_info["Spec"]["Role"]
else: else:
if not node_info["Spec"]["Role"] == self.parameters.role: if node_info["Spec"]["Role"] != self.parameters.role:
node_spec["Role"] = self.parameters.role node_spec["Role"] = self.parameters.role
changed = True changed = True
if self.parameters.availability is None: if self.parameters.availability is None:
node_spec["Availability"] = node_info["Spec"]["Availability"] node_spec["Availability"] = node_info["Spec"]["Availability"]
else: else:
if not node_info["Spec"]["Availability"] == self.parameters.availability: if node_info["Spec"]["Availability"] != self.parameters.availability:
node_info["Spec"]["Availability"] = self.parameters.availability node_info["Spec"]["Availability"] = self.parameters.availability
changed = True changed = True

View File

@ -1,5 +1,4 @@
#!/usr/bin/python #!/usr/bin/python
# coding: utf-8
# #
# Copyright (c) 2021 Red Hat | Ansible Sakar Mehra<@sakarmehra100@gmail.com | @sakar97> # Copyright (c) 2021 Red Hat | Ansible Sakar Mehra<@sakarmehra100@gmail.com | @sakar97>
# Copyright (c) 2019, Vladimir Porshkevich (@porshkevich) <neosonic@mail.ru> # Copyright (c) 2019, Vladimir Porshkevich (@porshkevich) <neosonic@mail.ru>
@ -281,7 +280,7 @@ class DockerPluginManager:
stream=True, stream=True,
) )
self.client._raise_for_status(response) self.client._raise_for_status(response)
for data in self.client._stream_helper(response, decode=True): for dummy in self.client._stream_helper(response, decode=True):
pass pass
# Inspect and configure plugin # Inspect and configure plugin
self.existing_plugin = self.client.get_json( self.existing_plugin = self.client.get_json(

View File

@ -322,7 +322,7 @@ def main() -> None:
before_after_differences = json_diff( before_after_differences = json_diff(
before_stack_services, after_stack_services before_stack_services, after_stack_services
) )
for k in before_after_differences.keys(): for k in before_after_differences:
if isinstance(before_after_differences[k], dict): if isinstance(before_after_differences[k], dict):
before_after_differences[k].pop("UpdatedAt", None) before_after_differences[k].pop("UpdatedAt", None)
before_after_differences[k].pop("Version", None) before_after_differences[k].pop("Version", None)

View File

@ -554,8 +554,7 @@ class SwarmManager(DockerBaseClass):
except APIError as exc: except APIError as exc:
self.client.fail(f"Can not create a new Swarm Cluster: {exc}") self.client.fail(f"Can not create a new Swarm Cluster: {exc}")
if not self.client.check_if_swarm_manager(): if not self.client.check_if_swarm_manager() and not self.check_mode:
if not self.check_mode:
self.client.fail("Swarm not created or other error!") self.client.fail("Swarm not created or other error!")
self.created = True self.created = True

View File

@ -2380,12 +2380,12 @@ class DockerServiceManager:
ds.container_labels = task_template_data["ContainerSpec"].get("Labels") ds.container_labels = task_template_data["ContainerSpec"].get("Labels")
mode = raw_data["Spec"]["Mode"] mode = raw_data["Spec"]["Mode"]
if "Replicated" in mode.keys(): if "Replicated" in mode:
ds.mode = to_text("replicated", encoding="utf-8") ds.mode = to_text("replicated", encoding="utf-8")
ds.replicas = mode["Replicated"]["Replicas"] ds.replicas = mode["Replicated"]["Replicas"]
elif "Global" in mode.keys(): elif "Global" in mode:
ds.mode = "global" ds.mode = "global"
elif "ReplicatedJob" in mode.keys(): elif "ReplicatedJob" in mode:
ds.mode = to_text("replicated-job", encoding="utf-8") ds.mode = to_text("replicated-job", encoding="utf-8")
ds.replicas = mode["ReplicatedJob"]["TotalCompletions"] ds.replicas = mode["ReplicatedJob"]["TotalCompletions"]
else: else:
@ -2649,10 +2649,9 @@ class DockerServiceManager:
def _detect_publish_mode_usage(client: AnsibleDockerClient) -> bool: def _detect_publish_mode_usage(client: AnsibleDockerClient) -> bool:
for publish_def in client.module.params["publish"] or []: return any(
if publish_def.get("mode"): publish_def.get("mode") for publish_def in client.module.params["publish"] or []
return True )
return False
def _detect_healthcheck_start_period(client: AnsibleDockerClient) -> bool: def _detect_healthcheck_start_period(client: AnsibleDockerClient) -> bool:

View File

@ -1,5 +1,4 @@
#!/usr/bin/python #!/usr/bin/python
# coding: utf-8
# #
# Copyright 2017 Red Hat | Ansible, Alex Grönholm <alex.gronholm@nextday.fi> # Copyright 2017 Red Hat | Ansible, Alex Grönholm <alex.gronholm@nextday.fi>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)

View File

@ -1,5 +1,4 @@
#!/usr/bin/python #!/usr/bin/python
# coding: utf-8
# #
# Copyright 2017 Red Hat | Ansible, Alex Grönholm <alex.gronholm@nextday.fi> # Copyright 2017 Red Hat | Ansible, Alex Grönholm <alex.gronholm@nextday.fi>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)

31
ruff.toml Normal file
View File

@ -0,0 +1,31 @@
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
line-length = 160
[lint]
# https://docs.astral.sh/ruff/rules/
select = ["A", "B", "E", "F", "FA", "FLY", "UP", "SIM"]
ignore = [
# Better keep ignored (for now)
"F811", # Redefinition of unused `xxx` (happens a lot for fixtures in unit tests)
"E402", # Module level import not at top of file
"E741", # Ambiguous variable name
"UP012", # unnecessary-encode-utf8
"UP015", # Unnecessary mode argument
"SIM105", # suppressible-exception
"SIM108", # if-else-block-instead-of-if-exp
# To fix later:
"B905", # zip-without-explicit-strict - needs Python 3.10+
# To fix:
"UP024", # Replace aliased errors with `OSError`
]
# Allow fix for all enabled rules (when `--fix` is provided).
fixable = ["ALL"]
unfixable = []
# Allow unused variables when underscore-prefixed or starting with dummy
dummy-variable-rgx = "^(_|dummy).*$"

View File

@ -226,7 +226,7 @@ class DockerApiTest(BaseAPIClientTest):
def test_retrieve_server_version(self) -> None: def test_retrieve_server_version(self) -> None:
client = APIClient(version="auto") client = APIClient(version="auto")
assert isinstance(client._version, str) assert isinstance(client._version, str)
assert not (client._version == "auto") assert client._version != "auto"
client.close() client.close()
def test_auto_retrieve_server_version(self) -> None: def test_auto_retrieve_server_version(self) -> None:
@ -323,8 +323,8 @@ class DockerApiTest(BaseAPIClientTest):
# mock a stream interface # mock a stream interface
raw_resp = urllib3.HTTPResponse(body=body) raw_resp = urllib3.HTTPResponse(body=body)
setattr(raw_resp._fp, "chunked", True) raw_resp._fp.chunked = True
setattr(raw_resp._fp, "chunk_left", len(body.getvalue()) - 1) raw_resp._fp.chunk_left = len(body.getvalue()) - 1
# pass `decode=False` to the helper # pass `decode=False` to the helper
raw_resp._fp.seek(0) raw_resp._fp.seek(0)
@ -339,7 +339,7 @@ class DockerApiTest(BaseAPIClientTest):
assert result == content assert result == content
# non-chunked response, pass `decode=False` to the helper # non-chunked response, pass `decode=False` to the helper
setattr(raw_resp._fp, "chunked", False) raw_resp._fp.chunked = False
raw_resp._fp.seek(0) raw_resp._fp.seek(0)
resp = create_response(status_code=status_code, content=content, raw=raw_resp) resp = create_response(status_code=status_code, content=content, raw=raw_resp)
result = next(self.client._stream_helper(resp)) result = next(self.client._stream_helper(resp))
@ -503,7 +503,7 @@ class TCPSocketStreamTest(unittest.TestCase):
cls.thread.join() cls.thread.join()
@classmethod @classmethod
def get_handler_class(cls) -> t.Type[BaseHTTPRequestHandler]: def get_handler_class(cls) -> type[BaseHTTPRequestHandler]:
stdout_data = cls.stdout_data stdout_data = cls.stdout_data
stderr_data = cls.stderr_data stderr_data = cls.stderr_data

View File

@ -581,7 +581,7 @@ fake_responses: dict[str | tuple[str, str], Callable] = {
f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart": post_fake_restart_container, f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart": post_fake_restart_container,
f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b": delete_fake_remove_container, f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b": delete_fake_remove_container,
# TODO: the following is a duplicate of the import endpoint further above! # TODO: the following is a duplicate of the import endpoint further above!
f"{prefix}/{CURRENT_VERSION}/images/create": post_fake_image_create, f"{prefix}/{CURRENT_VERSION}/images/create": post_fake_image_create, # noqa: F601
f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128": delete_fake_remove_image, f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128": delete_fake_remove_image,
f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get": get_fake_get_image, f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get": get_fake_get_image,
f"{prefix}/{CURRENT_VERSION}/images/load": post_fake_load_image, f"{prefix}/{CURRENT_VERSION}/images/load": post_fake_load_image,

View File

@ -256,7 +256,7 @@ class ResolveAuthTest(unittest.TestCase):
m.return_value = None m.return_value = None
ac = auth.resolve_authconfig(auth_config, None) ac = auth.resolve_authconfig(auth_config, None)
assert ac is not None assert ac is not None
assert "indexuser" == ac["username"] assert ac["username"] == "indexuser"
class LoadConfigTest(unittest.TestCase): class LoadConfigTest(unittest.TestCase):

View File

@ -421,8 +421,9 @@ class TarTest(unittest.TestCase):
base = make_tree(dirs, files) base = make_tree(dirs, files)
self.addCleanup(shutil.rmtree, base) self.addCleanup(shutil.rmtree, base)
with tar(base, exclude=exclude) as archive: with tar(base, exclude=exclude) as archive, tarfile.open(
with tarfile.open(fileobj=archive) as tar_data: fileobj=archive
) as tar_data:
assert sorted(tar_data.getnames()) == sorted(expected_names) assert sorted(tar_data.getnames()) == sorted(expected_names)
def test_tar_with_empty_directory(self) -> None: def test_tar_with_empty_directory(self) -> None:
@ -430,8 +431,7 @@ class TarTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, base) self.addCleanup(shutil.rmtree, base)
for d in ["foo", "bar"]: for d in ["foo", "bar"]:
os.makedirs(os.path.join(base, d)) os.makedirs(os.path.join(base, d))
with tar(base) as archive: with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
with tarfile.open(fileobj=archive) as tar_data:
assert sorted(tar_data.getnames()) == ["bar", "foo"] assert sorted(tar_data.getnames()) == ["bar", "foo"]
@pytest.mark.skipif( @pytest.mark.skipif(
@ -458,8 +458,7 @@ class TarTest(unittest.TestCase):
f.write("content") f.write("content")
os.makedirs(os.path.join(base, "bar")) os.makedirs(os.path.join(base, "bar"))
os.symlink("../foo", os.path.join(base, "bar/foo")) os.symlink("../foo", os.path.join(base, "bar/foo"))
with tar(base) as archive: with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
with tarfile.open(fileobj=archive) as tar_data:
assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"]
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows") @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows")
@ -469,8 +468,7 @@ class TarTest(unittest.TestCase):
for d in ["foo", "bar"]: for d in ["foo", "bar"]:
os.makedirs(os.path.join(base, d)) os.makedirs(os.path.join(base, d))
os.symlink("../foo", os.path.join(base, "bar/foo")) os.symlink("../foo", os.path.join(base, "bar/foo"))
with tar(base) as archive: with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
with tarfile.open(fileobj=archive) as tar_data:
assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"]
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows") @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows")
@ -481,8 +479,7 @@ class TarTest(unittest.TestCase):
os.makedirs(os.path.join(base, d)) os.makedirs(os.path.join(base, d))
os.symlink("../baz", os.path.join(base, "bar/foo")) os.symlink("../baz", os.path.join(base, "bar/foo"))
with tar(base) as archive: with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
with tarfile.open(fileobj=archive) as tar_data:
assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"]
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No UNIX sockets on Win32") @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No UNIX sockets on Win32")
@ -494,8 +491,7 @@ class TarTest(unittest.TestCase):
sock = socket.socket(socket.AF_UNIX) sock = socket.socket(socket.AF_UNIX)
self.addCleanup(sock.close) self.addCleanup(sock.close)
sock.bind(os.path.join(base, "test.sock")) sock.bind(os.path.join(base, "test.sock"))
with tar(base) as archive: with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
with tarfile.open(fileobj=archive) as tar_data:
assert sorted(tar_data.getnames()) == ["bar", "foo"] assert sorted(tar_data.getnames()) == ["bar", "foo"]
def tar_test_negative_mtime_bug(self) -> None: def tar_test_negative_mtime_bug(self) -> None:
@ -505,8 +501,7 @@ class TarTest(unittest.TestCase):
with open(filename, "wt", encoding="utf-8") as f: with open(filename, "wt", encoding="utf-8") as f:
f.write("Invisible Full Moon") f.write("Invisible Full Moon")
os.utime(filename, (12345, -3600.0)) os.utime(filename, (12345, -3600.0))
with tar(base) as archive: with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data:
with tarfile.open(fileobj=archive) as tar_data:
assert tar_data.getnames() == ["th.txt"] assert tar_data.getnames() == ["th.txt"]
assert tar_data.getmember("th.txt").mtime == -3600 assert tar_data.getmember("th.txt").mtime == -3600

View File

@ -58,7 +58,12 @@ class KwargsFromEnvTest(unittest.TestCase):
self.os_environ = os.environ.copy() self.os_environ = os.environ.copy()
def tearDown(self) -> None: def tearDown(self) -> None:
os.environ = self.os_environ # type: ignore for k, v in self.os_environ.items():
if os.environ.get(k) != v:
os.environ[k] = v
for k in os.environ:
if k not in self.os_environ:
os.environ.pop(k)
def test_kwargs_from_env_empty(self) -> None: def test_kwargs_from_env_empty(self) -> None:
os.environ.update(DOCKER_HOST="", DOCKER_CERT_PATH="") os.environ.update(DOCKER_HOST="", DOCKER_CERT_PATH="")
@ -75,7 +80,7 @@ class KwargsFromEnvTest(unittest.TestCase):
DOCKER_TLS_VERIFY="1", DOCKER_TLS_VERIFY="1",
) )
kwargs = kwargs_from_env(assert_hostname=False) kwargs = kwargs_from_env(assert_hostname=False)
assert "tcp://192.168.59.103:2376" == kwargs["base_url"] assert kwargs["base_url"] == "tcp://192.168.59.103:2376"
assert "ca.pem" in kwargs["tls"].ca_cert assert "ca.pem" in kwargs["tls"].ca_cert
assert "cert.pem" in kwargs["tls"].cert[0] assert "cert.pem" in kwargs["tls"].cert[0]
assert "key.pem" in kwargs["tls"].cert[1] assert "key.pem" in kwargs["tls"].cert[1]
@ -99,7 +104,7 @@ class KwargsFromEnvTest(unittest.TestCase):
DOCKER_TLS_VERIFY="", DOCKER_TLS_VERIFY="",
) )
kwargs = kwargs_from_env(assert_hostname=True) kwargs = kwargs_from_env(assert_hostname=True)
assert "tcp://192.168.59.103:2376" == kwargs["base_url"] assert kwargs["base_url"] == "tcp://192.168.59.103:2376"
assert "ca.pem" in kwargs["tls"].ca_cert assert "ca.pem" in kwargs["tls"].ca_cert
assert "cert.pem" in kwargs["tls"].cert[0] assert "cert.pem" in kwargs["tls"].cert[0]
assert "key.pem" in kwargs["tls"].cert[1] assert "key.pem" in kwargs["tls"].cert[1]
@ -125,7 +130,7 @@ class KwargsFromEnvTest(unittest.TestCase):
) )
os.environ.pop("DOCKER_CERT_PATH", None) os.environ.pop("DOCKER_CERT_PATH", None)
kwargs = kwargs_from_env(assert_hostname=True) kwargs = kwargs_from_env(assert_hostname=True)
assert "tcp://192.168.59.103:2376" == kwargs["base_url"] assert kwargs["base_url"] == "tcp://192.168.59.103:2376"
def test_kwargs_from_env_no_cert_path(self) -> None: def test_kwargs_from_env_no_cert_path(self) -> None:
try: try:
@ -157,7 +162,7 @@ class KwargsFromEnvTest(unittest.TestCase):
"DOCKER_HOST": "http://docker.gensokyo.jp:2581", "DOCKER_HOST": "http://docker.gensokyo.jp:2581",
} }
) )
assert "http://docker.gensokyo.jp:2581" == kwargs["base_url"] assert kwargs["base_url"] == "http://docker.gensokyo.jp:2581"
assert "tls" not in kwargs assert "tls" not in kwargs

View File

@ -23,7 +23,6 @@ from ..test_support.docker_image_archive_stubbing import (
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
from collections.abc import Callable from collections.abc import Callable
from pathlib import Path
def assert_no_logging(msg: str) -> t.NoReturn: def assert_no_logging(msg: str) -> t.NoReturn:

View File

@ -156,7 +156,7 @@ def test_has_list_changed() -> None:
[{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}], sort_key="a" [{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}], sort_key="a"
) )
with pytest.raises(Exception): with pytest.raises(ValueError):
docker_swarm_service.has_list_changed( docker_swarm_service.has_list_changed(
[{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}] [{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}]
) )

View File

@ -36,8 +36,7 @@ def write_imitation_archive(
def write_imitation_archive_with_manifest( def write_imitation_archive_with_manifest(
file_name: str, manifest: list[dict[str, t.Any]] file_name: str, manifest: list[dict[str, t.Any]]
) -> None: ) -> None:
with tarfile.open(file_name, "w") as tf: with tarfile.open(file_name, "w") as tf, TemporaryFile() as f:
with TemporaryFile() as f:
f.write(json.dumps(manifest).encode("utf-8")) f.write(json.dumps(manifest).encode("utf-8"))
ti = tarfile.TarInfo("manifest.json") ti = tarfile.TarInfo("manifest.json")
@ -55,8 +54,7 @@ def write_irrelevant_tar(file_name: str) -> None:
:type file_name: str :type file_name: str
""" """
with tarfile.open(file_name, "w") as tf: with tarfile.open(file_name, "w") as tf, TemporaryFile() as f:
with TemporaryFile() as f:
f.write("Hello, world.".encode("utf-8")) f.write("Hello, world.".encode("utf-8"))
ti = tarfile.TarInfo("hi.txt") ti = tarfile.TarInfo("hi.txt")