From dbc7b0ec1872246e94e86838f1bb8eb9c6d4d699 Mon Sep 17 00:00:00 2001 From: Felix Fontein Date: Tue, 28 Oct 2025 06:58:15 +0100 Subject: [PATCH] Cleanup with ruff check (#1182) * Implement improvements suggested by ruff check. * Add ruff check to CI. --- antsibull-nox.toml | 2 + plugins/module_utils/_api/api/client.py | 4 +- plugins/module_utils/_api/auth.py | 4 +- plugins/module_utils/_api/context/config.py | 5 +-- .../module_utils/_api/credentials/store.py | 6 +-- plugins/module_utils/_api/utils/build.py | 18 +++++--- plugins/module_utils/_api/utils/socket.py | 6 +-- plugins/module_utils/_api/utils/utils.py | 8 ++-- plugins/module_utils/_common.py | 5 +-- plugins/module_utils/_common_api.py | 5 +-- plugins/module_utils/_compose_v2.py | 4 +- plugins/module_utils/_copy.py | 5 +-- .../module_utils/_module_container/base.py | 17 +++---- .../_module_container/docker_api.py | 21 ++++----- .../module_utils/_module_container/module.py | 45 ++++++++++--------- plugins/module_utils/_socket_handler.py | 9 ++-- plugins/module_utils/_socket_helper.py | 4 +- plugins/module_utils/_swarm.py | 34 +++++++------- plugins/module_utils/_util.py | 6 +-- plugins/modules/docker_compose_v2.py | 8 ++-- plugins/modules/docker_container_copy_into.py | 39 ++++++++-------- plugins/modules/docker_image.py | 16 +++---- plugins/modules/docker_image_build.py | 29 ++++++------ plugins/modules/docker_network.py | 22 +++++---- plugins/modules/docker_node.py | 4 +- plugins/modules/docker_plugin.py | 3 +- plugins/modules/docker_stack.py | 2 +- plugins/modules/docker_swarm.py | 5 +-- plugins/modules/docker_swarm_service.py | 13 +++--- plugins/modules/docker_volume.py | 1 - plugins/modules/docker_volume_info.py | 1 - ruff.toml | 31 +++++++++++++ .../module_utils/_api/api/test_client.py | 10 ++--- .../plugins/module_utils/_api/fake_api.py | 2 +- .../plugins/module_utils/_api/test_auth.py | 2 +- .../module_utils/_api/utils/test_build.py | 39 +++++++--------- .../module_utils/_api/utils/test_utils.py | 15 ++++--- .../unit/plugins/modules/test_docker_image.py | 1 - .../modules/test_docker_swarm_service.py | 2 +- .../docker_image_archive_stubbing.py | 26 +++++------ 40 files changed, 247 insertions(+), 232 deletions(-) create mode 100644 ruff.toml diff --git a/antsibull-nox.toml b/antsibull-nox.toml index 36fd080e..9fb3f3ba 100644 --- a/antsibull-nox.toml +++ b/antsibull-nox.toml @@ -19,6 +19,8 @@ stable_branches = [ "stable-*" ] run_isort = true isort_config = ".isort.cfg" run_black = true +run_ruff_check = true +ruff_check_config = "ruff.toml" run_flake8 = true flake8_config = ".flake8" run_pylint = true diff --git a/plugins/module_utils/_api/api/client.py b/plugins/module_utils/_api/api/client.py index 2457dfea..66a67ee6 100644 --- a/plugins/module_utils/_api/api/client.py +++ b/plugins/module_utils/_api/api/client.py @@ -698,9 +698,7 @@ class APIClient(_Session): if auth.INDEX_URL not in auth_data and auth.INDEX_NAME in auth_data: auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {}) - log.debug( - "Sending auth config (%s)", ", ".join(repr(k) for k in auth_data.keys()) - ) + log.debug("Sending auth config (%s)", ", ".join(repr(k) for k in auth_data)) if auth_data: headers["X-Registry-Config"] = auth.encode_header(auth_data) diff --git a/plugins/module_utils/_api/auth.py b/plugins/module_utils/_api/auth.py index 78271ef1..6630d16a 100644 --- a/plugins/module_utils/_api/auth.py +++ b/plugins/module_utils/_api/auth.py @@ -292,7 +292,7 @@ class AuthConfig(dict): log.debug("No entry found") return None except StoreError as 
e: - raise errors.DockerException(f"Credentials store error: {e}") + raise errors.DockerException(f"Credentials store error: {e}") from e def _get_store_instance(self, name: str) -> Store: if name not in self._stores: @@ -310,7 +310,7 @@ class AuthConfig(dict): if self.creds_store: # Retrieve all credentials from the default store store = self._get_store_instance(self.creds_store) - for k in store.list().keys(): + for k in store.list(): auth_data[k] = self._resolve_authconfig_credstore(k, self.creds_store) auth_data[convert_to_hostname(k)] = auth_data[k] diff --git a/plugins/module_utils/_api/context/config.py b/plugins/module_utils/_api/context/config.py index 04fddc12..205f6168 100644 --- a/plugins/module_utils/_api/context/config.py +++ b/plugins/module_utils/_api/context/config.py @@ -102,8 +102,7 @@ def get_tls_dir(name: str | None = None, endpoint: str = "") -> str: def get_context_host(path: str | None = None, tls: bool = False) -> str: host = parse_host(path, IS_WINDOWS_PLATFORM, tls) - if host == DEFAULT_UNIX_SOCKET: + if host == DEFAULT_UNIX_SOCKET and host.startswith("http+"): # remove http+ from default docker socket url - if host.startswith("http+"): - host = host[5:] + host = host[5:] return host diff --git a/plugins/module_utils/_api/credentials/store.py b/plugins/module_utils/_api/credentials/store.py index 1d560e91..a62f0085 100644 --- a/plugins/module_utils/_api/credentials/store.py +++ b/plugins/module_utils/_api/credentials/store.py @@ -90,13 +90,13 @@ class Store: env=env, ) except subprocess.CalledProcessError as e: - raise errors.process_store_error(e, self.program) + raise errors.process_store_error(e, self.program) from e except OSError as e: if e.errno == errno.ENOENT: raise errors.StoreError( f"{self.program} not installed or not available in PATH" - ) + ) from e raise errors.StoreError( f'Unexpected OS error "{e.strerror}", errno={e.errno}' - ) + ) from e return output diff --git a/plugins/module_utils/_api/utils/build.py b/plugins/module_utils/_api/utils/build.py index b46cc32a..22798803 100644 --- a/plugins/module_utils/_api/utils/build.py +++ b/plugins/module_utils/_api/utils/build.py @@ -98,7 +98,7 @@ def create_archive( extra_files = extra_files or [] if not fileobj: # pylint: disable-next=consider-using-with - fileobj = tempfile.NamedTemporaryFile() + fileobj = tempfile.NamedTemporaryFile() # noqa: SIM115 with tarfile.open(mode="w:gz" if gzip else "w", fileobj=fileobj) as tarf: if files is None: @@ -146,7 +146,8 @@ def create_archive( def mkbuildcontext(dockerfile: io.BytesIO | t.IO[bytes]) -> t.IO[bytes]: - f = tempfile.NamedTemporaryFile() # pylint: disable=consider-using-with + # pylint: disable-next=consider-using-with + f = tempfile.NamedTemporaryFile() # noqa: SIM115 try: with tarfile.open(mode="w", fileobj=f) as tarf: if isinstance(dockerfile, io.StringIO): # type: ignore @@ -195,11 +196,14 @@ class PatternMatcher: for pattern in self.patterns: negative = pattern.exclusion match = pattern.match(filepath) - if not match and parent_path != "": - if len(pattern.dirs) <= len(parent_path_dirs): - match = pattern.match( - os.path.sep.join(parent_path_dirs[: len(pattern.dirs)]) - ) + if ( + not match + and parent_path != "" + and len(pattern.dirs) <= len(parent_path_dirs) + ): + match = pattern.match( + os.path.sep.join(parent_path_dirs[: len(pattern.dirs)]) + ) if match: matched = not negative diff --git a/plugins/module_utils/_api/utils/socket.py b/plugins/module_utils/_api/utils/socket.py index 642a3997..5dbf2595 100644 --- 
a/plugins/module_utils/_api/utils/socket.py +++ b/plugins/module_utils/_api/utils/socket.py @@ -22,7 +22,7 @@ from ..transport.npipesocket import NpipeSocket if t.TYPE_CHECKING: - from collections.abc import Iterable, Sequence + from collections.abc import Sequence from ..._socket_helper import SocketLike @@ -59,8 +59,8 @@ def read(socket: SocketLike, n: int = 4096) -> bytes | None: try: if hasattr(socket, "recv"): return socket.recv(n) - if isinstance(socket, getattr(pysocket, "SocketIO")): - return socket.read(n) + if isinstance(socket, pysocket.SocketIO): # type: ignore + return socket.read(n) # type: ignore[unreachable] return os.read(socket.fileno(), n) except EnvironmentError as e: if e.errno not in recoverable_errors: diff --git a/plugins/module_utils/_api/utils/utils.py b/plugins/module_utils/_api/utils/utils.py index 0ff758ae..8e4cb47e 100644 --- a/plugins/module_utils/_api/utils/utils.py +++ b/plugins/module_utils/_api/utils/utils.py @@ -36,7 +36,6 @@ from ..tls import TLSConfig if t.TYPE_CHECKING: - import ssl from collections.abc import Mapping, Sequence @@ -298,7 +297,7 @@ def parse_host(addr: str | None, is_win32: bool = False, tls: bool = False) -> s if proto == "unix" and parsed_url.hostname is not None: # For legacy reasons, we consider unix://path # to be valid and equivalent to unix:///path - path = "/".join((parsed_url.hostname, path)) + path = f"{parsed_url.hostname}/{path}" netloc = parsed_url.netloc if proto in ("tcp", "ssh"): @@ -429,9 +428,8 @@ def parse_bytes(s: int | float | str) -> int | float: if len(s) == 0: return 0 - if s[-2:-1].isalpha() and s[-1].isalpha(): - if s[-1] == "b" or s[-1] == "B": - s = s[:-1] + if s[-2:-1].isalpha() and s[-1].isalpha() and (s[-1] == "b" or s[-1] == "B"): + s = s[:-1] units = BYTE_UNITS suffix = s[-1].lower() diff --git a/plugins/module_utils/_common.py b/plugins/module_utils/_common.py index 55a6cd3e..428a2255 100644 --- a/plugins/module_utils/_common.py +++ b/plugins/module_utils/_common.py @@ -718,9 +718,8 @@ class AnsibleDockerClient(AnsibleDockerClientBase): ) -> None: self.option_minimal_versions: dict[str, dict[str, t.Any]] = {} for option in self.module.argument_spec: - if ignore_params is not None: - if option in ignore_params: - continue + if ignore_params is not None and option in ignore_params: + continue self.option_minimal_versions[option] = {} self.option_minimal_versions.update(option_minimal_versions) diff --git a/plugins/module_utils/_common_api.py b/plugins/module_utils/_common_api.py index e2738a38..62a860f8 100644 --- a/plugins/module_utils/_common_api.py +++ b/plugins/module_utils/_common_api.py @@ -654,9 +654,8 @@ class AnsibleDockerClient(AnsibleDockerClientBase): ) -> None: self.option_minimal_versions: dict[str, dict[str, t.Any]] = {} for option in self.module.argument_spec: - if ignore_params is not None: - if option in ignore_params: - continue + if ignore_params is not None and option in ignore_params: + continue self.option_minimal_versions[option] = {} self.option_minimal_versions.update(option_minimal_versions) diff --git a/plugins/module_utils/_compose_v2.py b/plugins/module_utils/_compose_v2.py index d3564629..3953746b 100644 --- a/plugins/module_utils/_compose_v2.py +++ b/plugins/module_utils/_compose_v2.py @@ -690,9 +690,7 @@ def emit_warnings( def is_failed(events: Sequence[Event], rc: int) -> bool: - if rc: - return True - return False + return bool(rc) def update_failed( diff --git a/plugins/module_utils/_copy.py b/plugins/module_utils/_copy.py index 8f520885..54392f3e 100644 --- 
a/plugins/module_utils/_copy.py +++ b/plugins/module_utils/_copy.py @@ -479,9 +479,8 @@ def fetch_file( reader = tar.extractfile(member) if reader: - with reader as in_f: - with open(b_out_path, "wb") as out_f: - shutil.copyfileobj(in_f, out_f) + with reader as in_f, open(b_out_path, "wb") as out_f: + shutil.copyfileobj(in_f, out_f) return in_path def process_symlink(in_path: str, member: tarfile.TarInfo) -> str: diff --git a/plugins/module_utils/_module_container/base.py b/plugins/module_utils/_module_container/base.py index e527b377..e8dd1f53 100644 --- a/plugins/module_utils/_module_container/base.py +++ b/plugins/module_utils/_module_container/base.py @@ -890,14 +890,15 @@ def _preprocess_mounts( check_collision(container, "volumes") new_vols.append(f"{host}:{container}:{mode}") continue - if len(parts) == 2: - if not _is_volume_permissions(parts[1]) and re.match( - r"[.~]", parts[0] - ): - host = os.path.abspath(os.path.expanduser(parts[0])) - check_collision(parts[1], "volumes") - new_vols.append(f"{host}:{parts[1]}:rw") - continue + if ( + len(parts) == 2 + and not _is_volume_permissions(parts[1]) + and re.match(r"[.~]", parts[0]) + ): + host = os.path.abspath(os.path.expanduser(parts[0])) + check_collision(parts[1], "volumes") + new_vols.append(f"{host}:{parts[1]}:rw") + continue check_collision(parts[min(1, len(parts) - 1)], "volumes") new_vols.append(vol) values["volumes"] = new_vols diff --git a/plugins/module_utils/_module_container/docker_api.py b/plugins/module_utils/_module_container/docker_api.py index 388e7f50..fdb7cc60 100644 --- a/plugins/module_utils/_module_container/docker_api.py +++ b/plugins/module_utils/_module_container/docker_api.py @@ -219,12 +219,11 @@ class DockerAPIEngineDriver(EngineDriver[AnsibleDockerClient]): return False def is_container_running(self, container: dict[str, t.Any]) -> bool: - if container.get("State"): - if container["State"].get("Running") and not container["State"].get( - "Ghost", False - ): - return True - return False + return bool( + container.get("State") + and container["State"].get("Running") + and not container["State"].get("Ghost", False) + ) def is_container_paused(self, container: dict[str, t.Any]) -> bool: if container.get("State"): @@ -1706,9 +1705,8 @@ def _get_expected_values_mounts( parts = vol.split(":") if len(parts) == 3: continue - if len(parts) == 2: - if not _is_volume_permissions(parts[1]): - continue + if len(parts) == 2 and not _is_volume_permissions(parts[1]): + continue expected_vols[vol] = {} if expected_vols: expected_values["volumes"] = expected_vols @@ -1805,9 +1803,8 @@ def _set_values_mounts( parts = volume.split(":") if len(parts) == 3: continue - if len(parts) == 2: - if not _is_volume_permissions(parts[1]): - continue + if len(parts) == 2 and not _is_volume_permissions(parts[1]): + continue volumes[volume] = {} data["Volumes"] = volumes if "volume_binds" in values: diff --git a/plugins/module_utils/_module_container/module.py b/plugins/module_utils/_module_container/module.py index 86748795..75128cfe 100644 --- a/plugins/module_utils/_module_container/module.py +++ b/plugins/module_utils/_module_container/module.py @@ -217,11 +217,13 @@ class ContainerManager(DockerBaseClass, t.Generic[Client]): "The wildcard can only be used with comparison modes 'strict' and 'ignore'!" 
) for option in self.all_options.values(): - if option.name == "networks": - # `networks` is special: only update if - # some value is actually specified - if self.module.params["networks"] is None: - continue + # `networks` is special: only update if + # some value is actually specified + if ( + option.name == "networks" + and self.module.params["networks"] is None + ): + continue option.comparison = value # Now process all other comparisons. comp_aliases_used: dict[str, str] = {} @@ -679,13 +681,17 @@ class ContainerManager(DockerBaseClass, t.Generic[Client]): def _image_is_different( self, image: dict[str, t.Any] | None, container: Container ) -> bool: - if image and image.get("Id"): - if container and container.image: - if image.get("Id") != container.image: - self.diff_tracker.add( - "image", parameter=image.get("Id"), active=container.image - ) - return True + if ( + image + and image.get("Id") + and container + and container.image + and image.get("Id") != container.image + ): + self.diff_tracker.add( + "image", parameter=image.get("Id"), active=container.image + ) + return True return False def _compose_create_parameters(self, image: str) -> dict[str, t.Any]: @@ -927,14 +933,13 @@ class ContainerManager(DockerBaseClass, t.Generic[Client]): "ipv6_address" ] != network_info_ipam.get("IPv6Address"): diff = True - if network.get("aliases"): - if not compare_generic( - network["aliases"], - network_info.get("Aliases"), - "allow_more_present", - "set", - ): - diff = True + if network.get("aliases") and not compare_generic( + network["aliases"], + network_info.get("Aliases"), + "allow_more_present", + "set", + ): + diff = True if network.get("links"): expected_links = [] for link, alias in network["links"]: diff --git a/plugins/module_utils/_socket_handler.py b/plugins/module_utils/_socket_handler.py index e0c3c1ef..a8d77dc8 100644 --- a/plugins/module_utils/_socket_handler.py +++ b/plugins/module_utils/_socket_handler.py @@ -73,7 +73,7 @@ class DockerSocketHandlerBase: def __exit__( self, - type_: t.Type[BaseException] | None, + type_: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None, ) -> None: @@ -199,10 +199,9 @@ class DockerSocketHandlerBase: if event & selectors.EVENT_WRITE != 0: self._write() result = len(events) - if self._paramiko_read_workaround and len(self._write_buffer) > 0: - if self._sock.send_ready(): # type: ignore - self._write() - result += 1 + if self._paramiko_read_workaround and len(self._write_buffer) > 0 and self._sock.send_ready(): # type: ignore + self._write() + result += 1 return result > 0 def is_eof(self) -> bool: diff --git a/plugins/module_utils/_socket_helper.py b/plugins/module_utils/_socket_helper.py index 9927c3df..a2146c3f 100644 --- a/plugins/module_utils/_socket_helper.py +++ b/plugins/module_utils/_socket_helper.py @@ -64,8 +64,8 @@ def shutdown_writing( # probably: "TypeError: shutdown() takes 1 positional argument but 2 were given" log(f"Shutting down for writing not possible; trying shutdown instead: {e}") sock.shutdown() # type: ignore - elif isinstance(sock, getattr(pysocket, "SocketIO")): - sock._sock.shutdown(pysocket.SHUT_WR) + elif isinstance(sock, pysocket.SocketIO): # type: ignore + sock._sock.shutdown(pysocket.SHUT_WR) # type: ignore[unreachable] else: log("No idea how to signal end of writing") diff --git a/plugins/module_utils/_swarm.py b/plugins/module_utils/_swarm.py index 53fd2c7c..887f0bf0 100644 --- a/plugins/module_utils/_swarm.py +++ b/plugins/module_utils/_swarm.py @@ -115,9 +115,7 @@ class 
AnsibleDockerSwarmClient(AnsibleDockerClient): :return: True if node is Swarm Worker, False otherwise """ - if self.check_if_swarm_node() and not self.check_if_swarm_manager(): - return True - return False + return bool(self.check_if_swarm_node() and not self.check_if_swarm_manager()) def check_if_swarm_node_is_down( self, node_id: str | None = None, repeat_check: int = 1 @@ -181,9 +179,8 @@ class AnsibleDockerSwarmClient(AnsibleDockerClient): self.fail( "Cannot inspect node: To inspect node execute module on Swarm Manager" ) - if exc.status_code == 404: - if skip_missing: - return None + if exc.status_code == 404 and skip_missing: + return None self.fail(f"Error while reading from Swarm manager: {exc}") except Exception as exc: # pylint: disable=broad-exception-caught self.fail(f"Error inspecting swarm node: {exc}") @@ -191,19 +188,18 @@ class AnsibleDockerSwarmClient(AnsibleDockerClient): json_str = json.dumps(node_info, ensure_ascii=False) node_info = json.loads(json_str) - if "ManagerStatus" in node_info: - if node_info["ManagerStatus"].get("Leader"): - # This is workaround of bug in Docker when in some cases the Leader IP is 0.0.0.0 - # Check moby/moby#35437 for details - count_colons = node_info["ManagerStatus"]["Addr"].count(":") - if count_colons == 1: - swarm_leader_ip = ( - node_info["ManagerStatus"]["Addr"].split(":", 1)[0] - or node_info["Status"]["Addr"] - ) - else: - swarm_leader_ip = node_info["Status"]["Addr"] - node_info["Status"]["Addr"] = swarm_leader_ip + if "ManagerStatus" in node_info and node_info["ManagerStatus"].get("Leader"): + # This is workaround of bug in Docker when in some cases the Leader IP is 0.0.0.0 + # Check moby/moby#35437 for details + count_colons = node_info["ManagerStatus"]["Addr"].count(":") + if count_colons == 1: + swarm_leader_ip = ( + node_info["ManagerStatus"]["Addr"].split(":", 1)[0] + or node_info["Status"]["Addr"] + ) + else: + swarm_leader_ip = node_info["Status"]["Addr"] + node_info["Status"]["Addr"] = swarm_leader_ip return node_info def get_all_nodes_inspect(self) -> list[dict[str, t.Any]]: diff --git a/plugins/module_utils/_util.py b/plugins/module_utils/_util.py index 62f793c7..2ca60cac 100644 --- a/plugins/module_utils/_util.py +++ b/plugins/module_utils/_util.py @@ -27,7 +27,7 @@ if t.TYPE_CHECKING: from ._common_api import AnsibleDockerClientBase as CAPIADCB from ._common_cli import AnsibleDockerClientBase as CCLIADCB - Client = t.Union[CADCB, CAPIADCB, CCLIADCB] + Client = t.Union[CADCB, CAPIADCB, CCLIADCB] # noqa: UP007 DEFAULT_DOCKER_HOST = "unix:///var/run/docker.sock" @@ -94,9 +94,7 @@ BYTE_SUFFIXES = ["B", "KB", "MB", "GB", "TB", "PB"] def is_image_name_id(name: str) -> bool: """Check whether the given image name is in fact an image ID (hash).""" - if re.match("^sha256:[0-9a-fA-F]{64}$", name): - return True - return False + return bool(re.match("^sha256:[0-9a-fA-F]{64}$", name)) def is_valid_tag(tag: str, allow_empty: bool = False) -> bool: diff --git a/plugins/modules/docker_compose_v2.py b/plugins/modules/docker_compose_v2.py index 54b99163..74cea22b 100644 --- a/plugins/modules/docker_compose_v2.py +++ b/plugins/modules/docker_compose_v2.py @@ -585,10 +585,10 @@ class ServicesManager(BaseComposeManager): return args def _are_containers_stopped(self) -> bool: - for container in self.list_containers_raw(): - if container["State"] not in ("created", "exited", "stopped", "killed"): - return False - return True + return all( + container["State"] in ("created", "exited", "stopped", "killed") + for container in 
self.list_containers_raw() + ) def cmd_stop(self) -> dict[str, t.Any]: # Since 'docker compose stop' **always** claims it is stopping containers, even if they are already diff --git a/plugins/modules/docker_container_copy_into.py b/plugins/modules/docker_container_copy_into.py index caa0b5c2..8ed97881 100644 --- a/plugins/modules/docker_container_copy_into.py +++ b/plugins/modules/docker_container_copy_into.py @@ -287,20 +287,20 @@ def are_fileobjs_equal_read_first( def is_container_file_not_regular_file(container_stat: dict[str, t.Any]) -> bool: - for bit in ( - # https://pkg.go.dev/io/fs#FileMode - 32 - 1, # ModeDir - 32 - 4, # ModeTemporary - 32 - 5, # ModeSymlink - 32 - 6, # ModeDevice - 32 - 7, # ModeNamedPipe - 32 - 8, # ModeSocket - 32 - 11, # ModeCharDevice - 32 - 13, # ModeIrregular - ): - if container_stat["mode"] & (1 << bit) != 0: - return True - return False + return any( + container_stat["mode"] & 1 << bit != 0 + for bit in ( + # https://pkg.go.dev/io/fs#FileMode + 32 - 1, # ModeDir + 32 - 4, # ModeTemporary + 32 - 5, # ModeSymlink + 32 - 6, # ModeDevice + 32 - 7, # ModeNamedPipe + 32 - 8, # ModeSocket + 32 - 11, # ModeCharDevice + 32 - 13, # ModeIrregular + ) + ) def get_container_file_mode(container_stat: dict[str, t.Any]) -> int: @@ -420,7 +420,7 @@ def retrieve_diff( def is_binary(content: bytes) -> bool: - if b"\x00" in content: + if b"\x00" in content: # noqa: SIM103 return True # TODO: better detection # (ansible-core also just checks for 0x00, and even just sticks to the first 8k, so this is not too bad...) @@ -695,11 +695,10 @@ def is_file_idempotent( mf = tar.extractfile(member) if mf is None: raise AssertionError("Member should be present for regular file") - with mf as tar_f: - with open(managed_path, "rb") as local_f: - is_equal = are_fileobjs_equal_with_diff_of_first( - tar_f, local_f, member.size, diff, max_file_size_for_diff, in_path - ) + with mf as tar_f, open(managed_path, "rb") as local_f: + is_equal = are_fileobjs_equal_with_diff_of_first( + tar_f, local_f, member.size, diff, max_file_size_for_diff, in_path + ) return container_path, mode, is_equal def process_symlink(in_path: str, member: tarfile.TarInfo) -> tuple[str, int, bool]: diff --git a/plugins/modules/docker_image.py b/plugins/modules/docker_image.py index 2fc3f344..06fdae2e 100644 --- a/plugins/modules/docker_image.py +++ b/plugins/modules/docker_image.py @@ -902,7 +902,7 @@ class ImageManager(DockerBaseClass): buildargs[key] = to_text(value) container_limits = self.container_limits or {} - for key in container_limits.keys(): + for key in container_limits: if key not in CONTAINER_LIMITS_KEYS: raise DockerException(f"Invalid container_limits key {key}") @@ -1207,13 +1207,13 @@ def main() -> None: if not is_valid_tag(client.module.params["tag"], allow_empty=True): client.fail(f'"{client.module.params["tag"]}" is not a valid docker tag!') - if client.module.params["source"] == "build": - if not client.module.params["build"] or not client.module.params["build"].get( - "path" - ): - client.fail( - 'If "source" is set to "build", the "build.path" option must be specified.' - ) + if client.module.params["source"] == "build" and ( + not client.module.params["build"] + or not client.module.params["build"].get("path") + ): + client.fail( + 'If "source" is set to "build", the "build.path" option must be specified.' 
+ ) try: results = {"changed": False, "actions": [], "image": {}} diff --git a/plugins/modules/docker_image_build.py b/plugins/modules/docker_image_build.py index b13185c1..3d108fe0 100644 --- a/plugins/modules/docker_image_build.py +++ b/plugins/modules/docker_image_build.py @@ -368,16 +368,20 @@ class ImageBuilder(DockerBaseClass): if self.secrets: for secret in self.secrets: - if secret["type"] in ("env", "value"): - if LooseVersion(buildx_version) < LooseVersion("0.6.0"): - self.fail( - f"The Docker buildx plugin has version {buildx_version}, but 0.6.0 is needed for secrets of type=env and type=value" - ) - if self.outputs and len(self.outputs) > 1: - if LooseVersion(buildx_version) < LooseVersion("0.13.0"): - self.fail( - f"The Docker buildx plugin has version {buildx_version}, but 0.13.0 is needed to specify more than one output" - ) + if secret["type"] in ("env", "value") and LooseVersion( + buildx_version + ) < LooseVersion("0.6.0"): + self.fail( + f"The Docker buildx plugin has version {buildx_version}, but 0.6.0 is needed for secrets of type=env and type=value" + ) + if ( + self.outputs + and len(self.outputs) > 1 + and LooseVersion(buildx_version) < LooseVersion("0.13.0") + ): + self.fail( + f"The Docker buildx plugin has version {buildx_version}, but 0.13.0 is needed to specify more than one output" + ) self.path = parameters["path"] if not os.path.isdir(self.path): @@ -530,9 +534,8 @@ class ImageBuilder(DockerBaseClass): "image": image or {}, } - if image: - if self.rebuild == "never": - return results + if image and self.rebuild == "never": + return results results["changed"] = True if not self.check_mode: diff --git a/plugins/modules/docker_network.py b/plugins/modules/docker_network.py index d73e0ddd..9145fda4 100644 --- a/plugins/modules/docker_network.py +++ b/plugins/modules/docker_network.py @@ -478,23 +478,21 @@ class DockerNetworkManager: ) else: for key, value in self.parameters.driver_options.items(): - if not (key in net["Options"]) or value != net["Options"][key]: + if key not in net["Options"] or value != net["Options"][key]: differences.add( f"driver_options.{key}", parameter=value, active=net["Options"].get(key), ) - if self.parameters.ipam_driver: - if ( - not net.get("IPAM") - or net["IPAM"]["Driver"] != self.parameters.ipam_driver - ): - differences.add( - "ipam_driver", - parameter=self.parameters.ipam_driver, - active=net.get("IPAM"), - ) + if self.parameters.ipam_driver and ( + not net.get("IPAM") or net["IPAM"]["Driver"] != self.parameters.ipam_driver + ): + differences.add( + "ipam_driver", + parameter=self.parameters.ipam_driver, + active=net.get("IPAM"), + ) if self.parameters.ipam_driver_options is not None: ipam_driver_options = net["IPAM"].get("Options") or {} @@ -597,7 +595,7 @@ class DockerNetworkManager: ) else: for key, value in self.parameters.labels.items(): - if not (key in net["Labels"]) or value != net["Labels"][key]: + if key not in net["Labels"] or value != net["Labels"][key]: differences.add( f"labels.{key}", parameter=value, diff --git a/plugins/modules/docker_node.py b/plugins/modules/docker_node.py index a2b25fb3..c0c60243 100644 --- a/plugins/modules/docker_node.py +++ b/plugins/modules/docker_node.py @@ -216,14 +216,14 @@ class SwarmNodeManager(DockerBaseClass): if self.parameters.role is None: node_spec["Role"] = node_info["Spec"]["Role"] else: - if not node_info["Spec"]["Role"] == self.parameters.role: + if node_info["Spec"]["Role"] != self.parameters.role: node_spec["Role"] = self.parameters.role changed = True if 
self.parameters.availability is None: node_spec["Availability"] = node_info["Spec"]["Availability"] else: - if not node_info["Spec"]["Availability"] == self.parameters.availability: + if node_info["Spec"]["Availability"] != self.parameters.availability: node_info["Spec"]["Availability"] = self.parameters.availability changed = True diff --git a/plugins/modules/docker_plugin.py b/plugins/modules/docker_plugin.py index f69f8bfc..20ee5ff3 100644 --- a/plugins/modules/docker_plugin.py +++ b/plugins/modules/docker_plugin.py @@ -1,5 +1,4 @@ #!/usr/bin/python -# coding: utf-8 # # Copyright (c) 2021 Red Hat | Ansible Sakar Mehra<@sakarmehra100@gmail.com | @sakar97> # Copyright (c) 2019, Vladimir Porshkevich (@porshkevich) @@ -281,7 +280,7 @@ class DockerPluginManager: stream=True, ) self.client._raise_for_status(response) - for data in self.client._stream_helper(response, decode=True): + for dummy in self.client._stream_helper(response, decode=True): pass # Inspect and configure plugin self.existing_plugin = self.client.get_json( diff --git a/plugins/modules/docker_stack.py b/plugins/modules/docker_stack.py index 01fe18b5..3a56ba40 100644 --- a/plugins/modules/docker_stack.py +++ b/plugins/modules/docker_stack.py @@ -322,7 +322,7 @@ def main() -> None: before_after_differences = json_diff( before_stack_services, after_stack_services ) - for k in before_after_differences.keys(): + for k in before_after_differences: if isinstance(before_after_differences[k], dict): before_after_differences[k].pop("UpdatedAt", None) before_after_differences[k].pop("Version", None) diff --git a/plugins/modules/docker_swarm.py b/plugins/modules/docker_swarm.py index 46b58377..1a51a860 100644 --- a/plugins/modules/docker_swarm.py +++ b/plugins/modules/docker_swarm.py @@ -554,9 +554,8 @@ class SwarmManager(DockerBaseClass): except APIError as exc: self.client.fail(f"Can not create a new Swarm Cluster: {exc}") - if not self.client.check_if_swarm_manager(): - if not self.check_mode: - self.client.fail("Swarm not created or other error!") + if not self.client.check_if_swarm_manager() and not self.check_mode: + self.client.fail("Swarm not created or other error!") self.created = True self.inspect_swarm() diff --git a/plugins/modules/docker_swarm_service.py b/plugins/modules/docker_swarm_service.py index 9183349a..ec1d0528 100644 --- a/plugins/modules/docker_swarm_service.py +++ b/plugins/modules/docker_swarm_service.py @@ -2380,12 +2380,12 @@ class DockerServiceManager: ds.container_labels = task_template_data["ContainerSpec"].get("Labels") mode = raw_data["Spec"]["Mode"] - if "Replicated" in mode.keys(): + if "Replicated" in mode: ds.mode = to_text("replicated", encoding="utf-8") ds.replicas = mode["Replicated"]["Replicas"] - elif "Global" in mode.keys(): + elif "Global" in mode: ds.mode = "global" - elif "ReplicatedJob" in mode.keys(): + elif "ReplicatedJob" in mode: ds.mode = to_text("replicated-job", encoding="utf-8") ds.replicas = mode["ReplicatedJob"]["TotalCompletions"] else: @@ -2649,10 +2649,9 @@ class DockerServiceManager: def _detect_publish_mode_usage(client: AnsibleDockerClient) -> bool: - for publish_def in client.module.params["publish"] or []: - if publish_def.get("mode"): - return True - return False + return any( + publish_def.get("mode") for publish_def in client.module.params["publish"] or [] + ) def _detect_healthcheck_start_period(client: AnsibleDockerClient) -> bool: diff --git a/plugins/modules/docker_volume.py b/plugins/modules/docker_volume.py index 41461031..1ac13e2b 100644 --- 
a/plugins/modules/docker_volume.py +++ b/plugins/modules/docker_volume.py @@ -1,5 +1,4 @@ #!/usr/bin/python -# coding: utf-8 # # Copyright 2017 Red Hat | Ansible, Alex Grönholm # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/modules/docker_volume_info.py b/plugins/modules/docker_volume_info.py index 75ee2340..504825f8 100644 --- a/plugins/modules/docker_volume_info.py +++ b/plugins/modules/docker_volume_info.py @@ -1,5 +1,4 @@ #!/usr/bin/python -# coding: utf-8 # # Copyright 2017 Red Hat | Ansible, Alex Grönholm # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 00000000..dca70302 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,31 @@ +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-FileCopyrightText: 2025 Felix Fontein + +line-length = 160 + +[lint] +# https://docs.astral.sh/ruff/rules/ + +select = ["A", "B", "E", "F", "FA", "FLY", "UP", "SIM"] +ignore = [ + # Better keep ignored (for now) + "F811", # Redefinition of unused `xxx` (happens a lot for fixtures in unit tests) + "E402", # Module level import not at top of file + "E741", # Ambiguous variable name + "UP012", # unnecessary-encode-utf8 + "UP015", # Unnecessary mode argument + "SIM105", # suppressible-exception + "SIM108", # if-else-block-instead-of-if-exp + # To fix later: + "B905", # zip-without-explicit-strict - needs Python 3.10+ + # To fix: + "UP024", # Replace aliased errors with `OSError` +] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed or starting with dummy +dummy-variable-rgx = "^(_|dummy).*$" diff --git a/tests/unit/plugins/module_utils/_api/api/test_client.py b/tests/unit/plugins/module_utils/_api/api/test_client.py index b0bf937d..5fec0ac5 100644 --- a/tests/unit/plugins/module_utils/_api/api/test_client.py +++ b/tests/unit/plugins/module_utils/_api/api/test_client.py @@ -226,7 +226,7 @@ class DockerApiTest(BaseAPIClientTest): def test_retrieve_server_version(self) -> None: client = APIClient(version="auto") assert isinstance(client._version, str) - assert not (client._version == "auto") + assert client._version != "auto" client.close() def test_auto_retrieve_server_version(self) -> None: @@ -323,8 +323,8 @@ class DockerApiTest(BaseAPIClientTest): # mock a stream interface raw_resp = urllib3.HTTPResponse(body=body) - setattr(raw_resp._fp, "chunked", True) - setattr(raw_resp._fp, "chunk_left", len(body.getvalue()) - 1) + raw_resp._fp.chunked = True + raw_resp._fp.chunk_left = len(body.getvalue()) - 1 # pass `decode=False` to the helper raw_resp._fp.seek(0) @@ -339,7 +339,7 @@ class DockerApiTest(BaseAPIClientTest): assert result == content # non-chunked response, pass `decode=False` to the helper - setattr(raw_resp._fp, "chunked", False) + raw_resp._fp.chunked = False raw_resp._fp.seek(0) resp = create_response(status_code=status_code, content=content, raw=raw_resp) result = next(self.client._stream_helper(resp)) @@ -503,7 +503,7 @@ class TCPSocketStreamTest(unittest.TestCase): cls.thread.join() @classmethod - def get_handler_class(cls) -> t.Type[BaseHTTPRequestHandler]: + def get_handler_class(cls) -> type[BaseHTTPRequestHandler]: stdout_data = cls.stdout_data stderr_data = cls.stderr_data diff 
--git a/tests/unit/plugins/module_utils/_api/fake_api.py b/tests/unit/plugins/module_utils/_api/fake_api.py index 006f97fc..809d2fcc 100644 --- a/tests/unit/plugins/module_utils/_api/fake_api.py +++ b/tests/unit/plugins/module_utils/_api/fake_api.py @@ -581,7 +581,7 @@ fake_responses: dict[str | tuple[str, str], Callable] = { f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart": post_fake_restart_container, f"{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b": delete_fake_remove_container, # TODO: the following is a duplicate of the import endpoint further above! - f"{prefix}/{CURRENT_VERSION}/images/create": post_fake_image_create, + f"{prefix}/{CURRENT_VERSION}/images/create": post_fake_image_create, # noqa: F601 f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128": delete_fake_remove_image, f"{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get": get_fake_get_image, f"{prefix}/{CURRENT_VERSION}/images/load": post_fake_load_image, diff --git a/tests/unit/plugins/module_utils/_api/test_auth.py b/tests/unit/plugins/module_utils/_api/test_auth.py index 38bba014..3a9a873d 100644 --- a/tests/unit/plugins/module_utils/_api/test_auth.py +++ b/tests/unit/plugins/module_utils/_api/test_auth.py @@ -256,7 +256,7 @@ class ResolveAuthTest(unittest.TestCase): m.return_value = None ac = auth.resolve_authconfig(auth_config, None) assert ac is not None - assert "indexuser" == ac["username"] + assert ac["username"] == "indexuser" class LoadConfigTest(unittest.TestCase): diff --git a/tests/unit/plugins/module_utils/_api/utils/test_build.py b/tests/unit/plugins/module_utils/_api/utils/test_build.py index 0cca04fa..e0710c72 100644 --- a/tests/unit/plugins/module_utils/_api/utils/test_build.py +++ b/tests/unit/plugins/module_utils/_api/utils/test_build.py @@ -421,18 +421,18 @@ class TarTest(unittest.TestCase): base = make_tree(dirs, files) self.addCleanup(shutil.rmtree, base) - with tar(base, exclude=exclude) as archive: - with tarfile.open(fileobj=archive) as tar_data: - assert sorted(tar_data.getnames()) == sorted(expected_names) + with tar(base, exclude=exclude) as archive, tarfile.open( + fileobj=archive + ) as tar_data: + assert sorted(tar_data.getnames()) == sorted(expected_names) def test_tar_with_empty_directory(self) -> None: base = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, base) for d in ["foo", "bar"]: os.makedirs(os.path.join(base, d)) - with tar(base) as archive: - with tarfile.open(fileobj=archive) as tar_data: - assert sorted(tar_data.getnames()) == ["bar", "foo"] + with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data: + assert sorted(tar_data.getnames()) == ["bar", "foo"] @pytest.mark.skipif( IS_WINDOWS_PLATFORM or os.geteuid() == 0, @@ -458,9 +458,8 @@ class TarTest(unittest.TestCase): f.write("content") os.makedirs(os.path.join(base, "bar")) os.symlink("../foo", os.path.join(base, "bar/foo")) - with tar(base) as archive: - with tarfile.open(fileobj=archive) as tar_data: - assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] + with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data: + assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows") def test_tar_with_directory_symlinks(self) -> None: @@ -469,9 +468,8 @@ class TarTest(unittest.TestCase): for d in ["foo", "bar"]: os.makedirs(os.path.join(base, d)) os.symlink("../foo", os.path.join(base, "bar/foo")) - with tar(base) as archive: - with tarfile.open(fileobj=archive) as tar_data: - assert 
sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] + with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data: + assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows") def test_tar_with_broken_symlinks(self) -> None: @@ -481,9 +479,8 @@ class TarTest(unittest.TestCase): os.makedirs(os.path.join(base, d)) os.symlink("../baz", os.path.join(base, "bar/foo")) - with tar(base) as archive: - with tarfile.open(fileobj=archive) as tar_data: - assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] + with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data: + assert sorted(tar_data.getnames()) == ["bar", "bar/foo", "foo"] @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No UNIX sockets on Win32") def test_tar_socket_file(self) -> None: @@ -494,9 +491,8 @@ class TarTest(unittest.TestCase): sock = socket.socket(socket.AF_UNIX) self.addCleanup(sock.close) sock.bind(os.path.join(base, "test.sock")) - with tar(base) as archive: - with tarfile.open(fileobj=archive) as tar_data: - assert sorted(tar_data.getnames()) == ["bar", "foo"] + with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data: + assert sorted(tar_data.getnames()) == ["bar", "foo"] def tar_test_negative_mtime_bug(self) -> None: base = tempfile.mkdtemp() @@ -505,10 +501,9 @@ class TarTest(unittest.TestCase): with open(filename, "wt", encoding="utf-8") as f: f.write("Invisible Full Moon") os.utime(filename, (12345, -3600.0)) - with tar(base) as archive: - with tarfile.open(fileobj=archive) as tar_data: - assert tar_data.getnames() == ["th.txt"] - assert tar_data.getmember("th.txt").mtime == -3600 + with tar(base) as archive, tarfile.open(fileobj=archive) as tar_data: + assert tar_data.getnames() == ["th.txt"] + assert tar_data.getmember("th.txt").mtime == -3600 @pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason="No symlinks on Windows") def test_tar_directory_link(self) -> None: diff --git a/tests/unit/plugins/module_utils/_api/utils/test_utils.py b/tests/unit/plugins/module_utils/_api/utils/test_utils.py index 2b412b50..5c91196b 100644 --- a/tests/unit/plugins/module_utils/_api/utils/test_utils.py +++ b/tests/unit/plugins/module_utils/_api/utils/test_utils.py @@ -58,7 +58,12 @@ class KwargsFromEnvTest(unittest.TestCase): self.os_environ = os.environ.copy() def tearDown(self) -> None: - os.environ = self.os_environ # type: ignore + for k, v in self.os_environ.items(): + if os.environ.get(k) != v: + os.environ[k] = v + for k in os.environ: + if k not in self.os_environ: + os.environ.pop(k) def test_kwargs_from_env_empty(self) -> None: os.environ.update(DOCKER_HOST="", DOCKER_CERT_PATH="") @@ -75,7 +80,7 @@ class KwargsFromEnvTest(unittest.TestCase): DOCKER_TLS_VERIFY="1", ) kwargs = kwargs_from_env(assert_hostname=False) - assert "tcp://192.168.59.103:2376" == kwargs["base_url"] + assert kwargs["base_url"] == "tcp://192.168.59.103:2376" assert "ca.pem" in kwargs["tls"].ca_cert assert "cert.pem" in kwargs["tls"].cert[0] assert "key.pem" in kwargs["tls"].cert[1] @@ -99,7 +104,7 @@ class KwargsFromEnvTest(unittest.TestCase): DOCKER_TLS_VERIFY="", ) kwargs = kwargs_from_env(assert_hostname=True) - assert "tcp://192.168.59.103:2376" == kwargs["base_url"] + assert kwargs["base_url"] == "tcp://192.168.59.103:2376" assert "ca.pem" in kwargs["tls"].ca_cert assert "cert.pem" in kwargs["tls"].cert[0] assert "key.pem" in kwargs["tls"].cert[1] @@ -125,7 +130,7 @@ class KwargsFromEnvTest(unittest.TestCase): ) 
os.environ.pop("DOCKER_CERT_PATH", None) kwargs = kwargs_from_env(assert_hostname=True) - assert "tcp://192.168.59.103:2376" == kwargs["base_url"] + assert kwargs["base_url"] == "tcp://192.168.59.103:2376" def test_kwargs_from_env_no_cert_path(self) -> None: try: @@ -157,7 +162,7 @@ class KwargsFromEnvTest(unittest.TestCase): "DOCKER_HOST": "http://docker.gensokyo.jp:2581", } ) - assert "http://docker.gensokyo.jp:2581" == kwargs["base_url"] + assert kwargs["base_url"] == "http://docker.gensokyo.jp:2581" assert "tls" not in kwargs diff --git a/tests/unit/plugins/modules/test_docker_image.py b/tests/unit/plugins/modules/test_docker_image.py index 050ff7eb..af78da57 100644 --- a/tests/unit/plugins/modules/test_docker_image.py +++ b/tests/unit/plugins/modules/test_docker_image.py @@ -23,7 +23,6 @@ from ..test_support.docker_image_archive_stubbing import ( if t.TYPE_CHECKING: from collections.abc import Callable - from pathlib import Path def assert_no_logging(msg: str) -> t.NoReturn: diff --git a/tests/unit/plugins/modules/test_docker_swarm_service.py b/tests/unit/plugins/modules/test_docker_swarm_service.py index 725976fb..12daa0b7 100644 --- a/tests/unit/plugins/modules/test_docker_swarm_service.py +++ b/tests/unit/plugins/modules/test_docker_swarm_service.py @@ -156,7 +156,7 @@ def test_has_list_changed() -> None: [{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}], sort_key="a" ) - with pytest.raises(Exception): + with pytest.raises(ValueError): docker_swarm_service.has_list_changed( [{"a": 1}, {"a": 2}], [{"a": 1}, {"a": 2}] ) diff --git a/tests/unit/plugins/test_support/docker_image_archive_stubbing.py b/tests/unit/plugins/test_support/docker_image_archive_stubbing.py index 38736805..d9369d20 100644 --- a/tests/unit/plugins/test_support/docker_image_archive_stubbing.py +++ b/tests/unit/plugins/test_support/docker_image_archive_stubbing.py @@ -36,15 +36,14 @@ def write_imitation_archive( def write_imitation_archive_with_manifest( file_name: str, manifest: list[dict[str, t.Any]] ) -> None: - with tarfile.open(file_name, "w") as tf: - with TemporaryFile() as f: - f.write(json.dumps(manifest).encode("utf-8")) + with tarfile.open(file_name, "w") as tf, TemporaryFile() as f: + f.write(json.dumps(manifest).encode("utf-8")) - ti = tarfile.TarInfo("manifest.json") - ti.size = f.tell() + ti = tarfile.TarInfo("manifest.json") + ti.size = f.tell() - f.seek(0) - tf.addfile(ti, f) + f.seek(0) + tf.addfile(ti, f) def write_irrelevant_tar(file_name: str) -> None: @@ -55,12 +54,11 @@ def write_irrelevant_tar(file_name: str) -> None: :type file_name: str """ - with tarfile.open(file_name, "w") as tf: - with TemporaryFile() as f: - f.write("Hello, world.".encode("utf-8")) + with tarfile.open(file_name, "w") as tf, TemporaryFile() as f: + f.write("Hello, world.".encode("utf-8")) - ti = tarfile.TarInfo("hi.txt") - ti.size = f.tell() + ti = tarfile.TarInfo("hi.txt") + ti.size = f.tell() - f.seek(0) - tf.addfile(ti, f) + f.seek(0) + tf.addfile(ti, f)