Python code modernization, 3/n (#1157)

* Remove __metaclass__ = type.

for i in $(grep -REl '__metaclass__ = type' plugins/ tests/); do
  sed -e '/^__metaclass__ = type/d' -i $i;
done

* Remove super() arguments, and stop inheriting from object.
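
All of the per-file hunks below apply the same mechanical changes. A minimal before/after sketch, using a hypothetical ExampleManager class rather than code from the collection:

# Before: Python 2/3 era idioms removed in this commit
#
#     __metaclass__ = type
#
#     class ExampleManager(object):
#         def __init__(self, client):
#             super(ExampleManager, self).__init__()
#             self.client = client

# After: Python 3 only
from __future__ import annotations


class ExampleManager:
    def __init__(self, client):
        # zero-argument super() resolves the class and instance implicitly
        super().__init__()
        self.client = client
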
Felix Fontein, 2025-10-10 08:11:58 +02:00, committed by GitHub
parent 741c318b1d
commit e8ec22d3b1
99 changed files with 129 additions and 245 deletions

View File

@ -406,7 +406,6 @@ disable=raw-checker-failed,
redefined-outer-name, # needed for test fixtures
simplifiable-if-expression,
subprocess-popen-preexec-fn,
super-with-arguments,
unexpected-keyword-arg,
unnecessary-dunder-call,
unnecessary-pass,
@ -415,7 +414,6 @@ disable=raw-checker-failed,
unused-variable,
use-dict-literal,
use-list-literal,
useless-object-inheritance,
# Cannot remove yet due to inadequacy of rules
inconsistent-return-statements, # doesn't notice that fail_json() does not return
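
These two suppressions become unnecessary once the code below stops calling super() with arguments and stops inheriting from object. A minimal sketch of what each pylint check flags, with hypothetical class names:

class LegacyChild(dict):
    def __init__(self):
        # flagged by super-with-arguments; the modern spelling is super().__init__()
        super(LegacyChild, self).__init__()


class LegacyBase(object):  # flagged by useless-object-inheritance; the object base is implicit on Python 3
    pass
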

View File

@ -22,7 +22,7 @@ class ActionModule(ActionBase):
self._supports_check_mode = True
self._supports_async = True
result = super(ActionModule, self).run(tmp, task_vars)
result = super().run(tmp, task_vars)
del tmp # tmp no longer has any effect
self._task.args["_max_file_size_for_diff"] = C.MAX_FILE_SIZE_FOR_DIFF

View File

@ -140,7 +140,7 @@ class Connection(ConnectionBase):
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
super().__init__(play_context, new_stdin, *args, **kwargs)
# Note: docker supports running as non-root in some configurations.
# (For instance, setting the UNIX socket file to be readable and
@ -365,7 +365,7 @@ class Connection(ConnectionBase):
def _connect(self, port=None):
"""Connect to the container. Nothing to do"""
super(Connection, self)._connect()
super()._connect()
if not self._connected:
self._set_conn_data()
actual_user = self._get_actual_user()
@ -380,7 +380,7 @@ class Connection(ConnectionBase):
self._set_conn_data()
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
local_cmd = self._build_exec_cmd([self._play_context.executable, "-c", cmd])
@ -490,7 +490,7 @@ class Connection(ConnectionBase):
def put_file(self, in_path, out_path):
"""Transfer a file from local to docker container"""
self._set_conn_data()
super(Connection, self).put_file(in_path, out_path)
super().put_file(in_path, out_path)
display.vvv(f"PUT {in_path} TO {out_path}", host=self.get_option("remote_addr"))
out_path = self._prefix_login_path(out_path)
@ -535,7 +535,7 @@ class Connection(ConnectionBase):
def fetch_file(self, in_path, out_path):
"""Fetch a file from container to local."""
self._set_conn_data()
super(Connection, self).fetch_file(in_path, out_path)
super().fetch_file(in_path, out_path)
display.vvv(
f"FETCH {in_path} TO {out_path}", host=self.get_option("remote_addr")
)
@ -602,7 +602,7 @@ class Connection(ConnectionBase):
def close(self):
"""Terminate the connection. Nothing to do for Docker"""
super(Connection, self).close()
super().close()
self._connected = False
def reset(self):

View File

@ -180,7 +180,7 @@ class Connection(ConnectionBase):
)
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
super().__init__(play_context, new_stdin, *args, **kwargs)
self.client = None
self.ids = dict()
@ -193,7 +193,7 @@ class Connection(ConnectionBase):
def _connect(self, port=None):
"""Connect to the container. Nothing to do"""
super(Connection, self)._connect()
super()._connect()
if not self._connected:
self.actual_user = self.get_option("remote_user")
display.vvv(
@ -224,7 +224,7 @@ class Connection(ConnectionBase):
def exec_command(self, cmd, in_data=None, sudoable=False):
"""Run a command on the docker host"""
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
command = [self._play_context.executable, "-c", to_text(cmd)]
@ -376,7 +376,7 @@ class Connection(ConnectionBase):
def put_file(self, in_path, out_path):
"""Transfer a file from local to docker container"""
super(Connection, self).put_file(in_path, out_path)
super().put_file(in_path, out_path)
display.vvv(f"PUT {in_path} TO {out_path}", host=self.get_option("remote_addr"))
out_path = self._prefix_login_path(out_path)
@ -418,7 +418,7 @@ class Connection(ConnectionBase):
def fetch_file(self, in_path, out_path):
"""Fetch a file from container to local."""
super(Connection, self).fetch_file(in_path, out_path)
super().fetch_file(in_path, out_path)
display.vvv(
f"FETCH {in_path} TO {out_path}", host=self.get_option("remote_addr")
)
@ -446,7 +446,7 @@ class Connection(ConnectionBase):
def close(self):
"""Terminate the connection. Nothing to do for Docker"""
super(Connection, self).close()
super().close()
self._connected = False
def reset(self):

View File

@ -64,7 +64,7 @@ class Connection(ConnectionBase):
has_pipelining = False
def __init__(self, *args, **kwargs):
super(Connection, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.cwd = None
def _connect(self):
@ -83,7 +83,7 @@ class Connection(ConnectionBase):
return self
def exec_command(self, cmd, in_data=None, sudoable=True):
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
display.debug("in nsenter.exec_command()")
@ -232,7 +232,7 @@ class Connection(ConnectionBase):
return (p.returncode, stdout, stderr)
def put_file(self, in_path, out_path):
super(Connection, self).put_file(in_path, out_path)
super().put_file(in_path, out_path)
in_path = unfrackpath(in_path, basedir=self.cwd)
out_path = unfrackpath(out_path, basedir=self.cwd)
@ -248,7 +248,7 @@ class Connection(ConnectionBase):
raise AnsibleError(f"failed to transfer file to {out_path}: {e}")
def fetch_file(self, in_path, out_path):
super(Connection, self).fetch_file(in_path, out_path)
super().fetch_file(in_path, out_path)
in_path = unfrackpath(in_path, basedir=self.cwd)
out_path = unfrackpath(out_path, basedir=self.cwd)

View File

@ -8,7 +8,7 @@
from __future__ import annotations
class ModuleDocFragment(object):
class ModuleDocFragment:
# Standard documentation fragment
DOCUMENTATION = r"""

View File

@ -8,7 +8,7 @@
from __future__ import annotations
class ModuleDocFragment(object):
class ModuleDocFragment:
# Docker doc fragment
DOCUMENTATION = r"""

View File

@ -8,7 +8,7 @@
from __future__ import annotations
class ModuleDocFragment(object):
class ModuleDocFragment:
# Docker doc fragment
DOCUMENTATION = r"""

View File

@ -386,7 +386,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
def verify_file(self, path):
"""Return the possibly of a file being consumable by this plugin."""
return super(InventoryModule, self).verify_file(path) and path.endswith(
return super().verify_file(path) and path.endswith(
("docker.yaml", "docker.yml")
)
@ -394,7 +394,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
return AnsibleDockerClient(self, min_docker_api_version=MIN_DOCKER_API)
def parse(self, inventory, loader, path, cache=True):
super(InventoryModule, self).parse(inventory, loader, path, cache)
super().parse(inventory, loader, path, cache)
self._read_config_data(path)
client = self._create_client()
try:

View File

@ -326,11 +326,11 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
def verify_file(self, path):
"""Return the possibility of a file being consumable by this plugin."""
return super(InventoryModule, self).verify_file(path) and path.endswith(
return super().verify_file(path) and path.endswith(
("docker_machine.yaml", "docker_machine.yml")
)
def parse(self, inventory, loader, path, cache=True):
super(InventoryModule, self).parse(inventory, loader, path, cache)
super().parse(inventory, loader, path, cache)
self._read_config_data(path)
self._populate()

View File

@ -308,7 +308,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
def verify_file(self, path):
"""Return the possibly of a file being consumable by this plugin."""
return super(InventoryModule, self).verify_file(path) and path.endswith(
return super().verify_file(path) and path.endswith(
("docker_swarm.yaml", "docker_swarm.yml")
)
@ -318,6 +318,6 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
"The Docker swarm dynamic inventory plugin requires the Docker SDK for Python: "
"https://github.com/docker/docker-py."
)
super(InventoryModule, self).parse(inventory, loader, path, cache)
super().parse(inventory, loader, path, cache)
self._read_config_data(path)
self._populate()

View File

@ -30,10 +30,10 @@ try:
except ImportError:
REQUESTS_IMPORT_ERROR = traceback.format_exc()
class Session(object):
class Session:
__attrs__ = []
class HTTPAdapter(object):
class HTTPAdapter:
__attrs__ = []
class HTTPError(Exception):
@ -57,13 +57,13 @@ except ImportError:
except ImportError:
URLLIB3_IMPORT_ERROR = traceback.format_exc()
class _HTTPConnectionPool(object):
class _HTTPConnectionPool:
pass
class _HTTPConnection(object):
class _HTTPConnection:
pass
class FakeURLLIB3(object):
class FakeURLLIB3:
def __init__(self):
self._collections = self
self.poolmanager = self
@ -75,7 +75,7 @@ except ImportError:
self.match_hostname = object()
self.HTTPConnectionPool = _HTTPConnectionPool
class FakeURLLIB3Connection(object):
class FakeURLLIB3Connection:
def __init__(self):
self.HTTPConnection = _HTTPConnection
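
For context, these classes live in the except-ImportError branches of the import helper: when requests or urllib3 cannot be imported, lightweight stand-ins are defined so the module itself still imports and the failure can be reported later. A rough sketch of the pattern; the try branch is assumed, only the except branch appears in the hunk above:

import traceback

REQUESTS_IMPORT_ERROR = None

try:
    from requests import Session  # assumed real import; not shown in the hunk
except ImportError:
    REQUESTS_IMPORT_ERROR = traceback.format_exc()

    class Session:
        # minimal stand-in so importing the helper never fails outright
        __attrs__ = []
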

View File

@ -115,7 +115,7 @@ class APIClient(_Session, DaemonApiMixin):
use_ssh_client=False,
max_pool_size=DEFAULT_MAX_POOL_SIZE,
):
super(APIClient, self).__init__()
super().__init__()
fail_on_missing_imports()
@ -493,7 +493,7 @@ class APIClient(_Session, DaemonApiMixin):
def get_adapter(self, url):
try:
return super(APIClient, self).get_adapter(url)
return super().get_adapter(url)
except _InvalidSchema as e:
if self._custom_adapter:
return self._custom_adapter

View File

@ -17,7 +17,7 @@ from .. import auth
from ..utils.decorators import minimum_version
class DaemonApiMixin(object):
class DaemonApiMixin:
@minimum_version("1.25")
def df(self):
"""

View File

@ -33,7 +33,7 @@ def create_default_context():
)
class ContextAPI(object):
class ContextAPI:
"""Context API.
Contains methods for context management:
create, list, remove, get, inspect.

View File

@ -28,7 +28,7 @@ from .config import (
IN_MEMORY = "IN MEMORY"
class Context(object):
class Context:
"""A context."""
def __init__(

View File

@ -19,7 +19,7 @@ from . import constants, errors
from .utils import create_environment_dict, find_executable
class Store(object):
class Store:
def __init__(self, program, environment=None):
"""Create a store object that acts as an interface to
perform the basic operations for storing, retrieving

View File

@ -55,12 +55,12 @@ class APIError(_HTTPError, DockerException):
def __init__(self, message, response=None, explanation=None):
# requests 1.2 supports response as a keyword argument, but
# requests 1.1 does not
super(APIError, self).__init__(message)
super().__init__(message)
self.response = response
self.explanation = explanation
def __str__(self):
message = super(APIError, self).__str__()
message = super().__str__()
if self.is_client_error():
message = f"{self.response.status_code} Client Error for {self.response.url}: {self.response.reason}"
@ -152,7 +152,7 @@ class ContainerError(DockerException):
err = f": {stderr}" if stderr is not None else ""
msg = f"Command '{command}' in image '{image}' returned non-zero exit status {exit_status}{err}"
super(ContainerError, self).__init__(msg)
super().__init__(msg)
class StreamParseError(RuntimeError):
@ -162,7 +162,7 @@ class StreamParseError(RuntimeError):
class BuildError(DockerException):
def __init__(self, reason, build_log):
super(BuildError, self).__init__(reason)
super().__init__(reason)
self.msg = reason
self.build_log = build_log

View File

@ -18,7 +18,7 @@ from . import errors
from .transport.ssladapter import SSLHTTPAdapter
class TLSConfig(object):
class TLSConfig:
"""
TLS configuration.

View File

@ -16,7 +16,7 @@ from .._import_helper import HTTPAdapter as _HTTPAdapter
class BaseHTTPAdapter(_HTTPAdapter):
def close(self):
super(BaseHTTPAdapter, self).close()
super().close()
if hasattr(self, "pools"):
self.pools.clear()

View File

@ -22,9 +22,9 @@ from .npipesocket import NpipeSocket
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class NpipeHTTPConnection(urllib3_connection.HTTPConnection, object):
class NpipeHTTPConnection(urllib3_connection.HTTPConnection):
def __init__(self, npipe_path, timeout=60):
super(NpipeHTTPConnection, self).__init__("localhost", timeout=timeout)
super().__init__("localhost", timeout=timeout)
self.npipe_path = npipe_path
self.timeout = timeout
@ -37,9 +37,7 @@ class NpipeHTTPConnection(urllib3_connection.HTTPConnection, object):
class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, npipe_path, timeout=60, maxsize=10):
super(NpipeHTTPConnectionPool, self).__init__(
"localhost", timeout=timeout, maxsize=maxsize
)
super().__init__("localhost", timeout=timeout, maxsize=maxsize)
self.npipe_path = npipe_path
self.timeout = timeout
@ -90,7 +88,7 @@ class NpipeHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close()
)
super(NpipeHTTPAdapter, self).__init__()
super().__init__()
def get_connection(self, url, proxies=None):
with self.pools.lock:
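
This file and the ssh/unix transports below also drop the extra object base from declarations such as NpipeHTTPConnection(urllib3_connection.HTTPConnection, object). That mixin is a Python 2-era idiom for forcing a new-style class when the third-party base might be old-style; on Python 3 every class is new-style, so it is redundant. A minimal sketch using the standard library's HTTPConnection as a stand-in base:

from http.client import HTTPConnection


class PatchedHTTPConnection(HTTPConnection):  # no trailing ", object" base needed on Python 3
    def __init__(self, host, timeout=60):
        # zero-argument super() replaces super(PatchedHTTPConnection, self)
        super().__init__(host, timeout=timeout)
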

View File

@ -45,7 +45,7 @@ def check_closed(f):
return wrapped
class NpipeSocket(object):
class NpipeSocket:
"""Partial implementation of the socket API over windows named pipes.
This implementation is only designed to be used as a client socket,
and server-specific methods (bind, listen, accept...) are not
@ -227,7 +227,7 @@ class NpipeFileIOBase(io.RawIOBase):
self.sock = npipe_socket
def close(self):
super(NpipeFileIOBase, self).close()
super().close()
self.sock = None
def fileno(self):

View File

@ -37,7 +37,7 @@ RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class SSHSocket(socket.socket):
def __init__(self, host):
super(SSHSocket, self).__init__(socket.AF_INET, socket.SOCK_STREAM)
super().__init__(socket.AF_INET, socket.SOCK_STREAM)
self.host = host
self.port = None
self.user = None
@ -117,9 +117,9 @@ class SSHSocket(socket.socket):
self.proc.terminate()
class SSHConnection(urllib3_connection.HTTPConnection, object):
class SSHConnection(urllib3_connection.HTTPConnection):
def __init__(self, ssh_transport=None, timeout=60, host=None):
super(SSHConnection, self).__init__("localhost", timeout=timeout)
super().__init__("localhost", timeout=timeout)
self.ssh_transport = ssh_transport
self.timeout = timeout
self.ssh_host = host
@ -141,9 +141,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
scheme = "ssh"
def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None):
super(SSHConnectionPool, self).__init__(
"localhost", timeout=timeout, maxsize=maxsize
)
super().__init__("localhost", timeout=timeout, maxsize=maxsize)
self.ssh_transport = None
self.timeout = timeout
if ssh_client:
@ -207,7 +205,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close()
)
super(SSHHTTPAdapter, self).__init__()
super().__init__()
def _create_paramiko_client(self, base_url):
logging.getLogger("paramiko").setLevel(logging.WARNING)
@ -272,6 +270,6 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
return pool
def close(self):
super(SSHHTTPAdapter, self).close()
super().close()
if self.ssh_client:
self.ssh_client.close()

View File

@ -35,7 +35,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
def __init__(self, ssl_version=None, assert_hostname=None, **kwargs):
self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
super(SSLHTTPAdapter, self).__init__(**kwargs)
super().__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
kwargs = {
@ -58,7 +58,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
But we still need to take care of when there is a proxy poolmanager
"""
conn = super(SSLHTTPAdapter, self).get_connection(*args, **kwargs)
conn = super().get_connection(*args, **kwargs)
if (
self.assert_hostname is not None
and conn.assert_hostname != self.assert_hostname

View File

@ -21,10 +21,10 @@ from .basehttpadapter import BaseHTTPAdapter
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class UnixHTTPConnection(urllib3_connection.HTTPConnection, object):
class UnixHTTPConnection(urllib3_connection.HTTPConnection):
def __init__(self, base_url, unix_socket, timeout=60):
super(UnixHTTPConnection, self).__init__("localhost", timeout=timeout)
super().__init__("localhost", timeout=timeout)
self.base_url = base_url
self.unix_socket = unix_socket
self.timeout = timeout
@ -37,7 +37,7 @@ class UnixHTTPConnection(urllib3_connection.HTTPConnection, object):
self.sock = sock
def putheader(self, header, *values):
super(UnixHTTPConnection, self).putheader(header, *values)
super().putheader(header, *values)
if header == "Connection" and "Upgrade" in values:
self.disable_buffering = True
@ -45,14 +45,12 @@ class UnixHTTPConnection(urllib3_connection.HTTPConnection, object):
# FIXME: We may need to disable buffering on Py3,
# but there's no clear way to do it at the moment. See:
# https://github.com/docker/docker-py/issues/1799
return super(UnixHTTPConnection, self).response_class(sock, *args, **kwargs)
return super().response_class(sock, *args, **kwargs)
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60, maxsize=10):
super(UnixHTTPConnectionPool, self).__init__(
"localhost", timeout=timeout, maxsize=maxsize
)
super().__init__("localhost", timeout=timeout, maxsize=maxsize)
self.base_url = base_url
self.socket_path = socket_path
self.timeout = timeout
@ -86,7 +84,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close()
)
super(UnixHTTPAdapter, self).__init__()
super().__init__()
def get_connection(self, url, proxies=None):
with self.pools.lock:

View File

@ -17,7 +17,7 @@ from .._import_helper import urllib3
from ..errors import DockerException
class CancellableStream(object):
class CancellableStream:
"""
Stream wrapper for real-time events, logs, etc. from the server.

View File

@ -158,7 +158,7 @@ def walk(root, patterns, default=True):
# Heavily based on
# https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go
class PatternMatcher(object):
class PatternMatcher:
def __init__(self, patterns):
self.patterns = list(filter(lambda p: p.dirs, [Pattern(p) for p in patterns]))
self.patterns.append(Pattern("!.dockerignore"))
@ -216,7 +216,7 @@ class PatternMatcher(object):
return rec_walk(root)
class Pattern(object):
class Pattern:
def __init__(self, pattern_str):
self.exclusion = False
if pattern_str.startswith("!"):

View File

@ -101,7 +101,7 @@ if not HAS_DOCKER_PY:
# No Docker SDK for Python. Create a placeholder client to allow
# instantiation of AnsibleModule and proper error handling
class Client(object): # noqa: F811, pylint: disable=function-redefined
class Client: # noqa: F811, pylint: disable=function-redefined
def __init__(self, **kwargs):
pass
@ -232,7 +232,7 @@ class AnsibleDockerClientBase(Client):
)
try:
super(AnsibleDockerClientBase, self).__init__(**self._connect_params)
super().__init__(**self._connect_params)
self.docker_api_version_str = self.api_version
except APIError as exc:
self.fail(f"Docker API error: {exc}")
@ -628,9 +628,7 @@ class AnsibleDockerClientBase(Client):
),
get_json=True,
)
return super(AnsibleDockerClientBase, self).inspect_distribution(
image, **kwargs
)
return super().inspect_distribution(image, **kwargs)
class AnsibleDockerClient(AnsibleDockerClientBase):
@ -684,7 +682,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
self.debug = self.module.params.get("debug")
self.check_mode = self.module.check_mode
super(AnsibleDockerClient, self).__init__(
super().__init__(
min_docker_version=min_docker_version,
min_docker_api_version=min_docker_api_version,
)

View File

@ -119,7 +119,7 @@ class AnsibleDockerClientBase(Client):
)
try:
super(AnsibleDockerClientBase, self).__init__(**self._connect_params)
super().__init__(**self._connect_params)
self.docker_api_version_str = self.api_version
except MissingRequirementException as exc:
self.fail(
@ -592,9 +592,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
self.debug = self.module.params.get("debug")
self.check_mode = self.module.check_mode
super(AnsibleDockerClient, self).__init__(
min_docker_api_version=min_docker_api_version
)
super().__init__(min_docker_api_version=min_docker_api_version)
if option_minimal_versions is not None:
self._get_minimal_versions(

View File

@ -61,7 +61,7 @@ class DockerException(Exception):
pass
class AnsibleDockerClientBase(object):
class AnsibleDockerClientBase:
def __init__(
self, common_args, min_docker_api_version=None, needs_api_version=True
):
@ -357,7 +357,7 @@ class AnsibleModuleDockerClient(AnsibleDockerClientBase):
self.diff = self.module._diff
common_args = dict((k, self.module.params[k]) for k in DOCKER_COMMON_ARGS)
super(AnsibleModuleDockerClient, self).__init__(
super().__init__(
common_args,
min_docker_api_version=min_docker_api_version,
needs_api_version=needs_api_version,

View File

@ -132,7 +132,7 @@ DOCKER_PULL_PROGRESS_WORKING = frozenset(
)
class ResourceType(object):
class ResourceType:
UNKNOWN = "unknown"
NETWORK = "network"
IMAGE = "image"
@ -724,7 +724,7 @@ def combine_text_output(*outputs):
class BaseComposeManager(DockerBaseClass):
def __init__(self, client, min_version=MINIMUM_COMPOSE_VERSION):
super(BaseComposeManager, self).__init__()
super().__init__()
self.client = client
self.check_mode = self.client.check_mode
self.cleanup_dirs = set()

View File

@ -12,7 +12,7 @@ import os
import tarfile
class ImageArchiveManifestSummary(object):
class ImageArchiveManifestSummary:
"""
Represents data extracted from a manifest.json found in the tar archive output of the
"docker image save some:tag > some.tar" command.

View File

@ -22,7 +22,7 @@ class InvalidLogFmt(Exception):
pass
class _Mode(object):
class _Mode:
GARBAGE = 0
KEY = 1
EQUAL = 2
@ -72,7 +72,7 @@ def _is_ident(cur):
return cur > " " and cur not in ('"', "=")
class _Parser(object):
class _Parser:
def __init__(self, line):
self.line = line
self.index = 0

View File

@ -61,7 +61,7 @@ def _get_ansible_type(value_type):
return value_type
class Option(object):
class Option:
def __init__(
self,
name,
@ -148,7 +148,7 @@ class Option(object):
)
class OptionGroup(object):
class OptionGroup:
def __init__(
self,
preprocess=None,
@ -205,7 +205,7 @@ class OptionGroup(object):
return self
class Engine(object):
class Engine:
min_api_version = None # string or None
min_api_version_obj = None # LooseVersion object or None
extra_option_minimal_versions = None # dict[str, dict[str, Any]] or None
@ -265,7 +265,7 @@ class Engine(object):
pass
class EngineDriver(object):
class EngineDriver:
name = None # string
@abc.abstractmethod

View File

@ -26,7 +26,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class Container(DockerBaseClass):
def __init__(self, container, engine_driver):
super(Container, self).__init__()
super().__init__()
self.raw = container
self.id = None
self.image = None

View File

@ -128,7 +128,7 @@ def _normalize_arch(arch_str, variant_str):
return arch_str, variant_str
class _Platform(object):
class _Platform:
def __init__(self, os=None, arch=None, variant=None):
self.os = os
self.arch = arch

View File

@ -25,7 +25,7 @@ from ansible_collections.community.docker.plugins.module_utils._socket_helper im
PARAMIKO_POLL_TIMEOUT = 0.01 # 10 milliseconds
class DockerSocketHandlerBase(object):
class DockerSocketHandlerBase:
def __init__(self, sock, selectors, log=None):
make_unblocking(sock)
@ -212,4 +212,4 @@ class DockerSocketHandlerBase(object):
class DockerSocketHandlerModule(DockerSocketHandlerBase):
def __init__(self, sock, module, selectors):
super(DockerSocketHandlerModule, self).__init__(sock, selectors, module.debug)
super().__init__(sock, selectors, module.debug)

View File

@ -29,7 +29,7 @@ from ansible_collections.community.docker.plugins.module_utils._version import (
class AnsibleDockerSwarmClient(AnsibleDockerClient):
def __init__(self, **kwargs):
super(AnsibleDockerSwarmClient, self).__init__(**kwargs)
super().__init__(**kwargs)
def get_swarm_node_id(self):
"""
@ -271,7 +271,7 @@ class AnsibleDockerSwarmClient(AnsibleDockerClient):
def get_unlock_key(self):
if self.docker_py_version < LooseVersion("2.7.0"):
return None
return super(AnsibleDockerSwarmClient, self).get_unlock_key()
return super().get_unlock_key()
def get_service_inspect(self, service_id, skip_missing=False):
"""

View File

@ -121,7 +121,7 @@ def log_debug(msg, pretty_print=False):
log_file.write(f"{msg}\n")
class DockerBaseClass(object):
class DockerBaseClass:
def __init__(self):
self.debug = False
@ -245,7 +245,7 @@ def compare_generic(a, b, method, datatype):
return True
class DifferenceTracker(object):
class DifferenceTracker:
def __init__(self):
self._diff = []

View File

@ -455,7 +455,7 @@ from ansible_collections.community.docker.plugins.module_utils._version import (
class ServicesManager(BaseComposeManager):
def __init__(self, client):
super(ServicesManager, self).__init__(client)
super().__init__(client)
parameters = self.client.module.params
self.state = parameters["state"]

View File

@ -180,7 +180,7 @@ from ansible_collections.community.docker.plugins.module_utils._compose_v2 impor
class ExecManager(BaseComposeManager):
def __init__(self, client):
super(ExecManager, self).__init__(client)
super().__init__(client)
parameters = self.client.module.params
self.service = parameters["service"]

View File

@ -127,7 +127,7 @@ from ansible_collections.community.docker.plugins.module_utils._version import (
class PullManager(BaseComposeManager):
def __init__(self, client):
super(PullManager, self).__init__(client)
super().__init__(client)
parameters = self.client.module.params
self.policy = parameters["policy"]

View File

@ -253,7 +253,7 @@ from ansible_collections.community.docker.plugins.module_utils._compose_v2 impor
class ExecManager(BaseComposeManager):
def __init__(self, client):
super(ExecManager, self).__init__(client)
super().__init__(client)
parameters = self.client.module.params
self.service = parameters["service"]

View File

@ -222,7 +222,7 @@ class ConfigManager(DockerBaseClass):
def __init__(self, client, results):
super(ConfigManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -234,7 +234,7 @@ class DockerHostManager(DockerBaseClass):
def __init__(self, client, results):
super(DockerHostManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -433,7 +433,7 @@ class ImageManager(DockerBaseClass):
:type results: dict
"""
super(ImageManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -327,7 +327,7 @@ def _quote_csv(text):
class ImageBuilder(DockerBaseClass):
def __init__(self, client):
super(ImageBuilder, self).__init__()
super().__init__()
self.client = client
self.check_mode = self.client.check_mode
parameters = self.client.module.params

View File

@ -122,7 +122,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class ImageExportManager(DockerBaseClass):
def __init__(self, client):
super(ImageExportManager, self).__init__()
super().__init__()
self.client = client
parameters = self.client.module.params

View File

@ -158,7 +158,7 @@ class ImageManager(DockerBaseClass):
def __init__(self, client, results):
super(ImageManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -96,7 +96,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class ImageManager(DockerBaseClass):
def __init__(self, client, results):
super(ImageManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -125,7 +125,7 @@ def image_info(image):
class ImagePuller(DockerBaseClass):
def __init__(self, client):
super(ImagePuller, self).__init__()
super().__init__()
self.client = client
self.check_mode = self.client.check_mode

View File

@ -97,7 +97,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class ImagePusher(DockerBaseClass):
def __init__(self, client):
super(ImagePusher, self).__init__()
super().__init__()
self.client = client
self.check_mode = self.client.check_mode

View File

@ -120,7 +120,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class ImageRemover(DockerBaseClass):
def __init__(self, client):
super(ImageRemover, self).__init__()
super().__init__()
self.client = client
self.check_mode = self.client.check_mode

View File

@ -142,7 +142,7 @@ def image_info(name, tag, image):
class ImageTagger(DockerBaseClass):
def __init__(self, client):
super(ImageTagger, self).__init__()
super().__init__()
self.client = client
parameters = self.client.module.params

View File

@ -145,7 +145,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
)
class DockerFileStore(object):
class DockerFileStore:
"""
A custom credential store class that implements only the functionality we need to
update the docker config file when no credential helper is provided.
@ -238,7 +238,7 @@ class LoginManager(DockerBaseClass):
def __init__(self, client, results):
super(LoginManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -303,7 +303,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class TaskParameters(DockerBaseClass):
def __init__(self, client):
super(TaskParameters, self).__init__()
super().__init__()
self.client = client
self.name = None
@ -385,7 +385,7 @@ def dicts_are_essentially_equal(a, b):
return True
class DockerNetworkManager(object):
class DockerNetworkManager:
def __init__(self, client):
self.client = client

View File

@ -157,7 +157,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class TaskParameters(DockerBaseClass):
def __init__(self, client):
super(TaskParameters, self).__init__()
super().__init__()
# Spec
self.name = None
@ -179,7 +179,7 @@ class SwarmNodeManager(DockerBaseClass):
def __init__(self, client, results):
super(SwarmNodeManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -149,7 +149,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class TaskParameters(DockerBaseClass):
def __init__(self, client):
super(TaskParameters, self).__init__()
super().__init__()
self.client = client
self.plugin_name = None
self.alias = None
@ -174,7 +174,7 @@ def parse_options(options_list):
return dict(x.split("=", 1) for x in options_list) if options_list else {}
class DockerPluginManager(object):
class DockerPluginManager:
def __init__(self, client):
self.client = client

View File

@ -214,7 +214,7 @@ class SecretManager(DockerBaseClass):
def __init__(self, client, results):
super(SecretManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -315,7 +315,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class TaskParameters(DockerBaseClass):
def __init__(self):
super(TaskParameters, self).__init__()
super().__init__()
self.advertise_addr = None
self.listen_addr = None
@ -461,7 +461,7 @@ class SwarmManager(DockerBaseClass):
def __init__(self, client, results):
super(SwarmManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -210,7 +210,7 @@ class DockerSwarmManager(DockerBaseClass):
def __init__(self, client, results):
super(DockerSwarmManager, self).__init__()
super().__init__()
self.client = client
self.results = results

View File

@ -1127,7 +1127,7 @@ def have_networks_changed(new_networks, old_networks):
class DockerService(DockerBaseClass):
def __init__(self, docker_api_version, docker_py_version):
super(DockerService, self).__init__()
super().__init__()
self.image = ""
self.command = None
self.args = None
@ -2174,7 +2174,7 @@ class DockerService(DockerBaseClass):
return service
class DockerServiceManager(object):
class DockerServiceManager:
def __init__(self, client):
self.client = client

View File

@ -137,7 +137,7 @@ from ansible_collections.community.docker.plugins.module_utils._util import (
class TaskParameters(DockerBaseClass):
def __init__(self, client):
super(TaskParameters, self).__init__()
super().__init__()
self.client = client
self.volume_name = None
@ -151,7 +151,7 @@ class TaskParameters(DockerBaseClass):
setattr(self, key, value)
class DockerVolumeManager(object):
class DockerVolumeManager:
def __init__(self, client):
self.client = client

View File

@ -7,10 +7,6 @@
from __future__ import annotations
__metaclass__ = type
from ansible.errors import AnsibleConnectionFailure
from ansible.utils.display import Display
from ansible_collections.community.docker.plugins.module_utils._common import (
@ -25,7 +21,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
def __init__(self, plugin, min_docker_version=None, min_docker_api_version=None):
self.plugin = plugin
self.display = Display()
super(AnsibleDockerClient, self).__init__(
super().__init__(
min_docker_version=min_docker_version,
min_docker_api_version=min_docker_api_version,
)

View File

@ -21,9 +21,7 @@ class AnsibleDockerClient(AnsibleDockerClientBase):
def __init__(self, plugin, min_docker_api_version=None):
self.plugin = plugin
self.display = Display()
super(AnsibleDockerClient, self).__init__(
min_docker_api_version=min_docker_api_version
)
super().__init__(min_docker_api_version=min_docker_api_version)
def fail(self, msg, **kwargs):
if kwargs:

View File

@ -7,10 +7,6 @@
from __future__ import annotations
__metaclass__ = type
import selectors
from ansible_collections.community.docker.plugins.module_utils._socket_handler import (
@ -20,6 +16,6 @@ from ansible_collections.community.docker.plugins.module_utils._socket_handler i
class DockerSocketHandler(DockerSocketHandlerBase):
def __init__(self, display, sock, log=None, container=None):
super(DockerSocketHandler, self).__init__(
super().__init__(
sock, selectors, log=lambda msg: display.vvvv(msg, host=container)
)

View File

@ -3,7 +3,6 @@
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import annotations
__metaclass__ = type
def _normalize_ipaddr(ipaddr):
@ -12,7 +11,7 @@ def _normalize_ipaddr(ipaddr):
return ipaddress.ip_address(ipaddr).compressed
class FilterModule(object):
class FilterModule:
""" IP address and network manipulation filters """
def filters(self):

View File

@ -3,7 +3,6 @@
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import annotations
__metaclass__ = type
def sanitize_host_info(data):

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
from io import StringIO
from ansible.errors import AnsibleError

View File

@ -4,10 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible.inventory.data import InventoryData
from ansible.parsing.dataloader import DataLoader
@ -96,7 +92,7 @@ def create_get_option(options, default=False):
return get_option
class FakeClient(object):
class FakeClient:
def __init__(self, *hosts):
self.get_results = {}
list_reply = []

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import datetime
import io
import json
@ -483,7 +480,7 @@ class TCPSocketStreamTest(unittest.TestCase):
stdout_data = cls.stdout_data
stderr_data = cls.stderr_data
class Handler(BaseHTTPRequestHandler, object):
class Handler(BaseHTTPRequestHandler):
def do_POST(self):
resp_data = self.get_resp_data()
self.send_response(101)

View File

@ -9,6 +9,4 @@
from __future__ import annotations
__metaclass__ = type
DEFAULT_DOCKER_API_VERSION = "1.45"

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
from ansible_collections.community.docker.plugins.module_utils._api import constants
from ansible_collections.community.docker.tests.unit.plugins.module_utils._api.constants import (
DEFAULT_DOCKER_API_VERSION,

View File

@ -9,8 +9,6 @@
from __future__ import annotations
__metaclass__ = type
OBJ = {
"read": "2015-02-11T19:20:46.667237763+02:00",
"network": {

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import base64
import json
import os

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import unittest
import pytest

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import unittest
import requests

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import unittest
from ansible_collections.community.docker.plugins.module_utils._api.transport.sshconn import (

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import unittest
import pytest

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import os
import os.path
import shutil

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import json
import os
import shutil

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import unittest
from ansible_collections.community.docker.plugins.module_utils._api.api.client import (

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
from ansible_collections.community.docker.plugins.module_utils._api.utils.json_stream import (
json_splitter,
json_stream,

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import unittest
import pytest

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import unittest
from ansible_collections.community.docker.plugins.module_utils._api.utils.proxy import (

View File

@ -8,9 +8,6 @@
from __future__ import annotations
__metaclass__ = type
import base64
import json
import os

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
from ansible_collections.community.docker.plugins.module_utils._compose_v2 import (
Event,
)

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.module_utils._compose_v2 import (
Event,

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.module_utils._copy import (
_stream_generator_to_fileobj,

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import tarfile
import pytest

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.module_utils._logfmt import (
InvalidLogFmt,

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.module_utils._scramble import (
scramble,

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.module_utils._util import (
compare_dict_allow_more_present,

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.modules.docker_container_copy_into import (
parse_modern,

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.module_utils._image_archive import (
api_image_id,

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.modules.docker_image_build import (
_quote_csv,

View File

@ -6,9 +6,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest
from ansible_collections.community.docker.plugins.modules.docker_network import (
validate_cidr,

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import pytest

View File

@ -4,9 +4,6 @@
from __future__ import annotations
__metaclass__ = type
import json
import tarfile
from tempfile import TemporaryFile