Mirror of https://github.com/ansible-collections/community.docker.git (synced 2025-12-13 02:22:04 +00:00)

Compare commits: 15 commits, b5fdcfce99 ... ca38a543e8
Commits in this compare (SHA1):

- ca38a543e8
- faa7dee456
- 908c23a3c3
- 350f67d971
- 846fc8564b
- d2947476f7
- 5d2b4085ec
- a869184ad4
- 260e9cc254
- 76b5c2c742
- ddbbbe5b9e
- f9925d770e
- c8ff5847a3
- 48745bf8c1
- 30f3a1321c
.github/workflows/docker-images.yml (vendored, 2 changed lines)

@ -45,7 +45,7 @@ jobs:
      steps:
        - name: Check out repository
-         uses: actions/checkout@v5
+         uses: actions/checkout@v6
          with:
            persist-credentials: false
@ -388,6 +388,8 @@ disable=raw-checker-failed,
    unused-argument,
    # Cannot remove yet due to inadequacy of rules
    inconsistent-return-statements, # doesn't notice that fail_json() does not return
    # Buggy implementation in pylint:
    relative-beyond-top-level, # TODO

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
CHANGELOG.md (712 changed lines; diff shown only partially because the file is too large)

@ -4,6 +4,19 @@ Docker Community Collection Release Notes

.. contents:: Topics

v5.0.3
======

Release Summary
---------------

Bugfix release.

Bugfixes
--------

- docker_container - when the same port is mapped more than once for the same protocol without specifying an interface, a bug caused an invalid value to be passed for the interface (https://github.com/ansible-collections/community.docker/issues/1213, https://github.com/ansible-collections/community.docker/pull/1214).

v5.0.2
======
@ -99,6 +99,7 @@ If you use the Ansible package and do not update collections independently, use
  - community.docker.docker_volume_info: retrieve information on Docker volumes
* Docker Compose:
  - community.docker.docker_compose_v2: manage Docker Compose files (Docker compose CLI plugin)
+ - community.docker.docker_compose_v2_build: build images for a Docker compose project
  - community.docker.docker_compose_v2_exec: run a command in a container of a Compose service
  - community.docker.docker_compose_v2_pull: pull a Docker compose project
  - community.docker.docker_compose_v2_run: run a command in a new container of a Compose service
@ -2318,3 +2318,15 @@ releases:
      - 1201-docker_network.yml
      - 5.0.2.yml
    release_date: '2025-11-16'
  5.0.3:
    changes:
      bugfixes:
        - docker_container - when the same port is mapped more than once for the same
          protocol without specifying an interface, a bug caused an invalid value
          to be passed for the interface (https://github.com/ansible-collections/community.docker/issues/1213,
          https://github.com/ansible-collections/community.docker/pull/1214).
      release_summary: Bugfix release.
    fragments:
      - 1214-docker_container-ports.yml
      - 5.0.3.yml
    release_date: '2025-11-29'
@ -263,6 +263,9 @@ There are several modules for working with Docker Compose projects:

community.docker.docker_compose_v2
    The :ansplugin:`community.docker.docker_compose_v2 module <community.docker.docker_compose_v2#module>` allows you to use your existing Docker Compose files to orchestrate containers on a single Docker daemon or on Swarm.

community.docker.docker_compose_v2_build
    The :ansplugin:`community.docker.docker_compose_v2_build module <community.docker.docker_compose_v2_build#module>` allows you to build images for Docker Compose projects.

community.docker.docker_compose_v2_exec
    The :ansplugin:`community.docker.docker_compose_v2_exec module <community.docker.docker_compose_v2_exec#module>` allows you to run a command in a container of Docker Compose projects.
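As orientation for the scenario guide entries above, here is a minimal sketch that chains the new build module with community.docker.docker_compose_v2; the play target docker_hosts and the path /path/to/project are placeholders, and only options appearing elsewhere in this diff (project_src, no_cache, state) are used:

- name: Build a Compose project and bring it up
  hosts: docker_hosts
  tasks:
    - name: Build the project's images without using the build cache
      community.docker.docker_compose_v2_build:
        project_src: /path/to/project
        no_cache: true

    - name: Create and start the services
      community.docker.docker_compose_v2:
        project_src: /path/to/project
        state: present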
@ -7,6 +7,7 @@ requires_ansible: '>=2.17.0'
action_groups:
  docker:
    - docker_compose_v2
+   - docker_compose_v2_build
    - docker_compose_v2_exec
    - docker_compose_v2_pull
    - docker_compose_v2_run
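Because the new module is part of the docker action group declared above, playbooks can set shared connection options once via module_defaults. A small sketch under that assumption; the host pattern, the TCP endpoint, and the project path are placeholders:

- name: Apply shared defaults through the community.docker action group
  hosts: docker_hosts
  module_defaults:
    group/community.docker.docker:
      docker_host: tcp://192.0.2.10:2375
  tasks:
    - name: Build the project's images against the configured daemon
      community.docker.docker_compose_v2_build:
        project_src: /path/to/project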
@ -43,10 +43,8 @@ docker_version: str | None # pylint: disable=invalid-name

try:
    from docker import __version__ as docker_version
-   from docker import auth
-   from docker.errors import APIError, NotFound, TLSParameterError
+   from docker.errors import APIError, TLSParameterError
    from docker.tls import TLSConfig
-   from requests.exceptions import SSLError

    if LooseVersion(docker_version) >= LooseVersion("3.0.0"):
        HAS_DOCKER_PY_3 = True  # pylint: disable=invalid-name
@ -391,242 +389,6 @@ class AnsibleDockerClientBase(Client):

(This hunk removes the helper methods below from AnsibleDockerClientBase; the first two lines and the trailing class definition are unchanged context.)

        )
        self.fail(f"SSL Exception: {error}")

    def get_container_by_id(self, container_id: str) -> dict[str, t.Any] | None:
        try:
            self.log(f"Inspecting container Id {container_id}")
            result = self.inspect_container(container=container_id)
            self.log("Completed container inspection")
            return result
        except NotFound:
            return None
        except Exception as exc:  # pylint: disable=broad-exception-caught
            self.fail(f"Error inspecting container: {exc}")

    def get_container(self, name: str | None) -> dict[str, t.Any] | None:
        """
        Lookup a container and return the inspection results.
        """
        if name is None:
            return None

        search_name = name
        if not name.startswith("/"):
            search_name = "/" + name

        result = None
        try:
            for container in self.containers(all=True):
                self.log(f"testing container: {container['Names']}")
                if (
                    isinstance(container["Names"], list)
                    and search_name in container["Names"]
                ):
                    result = container
                    break
                if container["Id"].startswith(name):
                    result = container
                    break
                if container["Id"] == name:
                    result = container
                    break
        except SSLError as exc:
            self._handle_ssl_error(exc)
        except Exception as exc:  # pylint: disable=broad-exception-caught
            self.fail(f"Error retrieving container list: {exc}")

        if result is None:
            return None

        return self.get_container_by_id(result["Id"])

    def get_network(
        self, name: str | None = None, network_id: str | None = None
    ) -> dict[str, t.Any] | None:
        """
        Lookup a network and return the inspection results.
        """
        if name is None and network_id is None:
            return None

        result = None

        if network_id is None:
            try:
                for network in self.networks():
                    self.log(f"testing network: {network['Name']}")
                    if name == network["Name"]:
                        result = network
                        break
                    if network["Id"].startswith(name):
                        result = network
                        break
            except SSLError as exc:
                self._handle_ssl_error(exc)
            except Exception as exc:  # pylint: disable=broad-exception-caught
                self.fail(f"Error retrieving network list: {exc}")

        if result is not None:
            network_id = result["Id"]

        if network_id is not None:
            try:
                self.log(f"Inspecting network Id {network_id}")
                result = self.inspect_network(network_id)
                self.log("Completed network inspection")
            except NotFound:
                return None
            except Exception as exc:  # pylint: disable=broad-exception-caught
                self.fail(f"Error inspecting network: {exc}")

        return result

    def find_image(self, name: str, tag: str) -> dict[str, t.Any] | None:
        """
        Lookup an image (by name and tag) and return the inspection results.
        """
        if not name:
            return None

        self.log(f"Find image {name}:{tag}")
        images = self._image_lookup(name, tag)
        if not images:
            # In API <= 1.20 seeing 'docker.io/<name>' as the name of images pulled from docker hub
            registry, repo_name = auth.resolve_repository_name(name)
            if registry == "docker.io":
                # If docker.io is explicitly there in name, the image
                # is not found in some cases (#41509)
                self.log(f"Check for docker.io image: {repo_name}")
                images = self._image_lookup(repo_name, tag)
                if not images and repo_name.startswith("library/"):
                    # Sometimes library/xxx images are not found
                    lookup = repo_name[len("library/") :]
                    self.log(f"Check for docker.io image: {lookup}")
                    images = self._image_lookup(lookup, tag)
                if not images:
                    # Last case for some Docker versions: if docker.io was not there,
                    # it can be that the image was not found either
                    # (https://github.com/ansible/ansible/pull/15586)
                    lookup = f"{registry}/{repo_name}"
                    self.log(f"Check for docker.io image: {lookup}")
                    images = self._image_lookup(lookup, tag)
                if not images and "/" not in repo_name:
                    # This seems to be happening with podman-docker
                    # (https://github.com/ansible-collections/community.docker/issues/291)
                    lookup = f"{registry}/library/{repo_name}"
                    self.log(f"Check for docker.io image: {lookup}")
                    images = self._image_lookup(lookup, tag)

        if len(images) > 1:
            self.fail(f"Daemon returned more than one result for {name}:{tag}")

        if len(images) == 1:
            try:
                inspection = self.inspect_image(images[0]["Id"])
            except NotFound:
                self.log(f"Image {name}:{tag} not found.")
                return None
            except Exception as exc:  # pylint: disable=broad-exception-caught
                self.fail(f"Error inspecting image {name}:{tag} - {exc}")
            return inspection

        self.log(f"Image {name}:{tag} not found.")
        return None

    def find_image_by_id(
        self, image_id: str, accept_missing_image: bool = False
    ) -> dict[str, t.Any] | None:
        """
        Lookup an image (by ID) and return the inspection results.
        """
        if not image_id:
            return None

        self.log(f"Find image {image_id} (by ID)")
        try:
            inspection = self.inspect_image(image_id)
        except NotFound as exc:
            if not accept_missing_image:
                self.fail(f"Error inspecting image ID {image_id} - {exc}")
            self.log(f"Image {image_id} not found.")
            return None
        except Exception as exc:  # pylint: disable=broad-exception-caught
            self.fail(f"Error inspecting image ID {image_id} - {exc}")
        return inspection

    def _image_lookup(self, name: str, tag: str) -> list[dict[str, t.Any]]:
        """
        Including a tag in the name parameter sent to the Docker SDK for Python images method
        does not work consistently. Instead, get the result set for name and manually check
        if the tag exists.
        """
        try:
            response = self.images(name=name)
        except Exception as exc:  # pylint: disable=broad-exception-caught
            self.fail(f"Error searching for image {name} - {exc}")
        images = response
        if tag:
            lookup = f"{name}:{tag}"
            lookup_digest = f"{name}@{tag}"
            images = []
            for image in response:
                tags = image.get("RepoTags")
                digests = image.get("RepoDigests")
                if (tags and lookup in tags) or (digests and lookup_digest in digests):
                    images = [image]
                    break
        return images

    def pull_image(
        self, name: str, tag: str = "latest", image_platform: str | None = None
    ) -> tuple[dict[str, t.Any] | None, bool]:
        """
        Pull an image
        """
        kwargs = {
            "tag": tag,
            "stream": True,
            "decode": True,
        }
        if image_platform is not None:
            kwargs["platform"] = image_platform
        self.log(f"Pulling image {name}:{tag}")
        old_tag = self.find_image(name, tag)
        try:
            for line in self.pull(name, **kwargs):
                self.log(line, pretty_print=True)
                if line.get("error"):
                    if line.get("errorDetail"):
                        error_detail = line.get("errorDetail")
                        self.fail(
                            f"Error pulling {name} - code: {error_detail.get('code')} message: {error_detail.get('message')}"
                        )
                    else:
                        self.fail(f"Error pulling {name} - {line.get('error')}")
        except Exception as exc:  # pylint: disable=broad-exception-caught
            self.fail(f"Error pulling image {name}:{tag} - {exc}")

        new_tag = self.find_image(name, tag)

        return new_tag, old_tag == new_tag

    def inspect_distribution(self, image: str, **kwargs: t.Any) -> dict[str, t.Any]:
        """
        Get image digest by directly calling the Docker API when running Docker SDK < 4.0.0
        since prior versions did not support accessing private repositories.
        """
        if self.docker_py_version < LooseVersion("4.0.0"):
            registry = auth.resolve_repository_name(image)[0]
            header = auth.get_config_header(self, registry)
            if header:
                return self._result(
                    self._get(
                        self._url("/distribution/{0}/json", image),
                        headers={"X-Registry-Auth": header},
                    ),
                    json=True,
                )
        return super().inspect_distribution(image, **kwargs)


class AnsibleDockerClient(AnsibleDockerClientBase):
    def __init__(
@ -29,6 +29,7 @@ from ansible_collections.community.docker.plugins.module_utils._common_api impor
    RequestException,
)
from ansible_collections.community.docker.plugins.module_utils._module_container.base import (
+   _DEFAULT_IP_REPLACEMENT_STRING,
    OPTION_AUTO_REMOVE,
    OPTION_BLKIO_WEIGHT,
    OPTION_CAP_DROP,

@ -127,11 +128,6 @@ if t.TYPE_CHECKING:
    Sentry = object


-_DEFAULT_IP_REPLACEMENT_STRING = (
-    "[[DEFAULT_IP:iewahhaeB4Sae6Aen8IeShairoh4zeph7xaekoh8Geingunaesaeweiy3ooleiwi]]"
-)


_SENTRY: Sentry = object()
@ -2093,16 +2089,26 @@ def _preprocess_value_ports(
     if "published_ports" not in values:
         return values
     found = False
-    for port_spec in values["published_ports"].values():
-        if port_spec[0] == _DEFAULT_IP_REPLACEMENT_STRING:
-            found = True
-            break
+    for port_specs in values["published_ports"].values():
+        if not isinstance(port_specs, list):
+            port_specs = [port_specs]
+        for port_spec in port_specs:
+            if port_spec[0] == _DEFAULT_IP_REPLACEMENT_STRING:
+                found = True
+                break
     if not found:
         return values
     default_ip = _get_default_host_ip(module, client)
-    for port, port_spec in values["published_ports"].items():
-        if port_spec[0] == _DEFAULT_IP_REPLACEMENT_STRING:
-            values["published_ports"][port] = tuple([default_ip] + list(port_spec[1:]))
+    for port, port_specs in values["published_ports"].items():
+        if isinstance(port_specs, list):
+            for index, port_spec in enumerate(port_specs):
+                if port_spec[0] == _DEFAULT_IP_REPLACEMENT_STRING:
+                    port_specs[index] = tuple([default_ip] + list(port_spec[1:]))
+        else:
+            if port_specs[0] == _DEFAULT_IP_REPLACEMENT_STRING:
+                values["published_ports"][port] = tuple(
+                    [default_ip] + list(port_specs[1:])
+                )
     return values
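The list handling added above corresponds to playbooks that publish the same container port several times without naming an interface, which is the case described in the changelog entry. A minimal task showing the pattern; the container name and image are placeholders, and the port pairs mirror the integration test added further below:

- name: Publish container port 80 on two host ports without an explicit interface
  community.docker.docker_container:
    name: web
    image: alpine:latest  # placeholder image
    command: '/bin/sh -c "sleep 10m"'
    state: started
    published_ports:
      - 8000:80
      - 10000:80

With such input the old code passed an invalid value for the interface; the new code walks each entry of the per-port list instead of assuming a single port spec.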
@ -173,7 +173,10 @@ author:
  - Felix Fontein (@felixfontein)

seealso:
  - module: community.docker.docker_compose_v2_build
  - module: community.docker.docker_compose_v2_exec
  - module: community.docker.docker_compose_v2_pull
  - module: community.docker.docker_compose_v2_run
"""

EXAMPLES = r"""
plugins/modules/docker_compose_v2_build.py (new file, 190 lines)

@ -0,0 +1,190 @@
#!/usr/bin/python
#
# Copyright (c) 2023, Felix Fontein <felix@fontein.de>
# Copyright (c) 2025, Maciej Bogusz (@mjbogusz)
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type


DOCUMENTATION = r"""
module: docker_compose_v2_build

short_description: Build a Docker compose project

version_added: 4.7.0

description:
  - Uses Docker Compose to build images for a project.
extends_documentation_fragment:
  - community.docker.compose_v2
  - community.docker.compose_v2.minimum_version
  - community.docker.docker.cli_documentation
  - community.docker.attributes
  - community.docker.attributes.actiongroup_docker

attributes:
  check_mode:
    support: full
  diff_mode:
    support: none
  idempotent:
    support: full

options:
  no_cache:
    description:
      - If set to V(true), will not use cache when building the images.
    type: bool
    default: false
  pull:
    description:
      - If set to V(true), will attempt to pull newer version of the image.
    type: bool
    default: false
  with_dependencies:
    description:
      - If set to V(true), also build services that are declared as dependencies.
      - This only makes sense if O(services) is used.
    type: bool
    default: false
  memory_limit:
    description:
      - Memory limit for the build container, in bytes. Not supported by BuildKit.
    type: int
  services:
    description:
      - Specifies a subset of services to be targeted.
    type: list
    elements: str

author:
  - Maciej Bogusz (@mjbogusz)

seealso:
  - module: community.docker.docker_compose_v2
"""

EXAMPLES = r"""
---
- name: Build images for flask project
  community.docker.docker_compose_v2_build:
    project_src: /path/to/flask
"""

RETURN = r"""
actions:
  description:
    - A list of actions that have been applied.
  returned: success
  type: list
  elements: dict
  contains:
    what:
      description:
        - What kind of resource was changed.
      type: str
      sample: container
      choices:
        - image
        - unknown
    id:
      description:
        - The ID of the resource that was changed.
      type: str
      sample: container
    status:
      description:
        - The status change that happened.
      type: str
      sample: Building
      choices:
        - Building
"""

import traceback

from ansible.module_utils.common.text.converters import to_native

from ansible_collections.community.docker.plugins.module_utils.common_cli import (
    AnsibleModuleDockerClient,
    DockerException,
)

from ansible_collections.community.docker.plugins.module_utils.compose_v2 import (
    BaseComposeManager,
    common_compose_argspec_ex,
)


class BuildManager(BaseComposeManager):
    def __init__(self, client):
        super(BuildManager, self).__init__(client)
        parameters = self.client.module.params

        self.no_cache = parameters['no_cache']
        self.pull = parameters['pull']
        self.with_dependencies = parameters['with_dependencies']
        self.memory_limit = parameters['memory_limit']
        self.services = parameters['services'] or []

    def get_build_cmd(self, dry_run):
        args = self.get_base_args() + ['build']
        if self.no_cache:
            args.append('--no-cache')
        if self.pull:
            args.append('--pull')
        if self.with_dependencies:
            args.append('--with-dependencies')
        if self.memory_limit:
            args.extend(['--memory', str(self.memory_limit)])
        if dry_run:
            args.append('--dry-run')
        args.append('--')
        for service in self.services:
            args.append(service)
        return args

    def run(self):
        result = dict()
        args = self.get_build_cmd(self.check_mode)
        rc, stdout, stderr = self.client.call_cli(*args, cwd=self.project_src)
        events = self.parse_events(stderr, dry_run=self.check_mode, nonzero_rc=rc != 0)
        self.emit_warnings(events)
        self.update_result(result, events, stdout, stderr, ignore_build_events=False)
        self.update_failed(result, events, args, stdout, stderr, rc)
        self.cleanup_result(result)
        return result


def main():
    argument_spec = dict(
        no_cache=dict(type='bool', default=False),
        pull=dict(type='bool', default=False),
        with_dependencies=dict(type='bool', default=False),
        memory_limit=dict(type='int'),
        services=dict(type='list', elements='str'),
    )
    argspec_ex = common_compose_argspec_ex()
    argument_spec.update(argspec_ex.pop('argspec'))

    client = AnsibleModuleDockerClient(
        argument_spec=argument_spec,
        supports_check_mode=True,
        needs_api_version=False,
        **argspec_ex
    )

    try:
        manager = BuildManager(client)
        result = manager.run()
        manager.cleanup()
        client.module.exit_json(**result)
    except DockerException as e:
        client.fail('An unexpected docker error occurred: {0}'.format(to_native(e)), exception=traceback.format_exc())


if __name__ == '__main__':
    main()
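Building on the options documented in the new module, a hedged usage sketch; the service name web, the project path, and the registered variable are placeholders, and every parameter used (services, with_dependencies, pull, no_cache) comes from the argument spec above:

- name: Rebuild only the web service and its declared dependencies, pulling newer base images
  community.docker.docker_compose_v2_build:
    project_src: /path/to/project
    services:
      - web
    with_dependencies: true
    pull: true
    no_cache: true
  register: build_result

- name: Show the build actions reported by the module
  ansible.builtin.debug:
    var: build_result.actions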
@ -0,0 +1,6 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

azp/4
destructive
@ -0,0 +1,10 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

dependencies:
  - setup_docker_cli_compose
  # The Python dependencies are needed for the other modules
  - setup_docker_python_deps
  - setup_remote_tmp_dir
@ -0,0 +1,59 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

####################################################################
# WARNING: These are designed specifically for Ansible tests       #
# and should not be used as examples of how to write Ansible roles #
####################################################################

# Create random name prefix (for services, ...)
- name: Create random container name prefix
  set_fact:
    name_prefix: "{{ 'ansible-docker-test-%0x' % ((2**32) | random) }}"
    cnames: []
    inames: []
    dnetworks: []

- debug:
    msg: "Using name prefix {{ name_prefix }}"

- name: Show images
  command: docker images --all --digests

# Run the tests
- block:
    - name: Show docker compose --help output
      command: docker compose --help

    - include_tasks: run-test.yml
      with_fileglob:
        - "tests/*.yml"
      loop_control:
        loop_var: test_name

  always:
    - name: "Make sure all containers are removed"
      docker_container:
        name: "{{ item }}"
        state: absent
        force_kill: true
      with_items: "{{ cnames }}"
      diff: false

    - name: "Make sure all images are removed"
      docker_image_remove:
        name: "{{ item }}"
      with_items: "{{ inames }}"
      diff: false

    - name: "Make sure all networks are removed"
      docker_network:
        name: "{{ item }}"
        state: absent
        force: true
      with_items: "{{ dnetworks }}"
      diff: false

  when: docker_has_compose and docker_compose_version is version('2.18.0', '>=')
@ -0,0 +1,7 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

- name: "Loading tasks from {{ test_name }}"
  include_tasks: "{{ test_name }}"
@ -0,0 +1,159 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

- vars:
    bname: "{{ name_prefix }}-build"
    cname: "{{ name_prefix }}-cont"
    non_existing_image: does-not-exist:latest
    dockerfile_path: test-dockerfile
    base_image: "{{ docker_test_image_hello_world }}"
    image_name: "{{ name_prefix }}-image"
    project_src: "{{ remote_tmp_dir }}/{{ bname }}"
    test_service_non_existing: |
      services:
        {{ cname }}:
          image: {{ non_existing_image }}
          build:
            dockerfile: Dockerfile-does-not-exist
    test_service_simple: |
      services:
        {{ cname }}:
          image: {{ image_name }}
          build:
            dockerfile: {{ dockerfile_path }}
          command: 10m
          stop_grace_period: 1s
    test_service_simple_dockerfile: |
      FROM {{ base_image }}
      RUN [ "/hello" ]

  block:
    - name: Registering container name
      set_fact:
        cnames: "{{ cnames + [bname ~ '-' ~ cname ~ '-1'] }}"
        inames: "{{ inames + [ base_image, image_name ] }}"
        dnetworks: "{{ dnetworks + [bname ~ '_default'] }}"

    - name: Create project directory
      file:
        path: '{{ project_src }}'
        state: directory

    - name: Make sure images are not around
      docker_image_remove:
        name: '{{ item }}'
      loop:
        - '{{ non_existing_image }}'
        - '{{ image_name }}'

    - name: Prune docker build cache
      docker_prune:
        builder_cache: true
        builder_cache_all: true

    ####################################################################
    ## Image with missing dockerfile ###################################
    ####################################################################

    - name: Template project file with non-existing image
      copy:
        dest: '{{ project_src }}/docker-compose.yml'
        content: '{{ test_service_non_existing }}'

    - name: Build (check)
      docker_compose_v2_build:
        project_src: '{{ project_src }}'
      check_mode: true
      register: build_1_check
      ignore_errors: true

    - name: Build
      docker_compose_v2_build:
        project_src: '{{ project_src }}'
      register: build_1
      ignore_errors: true

    - assert:
        that:
          - build_1_check is failed
          - >-
            build_1_check.msg | trim == "General error: failed to solve: failed to read dockerfile: open Dockerfile-does-not-exist: no such file or directory"
          - build_1_check.warnings | default([]) | select('regex', 'Cannot parse event from ') | length == 0
          - build_1 is failed
          - >-
            build_1.msg | trim == "General error: failed to solve: failed to read dockerfile: open Dockerfile-does-not-exist: no such file or directory"
          - build_1.warnings | default([]) | select('regex', 'Cannot parse event from ') | length == 0

    ####################################################################
    ## Regular image ###################################################
    ####################################################################

    - name: Template project file with simple dockerfile
      copy:
        dest: '{{ project_src }}/docker-compose.yml'
        content: '{{ test_service_simple }}'

    - name: Template dockerfile
      copy:
        dest: '{{ project_src }}/{{ dockerfile_path }}'
        content: '{{ test_service_simple_dockerfile }}'

    - docker_image_info:
        name: "{{ image_name }}"
      register: pre_image

    - name: Build (check)
      docker_compose_v2_build:
        project_src: '{{ project_src }}'
      check_mode: true
      register: build_1_check

    - docker_image_info:
        name: "{{ image_name }}"
      register: build_1_check_image

    - name: Build
      docker_compose_v2_build:
        project_src: '{{ project_src }}'
      register: build_1

    - docker_image_info:
        name: "{{ image_name }}"
      register: build_1_image

    - name: Build (idempotent, check)
      docker_compose_v2_build:
        project_src: '{{ project_src }}'
      check_mode: true
      register: build_2_check

    - docker_image_info:
        name: "{{ image_name }}"
      register: build_2_check_image

    - name: Build (idempotent)
      docker_compose_v2_build:
        project_src: '{{ project_src }}'
      register: build_2

    - docker_image_info:
        name: "{{ image_name }}"
      register: build_2_image

    - assert:
        that:
          - build_1_check is changed
          - (build_1_check.actions | selectattr('status', 'eq', 'Building') | first) is truthy
          - build_1_check.warnings | default([]) | select('regex', 'Cannot parse event from ') | length == 0
          - build_1 is changed
          - (build_1.actions | selectattr('status', 'eq', 'Building') | first) is truthy
          - build_1.warnings | default([]) | select('regex', 'Cannot parse event from ') | length == 0

          - build_2_check is not changed
          - build_2_check.actions | selectattr('status', 'eq', 'Building') | length == 0
          - build_2_check.warnings | default([]) | select('regex', 'Cannot parse event from ') | length == 0
          - build_2 is not changed
          - build_2.actions | selectattr('status', 'eq', 'Building') | length == 0
          - build_2.warnings | default([]) | select('regex', 'Cannot parse event from ') | length == 0
@ -277,6 +277,58 @@
      - published_ports_2 is not changed
      - published_ports_3 is changed

####################################################################
## published_ports: duplicate ports ################################
####################################################################

- name: published_ports -- duplicate ports
  community.docker.docker_container:
    image: "{{ docker_test_image_alpine }}"
    command: '/bin/sh -c "sleep 10m"'
    name: "{{ cname }}"
    state: started
    published_ports:
      - 8000:80
      - 10000:80
  register: published_ports_1

- name: published_ports -- duplicate ports (idempotency)
  community.docker.docker_container:
    image: "{{ docker_test_image_alpine }}"
    command: '/bin/sh -c "sleep 10m"'
    name: "{{ cname }}"
    state: started
    published_ports:
      - 8000:80
      - 10000:80
    force_kill: true
  register: published_ports_2

- name: published_ports -- duplicate ports (idempotency w/ protocol)
  community.docker.docker_container:
    image: "{{ docker_test_image_alpine }}"
    command: '/bin/sh -c "sleep 10m"'
    name: "{{ cname }}"
    state: started
    published_ports:
      - 8000:80/tcp
      - 10000:80/tcp
    force_kill: true
  register: published_ports_3

- name: cleanup
  community.docker.docker_container:
    name: "{{ cname }}"
    state: absent
    force_kill: true
  diff: false

- ansible.builtin.assert:
    that:
      - published_ports_1 is changed
      - published_ports_2 is not changed
      - published_ports_3 is not changed

####################################################################
## published_ports: IPv6 addresses #################################
####################################################################