Delete CI. This branch is now End of Life.

Felix Fontein 2025-10-25 13:33:49 +02:00
parent a2470bc232
commit d49971d51b
28 changed files with 0 additions and 1842 deletions


@ -1,9 +0,0 @@
<!--
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
-->
## Azure Pipelines Configuration
Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.


@ -1,292 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
trigger:
batch: true
branches:
include:
- main
- stable-*
pr:
autoCancel: true
branches:
include:
- main
- stable-*
schedules:
- cron: 0 9 * * *
displayName: Nightly
always: true
branches:
include:
- main
- cron: 0 12 * * 0
displayName: Weekly (old stable branches)
always: true
branches:
include:
- stable-3
variables:
- name: checkoutPath
value: ansible_collections/community/docker
- name: coverageBranches
value: main
- name: entryPoint
value: tests/utils/shippable/shippable.sh
- name: fetchDepth
value: 0
resources:
containers:
- container: default
image: quay.io/ansible/azure-pipelines-test-container:7.0.0
pool: Standard
stages:
### Sanity & units
- stage: Ansible_2_19
displayName: Sanity & Units 2.19
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
targets:
- name: Sanity
test: '2.19/sanity/1'
- name: Units
test: '2.19/units/1'
- stage: Ansible_2_18
displayName: Sanity & Units 2.18
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
targets:
- name: Sanity
test: '2.18/sanity/1'
- name: Units
test: '2.18/units/1'
- stage: Ansible_2_17
displayName: Sanity & Units 2.17
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
targets:
- name: Sanity
test: '2.17/sanity/1'
- name: Units
test: '2.17/units/1'
- stage: Ansible_2_16
displayName: Sanity & Units 2.16
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
targets:
- name: Sanity
test: '2.16/sanity/1'
- name: Units
test: '2.16/units/1'
### Docker
- stage: Docker_2_19
displayName: Docker 2.19
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.19/linux/{0}
targets:
- name: Fedora 41
test: fedora41
- name: Ubuntu 22.04
test: ubuntu2204
- name: Ubuntu 24.04
test: ubuntu2404
- name: Alpine 3.21
test: alpine321
groups:
- 4
- 5
# - 6 -- some images no longer work with docker-compose v1
- stage: Docker_2_18
displayName: Docker 2.18
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.18/linux/{0}
targets:
- name: Fedora 40
test: fedora40
- name: Ubuntu 22.04
test: ubuntu2204
- name: Alpine 3.20
test: alpine320
groups:
- 4
- 5
# - 6 -- some images no longer work with docker-compose v1
- stage: Docker_2_17
displayName: Docker 2.17
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.17/linux/{0}
targets:
- name: Fedora 39
test: fedora39
- name: Ubuntu 20.04
test: ubuntu2004
- name: Alpine 3.19
test: alpine319
groups:
- 4
- 5
- 6
- stage: Docker_2_16
displayName: Docker 2.16
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.16/linux/{0}
targets:
- name: Fedora 38
test: fedora38
- name: CentOS 7
test: centos7
- name: openSUSE 15
test: opensuse15
- name: Alpine 3
test: alpine3
groups:
- 4
- 5
- 6
### Community Docker
- stage: Docker_community_2_19
displayName: Docker (community images) 2.19
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.19/linux-community/{0}
targets:
- name: Debian 11 Bullseye
test: debian-bullseye/3.9
- name: Debian 12 Bookworm
test: debian-bookworm/3.11
- name: Debian 13 Trixie
test: debian-13-trixie/3.13
- name: ArchLinux
test: archlinux/3.13
groups:
- 4
- 5
- 6
### Remote
- stage: Remote_2_19
displayName: Remote 2.19
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.19/{0}
targets:
- name: RHEL 9.5 with Docker SDK, urllib3, requests from sources
test: rhel/9.5-dev-latest
groups:
- 1
- 2
- 3
- 4
- 5
# - 6 -- Docker 26 no longer works with docker-compose v1
- stage: Remote_2_18
displayName: Remote 2.18
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.18/{0}
targets:
- name: RHEL 9.4
test: rhel/9.4
groups:
- 1
- 2
- 3
- 4
- 5
# - 6 -- Docker 26 no longer works with docker-compose v1
- stage: Remote_2_17
displayName: Remote 2.17
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.17/{0}
targets:
- name: RHEL 9.3
test: rhel/9.3
groups:
- 1
- 2
- 3
- 4
- 5
# - 6 -- Docker 26 no longer works with docker-compose v1
- stage: Remote_2_16
displayName: Remote 2.16
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.16/{0}
targets:
- name: RHEL 9.2
test: rhel/9.2
# Currently always hangs in group 2
# - name: RHEL 8.8
# test: rhel/8.8
- name: RHEL 7.9
test: rhel/7.9
groups:
- 1
- 2
- 3
- 4
- 5
## Finally
- stage: Summary
condition: succeededOrFailed()
dependsOn:
- Ansible_2_19
- Ansible_2_18
- Ansible_2_17
- Ansible_2_16
- Remote_2_19
- Remote_2_18
- Remote_2_17
- Remote_2_16
- Docker_2_19
- Docker_2_18
- Docker_2_17
- Docker_2_16
- Docker_community_2_19
jobs:
- template: templates/coverage.yml


@ -1,28 +0,0 @@
#!/usr/bin/env bash
# Aggregate code coverage results for later processing.
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eu
agent_temp_directory="$1"
PATH="${PWD}/bin:${PATH}"
mkdir "${agent_temp_directory}/coverage/"
if [[ "$(ansible --version)" =~ \ 2\.9\. ]]; then
exit
fi
options=(--venv --venv-system-site-packages --color -v)
ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}"
if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
# Only analyze coverage if the installed version of ansible-test supports it.
# Doing so allows this script to work unmodified for multiple Ansible versions.
ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
fi


@ -1,64 +0,0 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import shutil
import sys
def main():
"""Main program entry point."""
source_directory = sys.argv[1]
if '/ansible_collections/' in os.getcwd():
output_path = "tests/output"
else:
output_path = "test/results"
destination_directory = os.path.join(output_path, 'coverage')
if not os.path.exists(destination_directory):
os.makedirs(destination_directory)
jobs = {}
count = 0
for name in os.listdir(source_directory):
match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
label = match.group('label')
attempt = int(match.group('attempt'))
jobs[label] = max(attempt, jobs.get(label, 0))
for label, attempt in jobs.items():
name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
source = os.path.join(source_directory, name)
source_files = os.listdir(source)
for source_file in source_files:
source_path = os.path.join(source, source_file)
destination_path = os.path.join(destination_directory, source_file + '.' + label)
print('"%s" -> "%s"' % (source_path, destination_path))
shutil.copyfile(source_path, destination_path)
count += 1
print('Coverage file count: %d' % count)
print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
print('##vso[task.setVariable variable=outputPath]%s' % output_path)
if __name__ == '__main__':
main()
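As a minimal standalone sketch of the naming convention described in the docstring above (the artifact names here are hypothetical, not taken from a real pipeline run), only the highest System.JobAttempt per job label survives:

import re

# Hypothetical artifact names following "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}".
names = [
    'Coverage 1 Sanity 2.19',
    'Coverage 2 Sanity 2.19',  # re-run of the same job; the higher attempt wins
    'Coverage 1 Units 2.19',
]
jobs = {}
for name in names:
    match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
    label = match.group('label')
    attempt = int(match.group('attempt'))
    jobs[label] = max(attempt, jobs.get(label, 0))
print(jobs)  # {'Sanity 2.19': 2, 'Units 2.19': 1}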


@ -1,28 +0,0 @@
#!/usr/bin/env bash
# Check the test results and set variables for use in later steps.
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eu
if [[ "$PWD" =~ /ansible_collections/ ]]; then
output_path="tests/output"
else
output_path="test/results"
fi
echo "##vso[task.setVariable variable=outputPath]${output_path}"
if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
echo "##vso[task.setVariable variable=haveTestResults]true"
fi
if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
echo "##vso[task.setVariable variable=haveBotResults]true"
fi
if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
echo "##vso[task.setVariable variable=haveCoverageData]true"
fi


@ -1,105 +0,0 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""
Upload code coverage reports to codecov.io.
Multiple coverage files from multiple languages are accepted and aggregated after upload.
Python coverage, as well as PowerShell and Python stubs, can all be uploaded.
"""
import argparse
import dataclasses
import pathlib
import shutil
import subprocess
import tempfile
import typing as t
import urllib.request
@dataclasses.dataclass(frozen=True)
class CoverageFile:
name: str
path: pathlib.Path
flags: t.List[str]
@dataclasses.dataclass(frozen=True)
class Args:
dry_run: bool
path: pathlib.Path
def parse_args() -> Args:
parser = argparse.ArgumentParser()
parser.add_argument('-n', '--dry-run', action='store_true')
parser.add_argument('path', type=pathlib.Path)
args = parser.parse_args()
# Store arguments in a typed dataclass
fields = dataclasses.fields(Args)
kwargs = {field.name: getattr(args, field.name) for field in fields}
return Args(**kwargs)
def process_files(directory: pathlib.Path) -> t.Tuple[CoverageFile, ...]:
processed = []
for file in directory.joinpath('reports').glob('coverage*.xml'):
name = file.stem.replace('coverage=', '')
# Get flags from name
flags = name.replace('-powershell', '').split('=') # Drop '-powershell' suffix
flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags] # Remove "-01" from stub files
processed.append(CoverageFile(name, file, flags))
return tuple(processed)
def upload_files(codecov_bin: pathlib.Path, files: t.Tuple[CoverageFile, ...], dry_run: bool = False) -> None:
for file in files:
cmd = [
str(codecov_bin),
'--name', file.name,
'--file', str(file.path),
]
for flag in file.flags:
cmd.extend(['--flags', flag])
if dry_run:
print(f'DRY-RUN: Would run command: {cmd}')
continue
subprocess.run(cmd, check=True)
def download_file(url: str, dest: pathlib.Path, flags: int, dry_run: bool = False) -> None:
if dry_run:
print(f'DRY-RUN: Would download {url} to {dest} and set mode to {flags:o}')
return
with urllib.request.urlopen(url) as resp:
with dest.open('w+b') as f:
# Read data in chunks rather than all at once
shutil.copyfileobj(resp, f, 64 * 1024)
dest.chmod(flags)
def main():
args = parse_args()
url = 'https://ansible-ci-files.s3.amazonaws.com/codecov/linux/codecov'
with tempfile.TemporaryDirectory(prefix='codecov-') as tmpdir:
codecov_bin = pathlib.Path(tmpdir) / 'codecov'
download_file(url, codecov_bin, 0o755, args.dry_run)
files = process_files(args.path)
upload_files(codecov_bin, files, args.dry_run)
if __name__ == '__main__':
main()
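As a small illustration of the flag derivation commented in process_files() above (the report file names below are hypothetical):

import pathlib

# Hypothetical report names in the style produced by `ansible-test coverage xml --stub`.
for file in [pathlib.Path('coverage=units=stub-01.xml'),
             pathlib.Path('coverage=integration=python-3.11-powershell.xml')]:
    name = file.stem.replace('coverage=', '')
    flags = name.replace('-powershell', '').split('=')  # drop '-powershell' suffix
    flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags]  # 'stub-01' -> 'stub'
    print(name, flags)
# units=stub-01 ['units', 'stub']
# integration=python-3.11-powershell ['integration', 'python-3.11']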


@ -1,23 +0,0 @@
#!/usr/bin/env bash
# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eu
PATH="${PWD}/bin:${PATH}"
if [[ "$(ansible --version)" =~ \ 2\.9\. ]]; then
exit
fi
if ! ansible-test --help >/dev/null 2>&1; then
# Install the devel version of ansible-test for generating code coverage reports.
# This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
# Since a version of ansible-test is required that can work with the output from multiple older releases, the devel version is used.
pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
fi
ansible-test coverage xml --group-by command --stub --venv --venv-system-site-packages --color -v


@ -1,38 +0,0 @@
#!/usr/bin/env bash
# Configure the test environment and run the tests.
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eu
entry_point="$1"
test="$2"
read -r -a coverage_branches <<< "$3" # space separated list of branches to run code coverage on for scheduled builds
export COMMIT_MESSAGE
export COMPLETE
export COVERAGE
export IS_PULL_REQUEST
if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
IS_PULL_REQUEST=true
COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
IS_PULL_REQUEST=
COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi
COMPLETE=
COVERAGE=
if [ "${BUILD_REASON}" = "Schedule" ]; then
COMPLETE=yes
if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
COVERAGE=yes
fi
fi
"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"


@ -1,29 +0,0 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import time
def main():
"""Main program entry point."""
start = time.time()
sys.stdin.reconfigure(errors='surrogateescape')
sys.stdout.reconfigure(errors='surrogateescape')
for line in sys.stdin:
seconds = time.time() - start
sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
sys.stdout.flush()
if __name__ == '__main__':
main()


@ -1,34 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# This template adds a job for processing code coverage data.
# It will upload results to Azure Pipelines and codecov.io.
# Use it from a job stage that completes after all other jobs have completed.
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.
jobs:
- job: Coverage
displayName: Code Coverage
container: default
workspace:
clean: all
steps:
- checkout: self
fetchDepth: $(fetchDepth)
path: $(checkoutPath)
- task: DownloadPipelineArtifact@2
displayName: Download Coverage Data
inputs:
path: coverage/
patterns: "Coverage */*=coverage.combined"
- bash: .azure-pipelines/scripts/combine-coverage.py coverage/
displayName: Combine Coverage Data
- bash: .azure-pipelines/scripts/report-coverage.sh
displayName: Generate Coverage Report
condition: gt(variables.coverageFileCount, 0)
- bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
displayName: Publish to codecov.io
condition: gt(variables.coverageFileCount, 0)
continueOnError: true


@ -1,59 +0,0 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
# If this matrix template does not provide the required functionality, consider using the test template directly instead.
parameters:
# A required list of dictionaries, one per test target.
# Each item in the list must contain a "test" or "name" key.
# Both may be provided. If one is omitted, the other will be used.
- name: targets
type: object
# An optional list of values which will be used to multiply the targets list into a matrix.
# Values can be strings or numbers.
- name: groups
type: object
default: []
# An optional format string used to generate the job name.
# - {0} is the name of an item in the targets list.
- name: nameFormat
type: string
default: "{0}"
# An optional format string used to generate the test name.
# - {0} is the name of an item in the targets list.
- name: testFormat
type: string
default: "{0}"
# An optional format string used to add the group to the job name.
# {0} is the formatted name of an item in the targets list.
# {{1}} is the group -- be sure to include the double "{{" and "}}".
- name: nameGroupFormat
type: string
default: "{0} - {{1}}"
# An optional format string used to add the group to the test name.
# {0} is the formatted test of an item in the targets list.
# {{1}} is the group -- be sure to include the double "{{" and "}}".
- name: testGroupFormat
type: string
default: "{0}/{{1}}"
jobs:
- template: test.yml
parameters:
jobs:
- ${{ if eq(length(parameters.groups), 0) }}:
- ${{ each target in parameters.targets }}:
- name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
- ${{ if not(eq(length(parameters.groups), 0)) }}:
- ${{ each group in parameters.groups }}:
- ${{ each target in parameters.targets }}:
- name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
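The format-string parameters above can be hard to picture. As a rough illustration (using Python's str.format, which treats {0} and the doubled {{1}} braces the same way as the Azure Pipelines format() expression), the defaults combined with the Docker 2.19 stage in azure-pipelines.yml above expand as follows:

# Rough Python analog of the nested format() expansion, using values from the
# Docker 2.19 stage above (testFormat: 2.19/linux/{0}, target fedora41, group 4).
test_format = '2.19/linux/{0}'      # parameters.testFormat
test_group_format = '{0}/{{1}}'     # parameters.testGroupFormat default
inner = test_group_format.format(test_format)  # '2.19/linux/{0}/{1}'
print(inner.format('fedora41', 4))             # '2.19/linux/fedora41/4'

name_format = '{0}'                 # parameters.nameFormat default
name_group_format = '{0} - {{1}}'   # parameters.nameGroupFormat default
print(name_group_format.format(name_format).format('Fedora 41', 4))  # 'Fedora 41 - 4'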


@ -1,49 +0,0 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# This template uses the provided list of jobs to create one or more test jobs.
# It can be used directly if needed, or through the matrix template.
parameters:
# A required list of dictionaries, one per test job.
# Each item in the list must contain a "job" and "name" key.
- name: jobs
type: object
jobs:
- ${{ each job in parameters.jobs }}:
- job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
displayName: ${{ job.name }}
container: default
workspace:
clean: all
steps:
- checkout: self
fetchDepth: $(fetchDepth)
path: $(checkoutPath)
- bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
displayName: Run Tests
- bash: .azure-pipelines/scripts/process-results.sh
condition: succeededOrFailed()
displayName: Process Results
- bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
condition: eq(variables.haveCoverageData, 'true')
displayName: Aggregate Coverage Data
- task: PublishTestResults@2
condition: eq(variables.haveTestResults, 'true')
inputs:
testResultsFiles: "$(outputPath)/junit/*.xml"
displayName: Publish Test Results
- task: PublishPipelineArtifact@1
condition: eq(variables.haveBotResults, 'true')
displayName: Publish Bot Results
inputs:
targetPath: "$(outputPath)/bot/"
artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
- task: PublishPipelineArtifact@1
condition: eq(variables.haveCoverageData, 'true')
displayName: Publish Coverage Data
inputs:
targetPath: "$(Agent.TempDirectory)/coverage/"
artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"


@ -1,245 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# For the comprehensive list of the inputs supported by the ansible-community/ansible-test-gh-action GitHub Action, see
# https://github.com/marketplace/actions/ansible-test
name: EOL CI
on:
# Run EOL CI against all pushes (direct commits, including merged PRs) and all pull requests
push:
branches:
- main
- stable-*
pull_request:
# Run EOL CI once per day (at 09:00 UTC)
schedule:
- cron: '0 9 * * *'
concurrency:
# Make sure there is at most one active run per PR, but do not cancel any non-PR runs
group: ${{ github.workflow }}-${{ (github.head_ref && github.event.number) || github.run_id }}
cancel-in-progress: true
jobs:
sanity:
name: EOL Sanity (Ⓐ${{ matrix.ansible }})
strategy:
matrix:
ansible:
- '2.11'
- '2.12'
- '2.13'
- '2.14'
- '2.15'
runs-on: ubuntu-latest
steps:
- name: Perform sanity testing
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-github-repository-slug: ${{ contains(fromJson('["2.10", "2.11"]'), matrix.ansible) && 'felixfontein/ansible' || 'ansible/ansible' }}
ansible-core-version: stable-${{ matrix.ansible }}
codecov-token: ${{ secrets.CODECOV_TOKEN }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
pull-request-change-detection: 'true'
testing-type: sanity
pre-test-cmd: >-
git clone --depth=1 --single-branch --branch stable-1 https://github.com/ansible-collections/community.library_inventory_filtering.git ../../community/library_inventory_filtering_v1
units:
runs-on: ubuntu-latest
name: EOL Units (Ⓐ${{ matrix.ansible }})
strategy:
# As soon as the first unit test fails, cancel the others to free up the CI queue
fail-fast: true
matrix:
ansible:
- '2.11'
- '2.12'
- '2.13'
- '2.14'
- '2.15'
steps:
- name: Perform unit testing against Ansible version ${{ matrix.ansible }}
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-github-repository-slug: ${{ contains(fromJson('["2.10", "2.11"]'), matrix.ansible) && 'felixfontein/ansible' || 'ansible/ansible' }}
ansible-core-version: stable-${{ matrix.ansible }}
codecov-token: ${{ secrets.CODECOV_TOKEN }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
pull-request-change-detection: 'true'
testing-type: units
pre-test-cmd: >-
git clone --depth=1 --single-branch --branch stable-1 https://github.com/ansible-collections/community.library_inventory_filtering.git ../../community/library_inventory_filtering_v1
;
git clone --depth=1 --single-branch --branch main https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
integration:
runs-on: ubuntu-latest
name: EOL I (Ⓐ${{ matrix.ansible }}+${{ matrix.docker }}+py${{ matrix.python }}:${{ matrix.target }})
strategy:
fail-fast: false
matrix:
ansible:
- ''
docker:
- ''
python:
- ''
target:
- ''
extra-constraints:
# Specifying this other than '' likely destroys change detection, but at least it will make
# CI pass when necessary...
- ''
exclude:
- ansible: ''
include:
# 2.11
- ansible: '2.11'
docker: alpine3
python: ''
target: azp/4/
extra-constraints: urllib3 < 2.0.0
- ansible: '2.11'
docker: alpine3
python: ''
target: azp/5/
- ansible: '2.11'
docker: alpine3
python: ''
target: azp/6/
# 2.12
- ansible: '2.12'
docker: fedora33
python: ''
target: azp/4/
- ansible: '2.12'
docker: fedora33
python: ''
target: azp/5/
- ansible: '2.12'
docker: fedora33
python: ''
target: azp/6/
- ansible: '2.12'
docker: fedora34
python: ''
target: azp/4/
- ansible: '2.12'
docker: fedora34
python: ''
target: azp/5/
- ansible: '2.12'
docker: fedora34
python: ''
target: azp/6/
- ansible: '2.12'
docker: ubuntu1804
python: ''
target: azp/4/
- ansible: '2.12'
docker: ubuntu1804
python: ''
target: azp/5/
- ansible: '2.12'
docker: ubuntu1804
python: ''
target: azp/6/
# 2.13
- ansible: '2.13'
docker: fedora35
python: ''
target: azp/4/
- ansible: '2.13'
docker: fedora35
python: ''
target: azp/5/
- ansible: '2.13'
docker: fedora35
python: ''
target: azp/6/
- ansible: '2.13'
docker: opensuse15py2
python: ''
target: azp/4/
- ansible: '2.13'
docker: opensuse15py2
python: ''
target: azp/5/
- ansible: '2.13'
docker: opensuse15py2
python: ''
target: azp/6/
- ansible: '2.13'
docker: alpine3
python: ''
target: azp/4/
- ansible: '2.13'
docker: alpine3
python: ''
target: azp/5/
- ansible: '2.13'
docker: alpine3
python: ''
target: azp/6/
# 2.14
- ansible: '2.14'
docker: alpine3
python: ''
target: azp/4/
- ansible: '2.14'
docker: alpine3
python: ''
target: azp/5/
- ansible: '2.14'
docker: alpine3
python: ''
target: azp/6/
# 2.15
- ansible: '2.15'
docker: fedora37
python: ''
target: azp/4/
- ansible: '2.15'
docker: fedora37
python: ''
target: azp/5/
- ansible: '2.15'
docker: fedora37
python: ''
target: azp/6/
steps:
- name: Perform integration testing against Ansible version ${{ matrix.ansible }} under Python ${{ matrix.python }}
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-github-repository-slug: ${{ contains(fromJson('["2.10", "2.11"]'), matrix.ansible) && 'felixfontein/ansible' || 'ansible/ansible' }}
ansible-core-version: stable-${{ matrix.ansible }}
codecov-token: ${{ secrets.CODECOV_TOKEN }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
docker-image: ${{ matrix.docker }}
integration-continue-on-error: 'false'
integration-diff: 'false'
integration-retry-on-error: 'true'
# TODO: remove "--branch stable-2" from community.crypto install once we're only using ansible-core 2.17 or newer!
pre-test-cmd: >-
mkdir -p ../../ansible
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git ../../ansible/posix
;
git clone --depth=1 --single-branch --branch stable-2 https://github.com/ansible-collections/community.crypto.git ../../community/crypto
;
git clone --depth=1 --single-branch --branch stable-9 https://github.com/ansible-collections/community.general.git ../../community/general
;
git clone --depth=1 --single-branch --branch stable-1 https://github.com/ansible-collections/community.library_inventory_filtering.git ../../community/library_inventory_filtering_v1
${{ matrix.extra-constraints && format('; echo ''{0}'' >> tests/utils/constraints.txt', matrix.extra-constraints) || '' }}
;
cat tests/utils/constraints.txt
pull-request-change-detection: 'true'
target: ${{ matrix.target }}
target-python-version: ${{ matrix.python }}
testing-type: integration


@ -1,90 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: Helper Docker images for testing
on:
# Run CI against all pushes (direct commits, including merged PRs) and all pull requests
push:
branches:
- main
paths:
- .github/workflows/docker-images.yml
- tests/images/**
pull_request:
branches:
- main
paths:
- .github/workflows/docker-images.yml
- tests/images/**
# Run CI once per day (at 03:00 UTC)
schedule:
- cron: '0 3 * * *'
env:
CONTAINER_REGISTRY: ghcr.io/ansible-collections
jobs:
build:
name: Build image ${{ matrix.name }}:${{ matrix.tag }}
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
include:
- name: simple-1
tag: tag
tag-as-latest: true
- name: simple-2
tag: tag
tag-as-latest: true
- name: healthcheck
tag: check
tag-as-latest: true
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Install dependencies
run: |
sudo apt-get install podman buildah
- name: Set up Go 1.22
uses: actions/setup-go@v6
with:
go-version: '1.22'
cache: false # true (default) results in warnings since we don't use Go modules
- name: Build ${{ matrix.name }} image
run: |
./build.sh "${CONTAINER_REGISTRY}/${{ matrix.name }}:${{ matrix.tag }}"
working-directory: tests/images/${{ matrix.name }}
- name: Tag image as latest
if: matrix.tag-as-latest && matrix.tag != 'latest'
run: |
podman tag "${CONTAINER_REGISTRY}/${{ matrix.name }}:${{ matrix.tag }}" "${CONTAINER_REGISTRY}/${{ matrix.name }}:latest"
- name: Publish container image ${{ env.CONTAINER_REGISTRY }}/${{ matrix.name }}:${{ matrix.tag }}
if: github.event_name != 'pull_request'
uses: redhat-actions/push-to-registry@v2
with:
registry: ${{ env.CONTAINER_REGISTRY }}
image: ${{ matrix.name }}
tags: ${{ matrix.tag }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Publish container image ${{ env.CONTAINER_REGISTRY }}/${{ matrix.name }}:latest
if: github.event_name != 'pull_request' && matrix.tag-as-latest && matrix.tag != 'latest'
uses: redhat-actions/push-to-registry@v2
with:
registry: ${{ env.CONTAINER_REGISTRY }}
image: ${{ matrix.name }}
tags: latest
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}


@ -1,96 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: Collection Docs
concurrency:
group: docs-pr-${{ github.head_ref }}
cancel-in-progress: true
on:
pull_request_target:
types: [opened, synchronize, reopened, closed]
env:
GHP_BASE_URL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}
jobs:
build-docs:
permissions:
contents: read
name: Build Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-pr.yml@main
with:
collection-name: community.docker
init-lenient: false
init-fail-on-error: true
squash-hierarchy: true
init-project: Community.Docker Collection
init-copyright: Community.Docker Contributors
init-title: Community.Docker Collection Documentation
init-html-short-title: Community.Docker Collection Docs
init-extra-html-theme-options: |
documentation_home_url=https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/branch/main/
render-file-line: '> * `$<status>` [$<path_tail>](https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/pr/${{ github.event.number }}/$<path_tail>)'
extra-collections: community.library_inventory_filtering_v1
publish-docs-gh-pages:
# for now we won't run this on forks
if: github.repository == 'ansible-collections/community.docker'
permissions:
contents: write
pages: write
id-token: write
needs: [build-docs]
name: Publish Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-publish-gh-pages.yml@main
with:
artifact-name: ${{ needs.build-docs.outputs.artifact-name }}
action: ${{ (github.event.action == 'closed' || needs.build-docs.outputs.changed != 'true') && 'teardown' || 'publish' }}
publish-gh-pages-branch: true
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
comment:
permissions:
pull-requests: write
runs-on: ubuntu-latest
needs: [build-docs, publish-docs-gh-pages]
name: PR comments
steps:
- name: PR comment
uses: ansible-community/github-docs-build/actions/ansible-docs-build-comment@main
with:
body-includes: '## Docs Build'
reactions: heart
action: ${{ needs.build-docs.outputs.changed != 'true' && 'remove' || '' }}
on-closed-body: |
## Docs Build 📝
This PR is closed and any previously published docsite has been unpublished.
on-merged-body: |
## Docs Build 📝
Thank you for your contribution! ✨
This PR has been merged and the docs are now incorporated into `main`:
${{ env.GHP_BASE_URL }}/branch/main
body: |
## Docs Build 📝
Thank you for your contribution! ✨
The docs for **this PR** have been published here:
${{ env.GHP_BASE_URL }}/pr/${{ github.event.number }}
You can compare to the docs for the `main` branch here:
${{ env.GHP_BASE_URL }}/branch/main
The docsite for **this PR** is also available for download as an artifact from this run:
${{ needs.build-docs.outputs.artifact-url }}
File changes:
${{ needs.build-docs.outputs.diff-files-rendered }}
${{ needs.build-docs.outputs.diff-rendered }}


@ -1,56 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: Collection Docs
concurrency:
group: docs-push-${{ github.sha }}
cancel-in-progress: true
on:
push:
branches:
- main
- stable-*
tags:
- '*'
# Run CI once per day (at 09:00 UTC)
schedule:
- cron: '0 9 * * *'
# Allow manual trigger (for newer antsibull-docs, sphinx-ansible-theme, ... versions)
workflow_dispatch:
jobs:
build-docs:
permissions:
contents: read
name: Build Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-push.yml@main
with:
collection-name: community.docker
init-lenient: false
init-fail-on-error: true
squash-hierarchy: true
init-project: Community.Docker Collection
init-copyright: Community.Docker Contributors
init-title: Community.Docker Collection Documentation
init-html-short-title: Community.Docker Collection Docs
init-extra-html-theme-options: |
documentation_home_url=https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/branch/main/
extra-collections: community.library_inventory_filtering_v1
publish-docs-gh-pages:
# for now we won't run this on forks
if: github.repository == 'ansible-collections/community.docker'
permissions:
contents: write
pages: write
id-token: write
needs: [build-docs]
name: Publish Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-publish-gh-pages.yml@main
with:
artifact-name: ${{ needs.build-docs.outputs.artifact-name }}
publish-gh-pages-branch: true
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@ -1,176 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: execution environment
on:
# Run CI against all pushes (direct commits, including merged PRs) and all pull requests
push:
branches:
- main
- stable-*
pull_request:
# Run CI once per day (at 04:30 UTC)
# This ensures that we are still testing against the latest version of ansible-builder even if there have not been any commits
schedule:
- cron: '30 4 * * *'
env:
NAMESPACE: community
COLLECTION_NAME: docker
jobs:
build:
name: Build and test EE (${{ matrix.name }})
strategy:
fail-fast: false
matrix:
name:
- ''
ansible_core:
- ''
ansible_runner:
- ''
base_image:
- ''
pre_base:
- ''
extra_vars:
- ''
other_deps:
- ''
exclude:
- ansible_core: ''
include:
- name: ansible-core 2.19 @ RHEL UBI 9
ansible_core: https://github.com/ansible/ansible/archive/stable-2.19.tar.gz
ansible_runner: ansible-runner
other_deps: |2
python_interpreter:
package_system: python3.11 python3.11-pip python3.11-wheel python3.11-cryptography
python_path: "/usr/bin/python3.11"
base_image: docker.io/redhat/ubi9:latest
pre_base: '"#"'
- name: ansible-core 2.15 @ Rocky Linux 9
ansible_core: https://github.com/ansible/ansible/archive/stable-2.15.tar.gz
ansible_runner: ansible-runner
base_image: quay.io/rockylinux/rockylinux:9
pre_base: '"#"'
- name: ansible-core 2.14 @ CentOS Stream 9
ansible_core: https://github.com/ansible/ansible/archive/stable-2.14.tar.gz
ansible_runner: ansible-runner
base_image: quay.io/centos/centos:stream9
pre_base: '"#"'
- name: ansible-core 2.13 @ RHEL UBI 8
ansible_core: https://github.com/ansible/ansible/archive/stable-2.13.tar.gz
ansible_runner: ansible-runner
other_deps: |2
python_interpreter:
package_system: python39 python39-pip python39-wheel python39-cryptography
base_image: docker.io/redhat/ubi8:latest
pre_base: '"#"'
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v4
with:
path: ansible_collections/${{ env.NAMESPACE }}/${{ env.COLLECTION_NAME }}
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install ansible-builder and ansible-navigator
run: pip install ansible-builder ansible-navigator
- name: Verify requirements
run: ansible-builder introspect --sanitize .
- name: Make sure galaxy.yml has version entry
run: >-
python -c
'import yaml ;
f = open("galaxy.yml", "rb") ;
data = yaml.safe_load(f) ;
f.close() ;
data["version"] = data.get("version") or "0.0.1" ;
f = open("galaxy.yml", "wb") ;
f.write(yaml.dump(data).encode("utf-8")) ;
f.close() ;
'
working-directory: ansible_collections/${{ env.NAMESPACE }}/${{ env.COLLECTION_NAME }}
- name: Build collection
run: |
ansible-galaxy collection build --output-path ../../../
working-directory: ansible_collections/${{ env.NAMESPACE }}/${{ env.COLLECTION_NAME }}
- name: Create files for building execution environment
run: |
COLLECTION_FILENAME="$(ls "${NAMESPACE}-${COLLECTION_NAME}"-*.tar.gz)"
# EE config
cat > execution-environment.yml <<EOF
---
version: 3
dependencies:
ansible_core:
package_pip: ${{ matrix.ansible_core }}
ansible_runner:
package_pip: ${{ matrix.ansible_runner }}
galaxy: requirements.yml
${{ matrix.other_deps }}
images:
base_image:
name: ${{ matrix.base_image }}
additional_build_files:
- src: ${COLLECTION_FILENAME}
dest: src
additional_build_steps:
prepend_base:
- ${{ matrix.pre_base }}
EOF
echo "::group::execution-environment.yml"
cat execution-environment.yml
echo "::endgroup::"
# Requirements
cat > requirements.yml <<EOF
---
collections:
- name: src/${COLLECTION_FILENAME}
type: file
EOF
echo "::group::requirements.yml"
cat requirements.yml
echo "::endgroup::"
- name: Build image based on ${{ matrix.base_image }}
run: |
ansible-builder build --verbosity 3 --tag test-ee:latest --container-runtime docker
- name: Show images
run: docker image ls
- name: Make /var/run/docker.sock accessible by everyone
run: sudo chmod a+rw /var/run/docker.sock
- name: Run basic tests
run: >
ansible-navigator run
--mode stdout
--container-engine docker
--container-options=-v --container-options=/var/run/docker.sock:/var/run/docker.sock
--pull-policy never
--set-environment-variable ANSIBLE_PRIVATE_ROLE_VARS=true
--execution-environment-image test-ee:latest
-v
all.yml
${{ matrix.extra_vars }}
working-directory: ansible_collections/${{ env.NAMESPACE }}/${{ env.COLLECTION_NAME }}/tests/ee


@ -1,28 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: nox
'on':
push:
branches:
- main
- stable-*
pull_request:
# Run CI once per day (at 04:30 UTC)
schedule:
- cron: '30 4 * * *'
workflow_dispatch:
jobs:
nox:
runs-on: ubuntu-latest
name: "Run extra sanity tests"
steps:
- name: Check out collection
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Run nox
uses: ansible-community/antsibull-nox@main


@ -1 +0,0 @@
remote.sh


@ -1 +0,0 @@
remote.sh


@ -1,31 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
image="${args[1]}"
python="${args[2]}"
if [ "${#args[@]}" -gt 3 ]; then
target="azp/${args[3]}/"
else
target="azp/"
fi
if [[ "${python}" =~ -pypi-latest$ ]]; then
python="${python/-pypi-latest}"
echo 'force_docker_sdk_for_python_pypi: true' >> tests/integration/integration_config.yml
fi
if [[ "${python}" =~ -dev-latest$ ]]; then
python="${python/-dev-latest}"
echo 'force_docker_sdk_for_python_dev: true' >> tests/integration/integration_config.yml
fi
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--docker "quay.io/ansible-community/test-image:${image}" --python "${python}"


@ -1,30 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
image="${args[1]}"
if [ "${#args[@]}" -gt 2 ]; then
target="azp/${args[2]}/"
else
target="azp/"
fi
if [[ "${image}" =~ -pypi-latest$ ]]; then
image="${image/-pypi-latest}"
echo 'force_docker_sdk_for_python_pypi: true' >> tests/integration/integration_config.yml
fi
if [[ "${image}" =~ -dev-latest$ ]]; then
image="${image/-dev-latest}"
echo 'force_docker_sdk_for_python_dev: true' >> tests/integration/integration_config.yml
fi
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--docker "${image}"


@ -1,48 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
platform="${args[0]}"
version="${args[1]}"
pyver=default
# check for explicit python version like 8.3@3.8
declare -a splitversion
IFS='@' read -ra splitversion <<< "$version"
if [ "${#splitversion[@]}" -gt 1 ]; then
version="${splitversion[0]}"
pyver="${splitversion[1]}"
fi
if [ "${#args[@]}" -gt 2 ]; then
target="azp/${args[2]}/"
else
target="azp/"
fi
if [[ "${version}" =~ -pypi-latest$ ]]; then
version="${version/-pypi-latest}"
echo 'force_docker_sdk_for_python_pypi: true' >> tests/integration/integration_config.yml
fi
if [[ "${version}" =~ -dev-latest$ ]]; then
version="${version/-dev-latest}"
echo 'force_docker_sdk_for_python_dev: true' >> tests/integration/integration_config.yml
fi
stage="${S:-prod}"
provider="${P:-default}"
if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8\. ]]; then
echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
fi
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
--python "${pyver}" --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"


@ -1 +0,0 @@
remote.sh


@ -1,17 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eux
if [ "${BASE_BRANCH:-}" ]; then
base_branch="origin/${BASE_BRANCH}"
else
base_branch=""
fi
# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
--docker --base-branch "${base_branch}" \
--allow-disabled


@ -1,234 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
ansible_version="${args[0]}"
script="${args[1]}"
after_script="${args[2]}"
function join {
local IFS="$1";
shift;
echo "$*";
}
# Ensure we can write other collections to this dir
sudo chown -R "$(whoami)" "${PWD}/../../../"
test="$(join / "${args[@]:1}")"
docker images ansible/ansible
docker images quay.io/ansible/*
docker ps
for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v -e '^drydock/' -e '^quay.io/ansible/azure-pipelines-test-container:' | sed 's/^.* //'); do
docker rm -f "${container}" || true # ignore errors
done
docker ps
if [ -d /home/shippable/cache/ ]; then
ls -la /home/shippable/cache/
fi
command -v python
python -V
function retry
{
# shellcheck disable=SC2034
for repetition in 1 2 3; do
set +e
"$@"
result=$?
set -e
if [ ${result} == 0 ]; then
return ${result}
fi
echo "@* -> ${result}"
done
echo "Command '@*' failed 3 times!"
exit 255
}
command -v pip
pip --version
pip list --disable-pip-version-check
if [ "${ansible_version}" == "devel" ]; then
retry pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
else
retry pip install "https://github.com/ansible/ansible/archive/stable-${ansible_version}.tar.gz" --disable-pip-version-check
fi
export ANSIBLE_COLLECTIONS_PATHS="${PWD}/../../../"
# START: HACK
COMMUNITY_CRYPTO_BRANCH=main
if [ "${ansible_version}" == "2.16" ] || [ "${ansible_version}" == "2.15" ]; then
COMMUNITY_CRYPTO_BRANCH=stable-2
fi
if [ "${script}" == "linux" ] && [ "$after_script" == "ubuntu2004" ]; then
COMMUNITY_CRYPTO_BRANCH=stable-2
fi
retry git clone --depth=1 --single-branch --branch stable-1 https://github.com/ansible-collections/community.library_inventory_filtering.git "${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/community/library_inventory_filtering_v1"
# NOTE: we're installing with git to work around Galaxy being a huge PITA (https://github.com/ansible/galaxy/issues/2429)
# retry ansible-galaxy -vvv collection install community.library_inventory_filtering_v1
if [ "${test}" == "units/1" ]; then
# Nothing further should be added to this list.
# This is to prevent modules or plugins in this collection from having a runtime dependency on other collections.
retry git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git "${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/community/internal_test_tools"
# NOTE: we're installing with git to work around Galaxy being a huge PITA (https://github.com/ansible/galaxy/issues/2429)
# retry ansible-galaxy -vvv collection install community.internal_test_tools
fi
if [ "${script}" != "sanity/1" ] && [ "${script}" != "units/1" ]; then
# To prevent Python dependencies on other collections, only install other collections for integration tests
retry git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git "${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/ansible/posix"
retry git clone --depth=1 --single-branch --branch "${COMMUNITY_CRYPTO_BRANCH}" https://github.com/ansible-collections/community.crypto.git "${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/community/crypto"
retry git clone --depth=1 --single-branch --branch stable-11 https://github.com/ansible-collections/community.general.git "${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/community/general"
# NOTE: we're installing with git to work around Galaxy being a huge PITA (https://github.com/ansible/galaxy/issues/2429)
# retry ansible-galaxy -vvv collection install ansible.posix
# retry ansible-galaxy -vvv collection install community.crypto
# retry ansible-galaxy -vvv collection install community.general
fi
# END: HACK
export PYTHONIOENCODING='utf-8'
if [ "${JOB_TRIGGERED_BY_NAME:-}" == "nightly-trigger" ]; then
COVERAGE=yes
COMPLETE=yes
fi
if [ -n "${COVERAGE:-}" ]; then
# on-demand coverage reporting triggered by setting the COVERAGE environment variable to a non-empty value
export COVERAGE="--coverage"
elif [[ "${COMMIT_MESSAGE}" =~ ci_coverage ]]; then
# on-demand coverage reporting triggered by having 'ci_coverage' in the latest commit message
export COVERAGE="--coverage"
else
# on-demand coverage reporting disabled (default behavior, always-on coverage reporting remains enabled)
export COVERAGE="--coverage-check"
fi
if [ -n "${COMPLETE:-}" ]; then
# disable change detection triggered by setting the COMPLETE environment variable to a non-empty value
export CHANGED=""
elif [[ "${COMMIT_MESSAGE}" =~ ci_complete ]]; then
# disable change detection triggered by having 'ci_complete' in the latest commit message
export CHANGED=""
else
# enable change detection (default behavior)
export CHANGED="--changed"
fi
if [ "${IS_PULL_REQUEST:-}" == "true" ]; then
# run unstable tests which are targeted by focused changes on PRs
export UNSTABLE="--allow-unstable-changed"
else
# do not run unstable tests outside PRs
export UNSTABLE=""
fi
# remove empty core/extras module directories from PRs created prior to the repo-merge
find plugins -type d -empty -print -delete
function cleanup
{
# for complete on-demand coverage generate a report for all files with no coverage on the "sanity/5" job so we only have one copy
if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/5" ]; then
stub="--stub"
# trigger coverage reporting for stubs even if no other coverage data exists
mkdir -p tests/output/coverage/
else
stub=""
fi
if [ -d tests/output/coverage/ ]; then
if find tests/output/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
process_coverage='yes' # process existing coverage files
elif [ "${stub}" ]; then
process_coverage='yes' # process coverage when stubs are enabled
else
process_coverage=''
fi
if [ "${process_coverage}" ]; then
# use python 3.7 for coverage to avoid running out of memory during coverage xml processing
# only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job
virtualenv --python /usr/bin/python3.7 ~/ansible-venv
set +ux
. ~/ansible-venv/bin/activate
set -ux
# shellcheck disable=SC2086
ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"}
cp -a tests/output/reports/coverage=*.xml "$SHIPPABLE_RESULT_DIR/codecoverage/"
if [ "${ansible_version}" != "2.9" ]; then
# analyze and capture code coverage aggregated by integration test target
ansible-test coverage analyze targets generate -v "$SHIPPABLE_RESULT_DIR/testresults/coverage-analyze-targets.json"
fi
# upload coverage report to codecov.io only when using complete on-demand coverage
if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then
for file in tests/output/reports/coverage=*.xml; do
flags="${file##*/coverage=}"
flags="${flags%-powershell.xml}"
flags="${flags%.xml}"
# remove numbered component from stub files when converting to tags
flags="${flags//stub-[0-9]*/stub}"
flags="${flags//=/,}"
flags="${flags//[^a-zA-Z0-9_,]/_}"
bash <(curl -s https://ansible-ci-files.s3.us-east-1.amazonaws.com/codecov/codecov.sh) \
-f "${file}" \
-F "${flags}" \
-n "${test}" \
-t 8450ed26-4e94-4d07-8831-d2023d6d20a3 \
-X coveragepy \
-X gcov \
-X fix \
-X search \
-X xcode \
|| echo "Failed to upload code coverage report to codecov.io: ${file}"
done
fi
fi
fi
if [ -d tests/output/junit/ ]; then
cp -aT tests/output/junit/ "$SHIPPABLE_RESULT_DIR/testresults/"
fi
if [ -d tests/output/data/ ]; then
cp -a tests/output/data/ "$SHIPPABLE_RESULT_DIR/testresults/"
fi
if [ -d tests/output/bot/ ]; then
cp -aT tests/output/bot/ "$SHIPPABLE_RESULT_DIR/testresults/"
fi
}
if [ "${SHIPPABLE_BUILD_ID:-}" ]; then trap cleanup EXIT; fi
if [[ "${COVERAGE:-}" == "--coverage" ]]; then
timeout=60
else
timeout=50
fi
ansible-test env --dump --show --timeout "${timeout}" --color -v
if [ "${SHIPPABLE_BUILD_ID:-}" ]; then "tests/utils/shippable/check_matrix.py"; fi
"tests/utils/shippable/${script}.sh" "${test}"


@ -1 +0,0 @@
remote.sh


@ -1,29 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
set -o pipefail -eux
declare -a args
IFS='/:' read -ra args <<< "$1"
group="${args[1]}"
if [[ "${COVERAGE:-}" == "--coverage" ]]; then
timeout=90
else
timeout=30
fi
group1=()
case "${group}" in
1) options=("${group1[@]:+${group1[@]}}") ;;
esac
ansible-test env --timeout "${timeout}" --color -v
# shellcheck disable=SC2086
ansible-test units --color -v --docker default ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
"${options[@]:+${options[@]}}" \