-rw-r--r--  .azure-pipelines/azure-pipelines.yml             | 250
-rwxr-xr-x  .azure-pipelines/scripts/aggregate-coverage.sh   |  15
-rwxr-xr-x  .azure-pipelines/scripts/combine-coverage.py     |  60
-rwxr-xr-x  .azure-pipelines/scripts/process-results.sh      |  24
-rwxr-xr-x  .azure-pipelines/scripts/publish-codecov.sh      |  27
-rwxr-xr-x  .azure-pipelines/scripts/report-coverage.sh      |   8
-rwxr-xr-x  .azure-pipelines/scripts/run-tests.sh            |  34
-rwxr-xr-x  .azure-pipelines/scripts/time-command.py         |  25
-rw-r--r--  .azure-pipelines/templates/coverage.yml          |  39
-rw-r--r--  .azure-pipelines/templates/matrix.yml            |  55
-rw-r--r--  .azure-pipelines/templates/test.yml              |  45
-rwxr-xr-x  test/utils/shippable/shippable.sh                |   6
12 files changed, 585 insertions, 3 deletions
diff --git a/.azure-pipelines/azure-pipelines.yml b/.azure-pipelines/azure-pipelines.yml
new file mode 100644
index 0000000000..c5653440e8
--- /dev/null
+++ b/.azure-pipelines/azure-pipelines.yml
@@ -0,0 +1,250 @@
+trigger:
+  batch: true
+  branches:
+    include:
+      - devel
+      - stable-*
+
+pr:
+  autoCancel: true
+  branches:
+    include:
+      - devel
+      - stable-*
+
+schedules:
+  - cron: 0 0 * * *
+    displayName: Nightly
+    always: true
+    branches:
+      include:
+        - devel
+        - stable-*
+
+variables:
+  - name: checkoutPath
+    value: ansible
+  - name: coverageBranches
+    value: devel
+  - name: pipelinesCoverage
+    value: coverage
+  - name: entryPoint
+    value: test/utils/shippable/shippable.sh
+  - name: fetchDepth
+    value: 100
+
+resources:
+  containers:
+    - container: default
+      image: quay.io/ansible/azure-pipelines-test-container:1.6.0
+
+pool: Standard
+
+stages:
+  - stage: Sanity
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Test {0}
+          testFormat: sanity/{0}
+          targets:
+            - test: 1
+            - test: 2
+            - test: 3
+            - test: 4
+            - test: 5
+  - stage: Units
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Python {0}
+          testFormat: units/{0}
+          targets:
+            - test: 2.6
+            - test: 2.7
+            - test: 3.5
+            - test: 3.6
+            - test: 3.7
+            - test: 3.8
+            - test: 3.9
+  - stage: Windows
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Server {0}
+          testFormat: windows/{0}/1
+          targets:
+            - test: 2012
+            - test: 2012-R2
+            - test: 2016
+            - test: 2019
+  - stage: Remote
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          targets:
+            - name: macOS 10.15
+              test: macos/10.15
+            - name: RHEL 7.9
+              test: rhel/7.9
+            - name: RHEL 8.3
+              test: rhel/8.3
+            - name: FreeBSD 11.1
+              test: freebsd/11.1
+            - name: FreeBSD 12.2
+              test: freebsd/12.2
+          groups:
+            - 1
+            - 2
+            - 3
+            - 4
+            - 5
+  - stage: Docker
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: linux/{0}
+          targets:
+            - name: Alpine 3
+              test: alpine3
+            - name: CentOS 6
+              test: centos6
+            - name: CentOS 7
+              test: centos7
+            - name: CentOS 8
+              test: centos8
+            - name: Fedora 31
+              test: fedora31
+            - name: Fedora 32
+              test: fedora32
+            - name: openSUSE 15 py2
+              test: opensuse15py2
+            - name: openSUSE 15 py3
+              test: opensuse15
+            - name: Ubuntu 16.04
+              test: ubuntu1604
+            - name: Ubuntu 18.04
+              test: ubuntu1804
+          groups:
+            - 1
+            - 2
+            - 3
+            - 4
+            - 5
+  - stage: Galaxy
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Python {0}
+          testFormat: galaxy/{0}/1
+          targets:
+            - test: 2.7
+            - test: 3.6
+  - stage: Generic
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Python {0}
+          testFormat: generic/{0}/1
+          targets:
+            - test: 2.7
+            - test: 3.6
+  - stage: Incidental_Remote
+    displayName: Incidental Remote
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: i/{0}
+          targets:
+            - name: OS X 10.11
+              test: osx/10.11
+            - name: RHEL 7.9
+              test: rhel/7.9
+            - name: RHEL 8.3
+              test: rhel/8.3
+            - name: FreeBSD 11.1
+              test: freebsd/11.1
+            - name: FreeBSD 12.2
+              test: freebsd/12.2
+  - stage: Incidental_Docker
+    displayName: Incidental Docker
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: i/linux/{0}
+          targets:
+            - name: CentOS 6
+              test: centos6
+            - name: CentOS 7
+              test: centos7
+            - name: CentOS 8
+              test: centos8
+            - name: Fedora 31
+              test: fedora31
+            - name: Fedora 32
+              test: fedora32
+            - name: openSUSE 15 py2
+              test: opensuse15py2
+            - name: openSUSE 15 py3
+              test: opensuse15
+            - name: Ubuntu 16.04
+              test: ubuntu1604
+            - name: Ubuntu 18.04
+              test: ubuntu1804
+  - stage: Incidental_Windows
+    displayName: Incidental Windows
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          nameFormat: Server {0}
+          testFormat: i/windows/{0}
+          targets:
+            - test: 2012
+            - test: 2012-R2
+            - test: 2016
+            - test: 2019
+  - stage: Incidental
+    dependsOn: []
+    jobs:
+      - template: templates/matrix.yml
+        parameters:
+          testFormat: i/{0}/1
+          targets:
+            - name: IOS Python
+              test: ios/csr1000v/
+            - name: VyOS Python 2.7
+              test: vyos/1.1.8/2.7
+            - name: VyOS Python 3.6
+              test: vyos/1.1.8/3.6
+            - name: AWS Python 2.7
+              test: aws/2.7
+            - name: AWS Python 3.6
+              test: aws/3.6
+            - name: Cloud Python
+              test: cloud/
+  - stage: Summary
+    condition: succeededOrFailed()
+    dependsOn:
+      - Sanity
+      - Units
+      - Windows
+      - Remote
+      - Docker
+      - Galaxy
+      - Generic
+      - Incidental_Remote
+      - Incidental_Docker
+      - Incidental_Windows
+      - Incidental
+    jobs:
+      - template: templates/coverage.yml
diff --git a/.azure-pipelines/scripts/aggregate-coverage.sh b/.azure-pipelines/scripts/aggregate-coverage.sh
new file mode 100755
index 0000000000..2200502f56
--- /dev/null
+++ b/.azure-pipelines/scripts/aggregate-coverage.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+# Aggregate code coverage results for later processing.
+
+set -o pipefail -eu
+
+agent_temp_directory="$1"
+
+PATH="${PWD}/bin:${PATH}"
+
+mkdir "${agent_temp_directory}/coverage/"
+
+options=(--venv --venv-system-site-packages --color -v)
+
+ansible-test coverage combine --export "${agent_temp_directory}/coverage/" "${options[@]}"
+ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
diff --git a/.azure-pipelines/scripts/combine-coverage.py b/.azure-pipelines/scripts/combine-coverage.py
new file mode 100755
index 0000000000..506ade6460
--- /dev/null
+++ b/.azure-pipelines/scripts/combine-coverage.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+"""
+Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
+Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
+The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
+Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
+It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import re
+import shutil
+import sys
+
+
+def main():
+    """Main program entry point."""
+    source_directory = sys.argv[1]
+
+    if '/ansible_collections/' in os.getcwd():
+        output_path = "tests/output"
+    else:
+        output_path = "test/results"
+
+    destination_directory = os.path.join(output_path, 'coverage')
+
+    if not os.path.exists(destination_directory):
+        os.makedirs(destination_directory)
+
+    jobs = {}
+    count = 0
+
+    for name in os.listdir(source_directory):
+        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
+        label = match.group('label')
+        attempt = int(match.group('attempt'))
+        jobs[label] = max(attempt, jobs.get(label, 0))
+
+    for label, attempt in jobs.items():
+        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
+        source = os.path.join(source_directory, name)
+        source_files = os.listdir(source)
+
+        for source_file in source_files:
+            source_path = os.path.join(source, source_file)
+            destination_path = os.path.join(destination_directory, source_file + '.' + label)
+            print('"%s" -> "%s"' % (source_path, destination_path))
+            shutil.copyfile(source_path, destination_path)
+            count += 1
+
+    print('Coverage file count: %d' % count)
+    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
+    print('##vso[task.setVariable variable=outputPath]%s' % output_path)
+
+if __name__ == '__main__':
+    main()
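The docstring above describes the attempt-selection rule: for each job label, only the artifact from the highest $(System.JobAttempt) is kept. A minimal standalone sketch of that rule, using hypothetical artifact names rather than output from a real pipeline run:

    import re

    # Hypothetical artifact directory names following the "Coverage <attempt> <label>" convention.
    artifacts = [
        'Coverage 1 Units Python 3.8',
        'Coverage 2 Units Python 3.8',  # job was retried; attempt 2 should win
        'Coverage 1 Sanity Test 3',
    ]

    jobs = {}

    for name in artifacts:
        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
        label = match.group('label')
        attempt = int(match.group('attempt'))
        jobs[label] = max(attempt, jobs.get(label, 0))

    print(jobs)  # {'Units Python 3.8': 2, 'Sanity Test 3': 1}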
diff --git a/.azure-pipelines/scripts/process-results.sh b/.azure-pipelines/scripts/process-results.sh
new file mode 100755
index 0000000000..f3f1d1bae8
--- /dev/null
+++ b/.azure-pipelines/scripts/process-results.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+# Check the test results and set variables for use in later steps.
+
+set -o pipefail -eu
+
+if [[ "$PWD" =~ /ansible_collections/ ]]; then
+    output_path="tests/output"
+else
+    output_path="test/results"
+fi
+
+echo "##vso[task.setVariable variable=outputPath]${output_path}"
+
+if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveTestResults]true"
+fi
+
+if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveBotResults]true"
+fi
+
+if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
+    echo "##vso[task.setVariable variable=haveCoverageData]true"
+fi
diff --git a/.azure-pipelines/scripts/publish-codecov.sh b/.azure-pipelines/scripts/publish-codecov.sh
new file mode 100755
index 0000000000..7aeabda0c0
--- /dev/null
+++ b/.azure-pipelines/scripts/publish-codecov.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# Upload code coverage reports to codecov.io.
+# Multiple coverage files from multiple languages are accepted and aggregated after upload.
+# Python coverage, as well as PowerShell and Python stubs, can all be uploaded.
+
+set -o pipefail -eu
+
+output_path="$1"
+
+curl --silent --show-error https://codecov.io/bash > codecov.sh
+
+for file in "${output_path}"/reports/coverage*.xml; do
+    name="${file}"
+    name="${name##*/}"  # remove path
+    name="${name##coverage=}"  # remove 'coverage=' prefix if present
+    name="${name%.xml}"  # remove '.xml' suffix
+
+    bash codecov.sh \
+        -f "${file}" \
+        -n "${name}" \
+        -X coveragepy \
+        -X gcov \
+        -X fix \
+        -X search \
+        -X xcode \
+        || echo "Failed to upload code coverage report to codecov.io: ${file}"
+done
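The name derivation in the loop above (strip the path, an optional 'coverage=' prefix, and the '.xml' suffix) is easy to mis-read from the bash parameter expansions alone. A rough Python equivalent, with made-up file names for illustration:

    # Rough equivalent of the parameter expansions in publish-codecov.sh (file names are hypothetical).
    for path in ('test/results/reports/coverage=units.xml', 'test/results/reports/coverage-powershell.xml'):
        name = path.rsplit('/', 1)[-1]       # ${name##*/}        - remove path
        if name.startswith('coverage='):
            name = name[len('coverage='):]   # ${name##coverage=} - remove 'coverage=' prefix if present
        if name.endswith('.xml'):
            name = name[:-len('.xml')]       # ${name%.xml}       - remove '.xml' suffix
        print(name)  # -> units, coverage-powershell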
diff --git a/.azure-pipelines/scripts/report-coverage.sh b/.azure-pipelines/scripts/report-coverage.sh
new file mode 100755
index 0000000000..d8a689d803
--- /dev/null
+++ b/.azure-pipelines/scripts/report-coverage.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.
+
+set -o pipefail -eu
+
+PATH="${PWD}/bin:${PATH}"
+
+ansible-test coverage xml --stub --venv --venv-system-site-packages --color -v
diff --git a/.azure-pipelines/scripts/run-tests.sh b/.azure-pipelines/scripts/run-tests.sh
new file mode 100755
index 0000000000..a947fdf013
--- /dev/null
+++ b/.azure-pipelines/scripts/run-tests.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Configure the test environment and run the tests.
+
+set -o pipefail -eu
+
+entry_point="$1"
+test="$2"
+read -r -a coverage_branches <<< "$3"  # space separated list of branches to run code coverage on for scheduled builds
+
+export COMMIT_MESSAGE
+export COMPLETE
+export COVERAGE
+export IS_PULL_REQUEST
+
+if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
+    IS_PULL_REQUEST=true
+    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
+else
+    IS_PULL_REQUEST=
+    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
+fi
+
+COMPLETE=
+COVERAGE=
+
+if [ "${BUILD_REASON}" = "Schedule" ]; then
+    COMPLETE=yes
+
+    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
+        COVERAGE=yes
+    fi
+fi
+
+"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
diff --git a/.azure-pipelines/scripts/time-command.py b/.azure-pipelines/scripts/time-command.py
new file mode 100755
index 0000000000..5e8eb8d4c8
--- /dev/null
+++ b/.azure-pipelines/scripts/time-command.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import time
+
+
+def main():
+    """Main program entry point."""
+    start = time.time()
+
+    sys.stdin.reconfigure(errors='surrogateescape')
+    sys.stdout.reconfigure(errors='surrogateescape')
+
+    for line in sys.stdin:
+        seconds = time.time() - start
+        sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
+        sys.stdout.flush()
+
+
+if __name__ == '__main__':
+    main()
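time-command.py only reformats elapsed time as minutes and seconds, so a line emitted 65 seconds after the wrapper started is prefixed with '01:05'. A small sketch of just the formatting, using sample elapsed times rather than real test output:

    # Sample of the timestamp prefix produced by time-command.py for a few elapsed times.
    for seconds in (0, 5, 65, 3599):
        print('%02d:%02d example output line' % (seconds // 60, seconds % 60))
    # prints prefixes 00:00, 00:05, 01:05 and 59:59 respectively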
diff --git a/.azure-pipelines/templates/coverage.yml b/.azure-pipelines/templates/coverage.yml
new file mode 100644
index 0000000000..1864e44410
--- /dev/null
+++ b/.azure-pipelines/templates/coverage.yml
@@ -0,0 +1,39 @@
+# This template adds a job for processing code coverage data.
+# It will upload results to Azure Pipelines and codecov.io.
+# Use it from a job stage that completes after all other jobs have completed.
+# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.
+
+jobs:
+  - job: Coverage
+    displayName: Code Coverage
+    container: default
+    workspace:
+      clean: all
+    steps:
+      - checkout: self
+        fetchDepth: $(fetchDepth)
+        path: $(checkoutPath)
+      - task: DownloadPipelineArtifact@2
+        displayName: Download Coverage Data
+        inputs:
+          path: coverage/
+          patterns: "Coverage */*=coverage.combined"
+      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
+        displayName: Combine Coverage Data
+      - bash: .azure-pipelines/scripts/report-coverage.sh
+        displayName: Generate Coverage Report
+        condition: gt(variables.coverageFileCount, 0)
+      - task: PublishCodeCoverageResults@1
+        inputs:
+          codeCoverageTool: Cobertura
+          # Azure Pipelines only accepts a single coverage data file.
+          # That means only Python or PowerShell coverage can be uploaded, but not both.
+          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
+          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
+          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
+        displayName: Publish to Azure Pipelines
+        condition: gt(variables.coverageFileCount, 0)
+      - bash: .azure-pipelines/scripts/publish-codecov.sh "$(outputPath)"
+        displayName: Publish to codecov.io
+        condition: gt(variables.coverageFileCount, 0)
+        continueOnError: true
diff --git a/.azure-pipelines/templates/matrix.yml b/.azure-pipelines/templates/matrix.yml
new file mode 100644
index 0000000000..4e9555dd3b
--- /dev/null
+++ b/.azure-pipelines/templates/matrix.yml
@@ -0,0 +1,55 @@
+# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
+# If this matrix template does not provide the required functionality, consider using the test template directly instead.
+
+parameters:
+  # A required list of dictionaries, one per test target.
+  # Each item in the list must contain a "test" or "name" key.
+  # Both may be provided. If one is omitted, the other will be used.
+  - name: targets
+    type: object
+
+  # An optional list of values which will be used to multiply the targets list into a matrix.
+  # Values can be strings or numbers.
+  - name: groups
+    type: object
+    default: []
+
+  # An optional format string used to generate the job name.
+  # - {0} is the name of an item in the targets list.
+  - name: nameFormat
+    type: string
+    default: "{0}"
+
+  # An optional format string used to generate the test name.
+  # - {0} is the name of an item in the targets list.
+  - name: testFormat
+    type: string
+    default: "{0}"
+
+  # An optional format string used to add the group to the job name.
+  # {0} is the formatted name of an item in the targets list.
+  # {{1}} is the group -- be sure to include the double "{{" and "}}".
+  - name: nameGroupFormat
+    type: string
+    default: "{0} - {{1}}"
+
+  # An optional format string used to add the group to the test name.
+  # {0} is the formatted test of an item in the targets list.
+  # {{1}} is the group -- be sure to include the double "{{" and "}}".
+  - name: testGroupFormat
+    type: string
+    default: "{0}/{{1}}"
+
+jobs:
+  - template: test.yml
+    parameters:
+      jobs:
+        - ${{ if eq(length(parameters.groups), 0) }}:
+          - ${{ each target in parameters.targets }}:
+            - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
+              test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
+        - ${{ if not(eq(length(parameters.groups), 0)) }}:
+          - ${{ each group in parameters.groups }}:
+            - ${{ each target in parameters.targets }}:
+              - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
+                test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
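The nested format() calls at the bottom of the template first substitute the per-target format string into the group format string, and then fill in the target and group values. A rough Python sketch of that expansion, using the Docker stage's parameters from azure-pipelines.yml as sample input (this mimics, but is not, the Azure Pipelines template engine):

    # Rough sketch of the matrix expansion performed by matrix.yml.
    name_format, test_format = '{0}', 'linux/{0}'  # Docker stage: default nameFormat, testFormat linux/{0}
    name_group_format, test_group_format = '{0} - {{1}}', '{0}/{{1}}'

    targets = [dict(name='CentOS 7', test='centos7'), dict(name='Fedora 32', test='fedora32')]
    groups = [1, 2]

    for group in groups:
        for target in targets:
            name = name_group_format.format(name_format).format(target['name'], group)
            test = test_group_format.format(test_format).format(target['test'], group)
            print(name, '->', test)
    # CentOS 7 - 1 -> linux/centos7/1
    # Fedora 32 - 1 -> linux/fedora32/1
    # CentOS 7 - 2 -> linux/centos7/2
    # Fedora 32 - 2 -> linux/fedora32/2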
diff --git a/.azure-pipelines/templates/test.yml b/.azure-pipelines/templates/test.yml
new file mode 100644
index 0000000000..5250ed8023
--- /dev/null
+++ b/.azure-pipelines/templates/test.yml
@@ -0,0 +1,45 @@
+# This template uses the provided list of jobs to create one or more test jobs.
+# It can be used directly if needed, or through the matrix template.
+
+parameters:
+  # A required list of dictionaries, one per test job.
+  # Each item in the list must contain a "job" and "name" key.
+  - name: jobs
+    type: object
+
+jobs:
+  - ${{ each job in parameters.jobs }}:
+      - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
+        displayName: ${{ job.name }}
+        container: default
+        workspace:
+          clean: all
+        steps:
+          - checkout: self
+            fetchDepth: $(fetchDepth)
+            path: $(checkoutPath)
+          - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
+            displayName: Run Tests
+          - bash: .azure-pipelines/scripts/process-results.sh
+            condition: succeededOrFailed()
+            displayName: Process Results
+          - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
+            condition: eq(variables.haveCoverageData, 'true')
+            displayName: Aggregate Coverage Data
+          - task: PublishTestResults@2
+            condition: eq(variables.haveTestResults, 'true')
+            inputs:
+              testResultsFiles: "$(outputPath)/junit/*.xml"
+            displayName: Publish Test Results
+          - task: PublishPipelineArtifact@1
+            condition: eq(variables.haveBotResults, 'true')
+            displayName: Publish Bot Results
+            inputs:
+              targetPath: "$(outputPath)/bot/"
+              artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
+          - task: PublishPipelineArtifact@1
+            condition: eq(variables.haveCoverageData, 'true')
+            displayName: Publish Coverage Data
+            inputs:
+              targetPath: "$(Agent.TempDirectory)/coverage/"
+              artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
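Azure Pipelines job identifiers cannot contain characters such as '/', '.' or '-', which is why the job id above is derived from the test name through chained replace() calls. A quick illustration with sample test names:

    # Sample of the job id sanitization used in test.yml.
    for test in ('windows/2012-R2/1', 'linux/centos7/3', 'units/3.8'):
        print('test_' + test.replace('/', '_').replace('.', '_').replace('-', '_'))
    # test_windows_2012_R2_1
    # test_linux_centos7_3
    # test_units_3_8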
diff --git a/test/utils/shippable/shippable.sh b/test/utils/shippable/shippable.sh
index 55d6828971..10f32f044e 100755
--- a/test/utils/shippable/shippable.sh
+++ b/test/utils/shippable/shippable.sh
@@ -13,7 +13,7 @@ docker images ansible/ansible
 docker images quay.io/ansible/*
 docker ps
 
-for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v -e '^drydock/' -e '^quay.io/ansible/shippable-build-container:' | sed 's/^.* //'); do
+for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v -e '^drydock/' -e '^quay.io/ansible/shippable-build-container:' -e '^quay.io/ansible/azure-pipelines-test-container:' | sed 's/^.* //'); do
     docker rm -f "${container}" || true  # ignore errors
 done
 
@@ -146,7 +146,7 @@ function cleanup
     fi
 }
 
-trap cleanup EXIT
+if [ "${SHIPPABLE_BUILD_ID:-}" ]; then trap cleanup EXIT; fi
 
 if [[ "${COVERAGE:-}" == "--coverage" ]]; then
     timeout=60
@@ -156,5 +156,5 @@ fi
 
 ansible-test env --dump --show --timeout "${timeout}" --color -v
 
-"test/utils/shippable/check_matrix.py"
+if [ "${SHIPPABLE_BUILD_ID:-}" ]; then "test/utils/shippable/check_matrix.py"; fi
 "test/utils/shippable/${script}.sh" "${test}"