ci: clean azp scripts (#340)

##### SUMMARY

Clean old scripts and simplify the CI scripts.
This commit is contained in:
Jonas L 2023-09-27 08:59:26 +02:00 committed by GitHub
parent 3dffaafcdc
commit 001c3e2089
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
41 changed files with 185 additions and 490 deletions

View file

@ -23,7 +23,7 @@ variables:
- name: pipelinesCoverage
value: coverage
- name: entryPoint
value: tests/utils/shippable/shippable.sh
value: tests/utils/ci.sh
- name: fetchDepth
value: 0
@ -44,7 +44,7 @@ stages:
parameters:
targets:
- name: Sanity
test: devel/sanity/1
test: devel/sanity
- stage: Sanity_2_16
displayName: Sanity 2.16
@ -54,7 +54,7 @@ stages:
parameters:
targets:
- name: Sanity
test: 2.16/sanity/1
test: 2.16/sanity
- stage: Sanity_2_15
displayName: Sanity 2.15
@ -64,7 +64,7 @@ stages:
parameters:
targets:
- name: Sanity
test: 2.15/sanity/1
test: 2.15/sanity
- stage: Sanity_2_14
displayName: Sanity 2.14
@ -74,7 +74,7 @@ stages:
parameters:
targets:
- name: Sanity
test: 2.14/sanity/1
test: 2.14/sanity
- stage: Sanity_2_13
displayName: Sanity 2.13
@ -84,7 +84,7 @@ stages:
parameters:
targets:
- name: Sanity
test: 2.13/sanity/1
test: 2.13/sanity
### Units
- stage: Units_devel

View file

@ -4,7 +4,7 @@
set -o pipefail -eu
entry_point="$1"
test="$2"
entry_point_args="$2"
read -r -a coverage_branches <<< "$3" # space separated list of branches to run code coverage on for scheduled builds
export COMMIT_MESSAGE
@ -16,19 +16,19 @@ if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
IS_PULL_REQUEST=true
COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
IS_PULL_REQUEST=
IS_PULL_REQUEST=false
COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi
COMPLETE=
COVERAGE=
COMPLETE=false
COVERAGE=false
if [ "${BUILD_REASON}" = "Schedule" ]; then
COMPLETE=yes
COMPLETE=true
if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
COVERAGE=yes
COVERAGE=true
fi
fi
"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
"${entry_point}" "${entry_point_args}" 2>&1 | "$(dirname "$0")/time-command.py"

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group3
azp/group3

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group3
azp/group3

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group3
azp/group3

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group3
azp/group3

View file

@ -1,3 +1,3 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1
disabled

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group3
azp/group3

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group3
azp/group3

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group1
azp/group1

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group3
azp/group3

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,3 +1,3 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2
disabled

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group3
azp/group3

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

View file

@ -1,2 +1,2 @@
cloud/hcloud
shippable/hcloud/group2
azp/group2

141
tests/utils/ci.sh Executable file
View file

@ -0,0 +1,141 @@
#!/usr/bin/env bash
set -o pipefail -eux
# Print a diagnostic message to stderr and abort the script with status 1.
error() {
  printf 'error: %s\n' "$*" >&2
  exit 1
}
# Run a command, retrying up to 3 times until it succeeds.
# On success returns 0; after 3 failures, reports the command on stderr and
# exits the whole script with the command's last exit code.
retry() {
  local exit_code=1
  for _ in 1 2 3; do
    # Temporarily disable -e so a failing attempt does not abort the script.
    set +e
    "$@"
    exit_code=$?
    set -e
    # Use -eq (POSIX numeric test) instead of the bashism '=='; quote the var.
    if [ "$exit_code" -eq 0 ]; then
      return 0
    fi
  done
  # Diagnostics belong on stderr, not stdout.
  echo >&2 "Command '$*' failed 3 times!"
  exit "$exit_code"
}
# Parse the entry point argument into its components.
declare -a entry_point_args
IFS='/:' read -ra entry_point_args <<< "$1"
# Explode entry point args, for example '2.16/integration/3.10/2' or '2.16/sanity'
ansible_version="${entry_point_args[0]}"
test_name="${entry_point_args[1]}"
# Optional parts: only units/integration supply a python version / test group.
python_version="${entry_point_args[2]:-}"
test_group="${entry_point_args[3]:-}"

export PYTHONIOENCODING="utf-8"
export PIP_DISABLE_PIP_VERSION_CHECK=true
export PIP_NO_WARN_SCRIPT_LOCATION=false # Negative options are a bit weird: https://pip.pypa.io/en/stable/topics/configuration/#boolean-options
# Make the collection under test discoverable as an installed collection.
export ANSIBLE_COLLECTIONS_PATHS="$PWD/../.."

# Log the python/pip toolchain in use for debugging CI runs.
command -v python
python -V

command -v pip
pip --version
pip list

# Install the requested ansible-core version from the matching branch tarball.
if [ "$ansible_version" == "devel" ]; then
    pip install "https://github.com/ansible/ansible/archive/devel.tar.gz"
else
    pip install "https://github.com/ansible/ansible/archive/stable-$ansible_version.tar.gz"
fi

command -v ansible
ansible --version

# Prepare coverage args
# NOTE: COVERAGE/COMPLETE/IS_PULL_REQUEST hold "true"/"false" and are executed
# as commands in the conditions below; they are exported by the wrapper script.
if $COVERAGE; then
    coverage_args="--coverage"
elif [[ "$COMMIT_MESSAGE" =~ ci_coverage ]]; then
    coverage_args="--coverage"
else
    coverage_args="--coverage-check"
fi

# Prepare changed args
if $COMPLETE; then
    changed_args=""
elif [[ "$COMMIT_MESSAGE" =~ ci_complete ]]; then
    changed_args=""
else
    changed_args="--changed"
fi

# Prepare unstable args
if $IS_PULL_REQUEST; then
    unstable_args="--allow-unstable-changed"
else
    unstable_args=""
fi

# Install dependencies
pip install rstcheck

# Ensure we can write other collections to this dir
sudo chown "$(whoami)" "$ANSIBLE_COLLECTIONS_PATHS"

pip install -r tests/integration/requirements.txt -c tests/constraints.txt
ansible-galaxy -vvv collection install -r tests/requirements.yml

# Dump env and set timeout
timeout=45
if $COVERAGE; then
    timeout=60
fi
ansible-test env --color -v --dump --show --timeout "$timeout"

# Run tests
case "$test_name" in
    sanity)
        # shellcheck disable=SC2086
        ansible-test sanity --color -v \
            --exclude plugins/module_utils/vendor/ \
            --exclude scripts/ \
            --exclude tests/utils/ \
            --docker default \
            --junit \
            $coverage_args \
            $changed_args \
            --allow-disabled
        ;;
    units)
        # shellcheck disable=SC2086
        ansible-test units --color -v \
            --docker default \
            --python "$python_version" \
            $coverage_args \
            $changed_args
        ;;
    integration)
        # shellcheck disable=SC2086
        ansible-test integration --color -v \
            --remote-terminate always \
            --remote-stage prod \
            --docker default \
            --python "$python_version" \
            --retry-on-error \
            $coverage_args \
            $changed_args \
            --changed-all-target none \
            --changed-all-mode include \
            $unstable_args \
            "azp/group$test_group/"
        ;;
    *)
        error "found invalid test_name: $test_name"
        ;;
esac

View file

@ -1,133 +0,0 @@
#!/usr/bin/env python3
"""Verify the currently executing Shippable test matrix matches the one defined in the "shippable.yml" file.
"""
import datetime
import json
import os
import re
import sys
import time
from urllib.request import urlopen
try:
from typing import NoReturn
except ImportError:
NoReturn = None
def main():  # type: () -> None
    """Main entry point."""
    # Only enforce the matrix check on the canonical repository; other repos
    # (e.g. forks) are skipped with a notice on stderr.
    repo_full_name = os.environ["REPO_FULL_NAME"]
    required_repo_full_name = "ansible-collections/hetzner.hcloud"
    if repo_full_name != required_repo_full_name:
        sys.stderr.write(
            f'Skipping matrix check on repo "{repo_full_name}" which is not "{required_repo_full_name}".\n'
        )
        return
    with open("shippable.yml", "rb") as yaml_file:
        yaml = yaml_file.read().decode("utf-8").splitlines()
    # Collect every "- env: T=<test>" entry, ignoring the "none" placeholder.
    defined_matrix = [
        match.group(1)
        for match in [re.search(r"^ *- env: T=(.*)$", line) for line in yaml]
        if match and match.group(1) != "none"
    ]
    if not defined_matrix:
        fail('No matrix entries found in the "shippable.yml" file.', 'Did you modify the "shippable.yml" file?')
    run_id = os.environ["SHIPPABLE_BUILD_ID"]
    # Fetch the job list for this run from the Shippable API, retrying with
    # exponential backoff (1, 2, 4, 8 seconds) before giving up.
    sleep = 1
    jobs = []
    for attempts_remaining in range(4, -1, -1):
        try:
            jobs = json.loads(urlopen("https://api.shippable.com/jobs?runIds=%s" % run_id).read())
            if not isinstance(jobs, list):
                raise Exception("Shippable run %s data is not a list." % run_id)
            break
        except Exception as ex:
            if not attempts_remaining:
                fail("Unable to retrieve Shippable run %s matrix." % run_id, str(ex))
            sys.stderr.write(f"Unable to retrieve Shippable run {run_id} matrix: {ex}\n")
            sys.stderr.write("Trying again in %d seconds...\n" % sleep)
            time.sleep(sleep)
            sleep *= 2
    # A job-count mismatch means the running matrix differs from the file.
    if len(jobs) != len(defined_matrix):
        if len(jobs) == 1:
            hint = '\n\nMake sure you do not use the "Rebuild with SSH" option.'
        else:
            hint = ""
        fail(
            "Shippable run %s has %d jobs instead of the expected %d jobs." % (run_id, len(jobs), len(defined_matrix)),
            "Try re-running the entire matrix.%s" % hint,
        )
    # Map job number -> value of the T environment variable for that job.
    actual_matrix = {
        job.get("jobNumber"): dict(tuple(line.split("=", 1)) for line in job.get("env", [])).get("T", "")
        for job in jobs
    }
    # Compare the running matrix against the defined one, job by job
    # (job numbers are 1-based).
    errors = [
        (job_number, test, actual_matrix.get(job_number))
        for job_number, test in enumerate(defined_matrix, 1)
        if actual_matrix.get(job_number) != test
    ]
    if len(errors):
        error_summary = "\n".join(
            f'Job {job_number} expected "{expected}" but found "{actual}" instead.'
            for job_number, expected, actual in errors
        )
        fail(
            "Shippable run %s has a job matrix mismatch." % run_id,
            "Try re-running the entire matrix.\n\n%s" % error_summary,
        )
def fail(message, output):  # type: (str, str) -> NoReturn
    """Write a junit-style XML failure report, echo the message to stderr
    and exit with status 1."""
    # Include a leading newline to improve readability on Shippable "Tests" tab.
    # Without this, the first line becomes indented.
    output = "\n" + output.strip()
    timestamp = datetime.datetime.utcnow().replace(microsecond=0).isoformat()
    # hack to avoid requiring junit-xml, which isn't pre-installed on Shippable outside our test containers
    xml = f"""
<?xml version="1.0" encoding="utf-8"?>
<testsuites disabled="0" errors="1" failures="0" tests="1" time="0.0">
\t<testsuite disabled="0" errors="1" failures="0" file="None" log="None" name="ansible-test" skipped="0" tests="1" time="0" timestamp="{timestamp}" url="None">
\t\t<testcase classname="timeout" name="timeout">
\t\t\t<error message="{message}" type="error">{output}</error>
\t\t</testcase>
\t</testsuite>
</testsuites>
"""
    # Shippable picks up test results from this well-known directory.
    path = "shippable/testresults/check-matrix.xml"
    dir_path = os.path.dirname(path)
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    with open(path, "w") as junit_fd:
        junit_fd.write(xml.lstrip())
    sys.stderr.write(message + "\n")
    sys.stderr.write(output + "\n")
    sys.exit(1)


if __name__ == "__main__":
    main()

View file

@ -1,34 +0,0 @@
#!/usr/bin/env bash
# Run cloud (e.g. hcloud) integration tests for the matrix entry given in "$1",
# formatted as <cloud>/<python_version>/<group>.
set -o pipefail -eux

declare -a args
IFS='/:' read -ra args <<< "$1"
cloud="${args[0]}"
python="${args[1]}"
group="${args[2]}"

target="shippable/${cloud}/group${group}/"
# Remote stage; can be overridden via the S environment variable.
stage="${S:-prod}"

changed_all_target="shippable/${cloud}/smoketest/"

if ! ansible-test integration "${changed_all_target}" --list-targets > /dev/null 2>&1; then
    # no smoketest tests are available for this cloud
    changed_all_target="none"
fi

if [ "${group}" == "1" ]; then
    # only run smoketest tests for group1
    changed_all_mode="include"
else
    # smoketest tests already covered by group1
    changed_all_mode="exclude"
fi

# COVERAGE/CHANGED/UNSTABLE are optional extra flags exported by the caller;
# the ${VAR:+"$VAR"} form expands to nothing when the variable is empty.
# shellcheck disable=SC2086
ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
    --remote-terminate always --remote-stage "${stage}" \
    --docker --python "${python}" --changed-all-target "${changed_all_target}" --changed-all-mode "${changed_all_mode}"

View file

@ -1,30 +0,0 @@
#!/usr/bin/env bash
# Run ansible-test sanity checks (or the "extra" community checks) for the
# matrix entry given in "$1", formatted as <something>/<group>.
set -o pipefail -eux

declare -a args
IFS='/:' read -ra args <<< "$1"
group="${args[1]}"

# Compare against the PR base branch when one is provided by CI.
if [ "${BASE_BRANCH:-}" ]; then
    base_branch="origin/${BASE_BRANCH}"
else
    base_branch=""
fi

if [ "${group}" == "extra" ]; then
    # The "extra" group runs the community internal test tools instead of
    # ansible-test sanity.
    # ansible-galaxy -vvv collection install community.internal_test_tools
    git clone --single-branch --depth 1 https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools

    ../internal_test_tools/tools/run.py --color
    exit
fi

# COVERAGE/CHANGED are optional extra flags exported by the caller.
# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
    --docker --base-branch "${base_branch}" \
    --exclude plugins/module_utils/vendor/ \
    --exclude scripts/ \
    --exclude tests/utils/ \
    --allow-disabled

View file

@ -1,212 +0,0 @@
#!/usr/bin/env bash
# Shippable entry point: prepares the environment and dispatches to the
# per-test-type script named in the first component of "$1"
# (format: <ansible_version>/<script>[/...]).
set -o pipefail -eux

declare -a args
IFS='/:' read -ra args <<< "$1"
ansible_version="${args[0]}"
script="${args[1]}"
# Join all arguments after the first into one string, using the first
# argument as the separator character.
join() {
  local IFS
  IFS="$1"
  shift
  echo "$*"
}
# Ensure we can write other collections to this dir
sudo chown "$(whoami)" "${PWD}/../../"

# Remaining entry point components form the test target, e.g. "sanity/1".
test="$(join / "${args[@]:1}")"

# Log docker state for debugging.
docker images ansible/ansible
docker images quay.io/ansible/*
docker ps

# Remove stale containers left over from previous jobs, keeping the
# infrastructure containers (drydock, azure-pipelines-test-container).
for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v -e '^drydock/' -e '^quay.io/ansible/azure-pipelines-test-container:' | sed 's/^.* //'); do
    docker rm -f "${container}" || true # ignore errors
done

docker ps

if [ -d /home/shippable/cache/ ]; then
    ls -la /home/shippable/cache/
fi

command -v python
python -V
# Run a command up to 3 times until it succeeds; after 3 failures report the
# command on stderr and abort the script with exit status 1.
function retry {
    # shellcheck disable=SC2034
    for repetition in 1 2 3; do
        # Temporarily disable -e so a failing attempt does not abort the script.
        set +e
        "$@"
        result=$?
        set -e
        # Use -eq (POSIX numeric test) instead of the bashism '=='.
        if [ "${result}" -eq 0 ]; then
            return 0
        fi
        # BUG FIX: was the literal string "@* -> ..."; report the real command.
        echo "$* -> ${result}" >&2
    done
    echo "Command '$*' failed 3 times!" >&2
    exit 1
}
command -v pip
pip --version
pip list --disable-pip-version-check

# Install the requested ansible version from the matching branch tarball.
if [ "${ansible_version}" == "devel" ]; then
    retry pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
else
    retry pip install "https://github.com/ansible/ansible/archive/stable-${ansible_version}.tar.gz" --disable-pip-version-check
fi

if [ "${SHIPPABLE_BUILD_ID:-}" ]; then
    # On Shippable, copy the checkout into a proper collection path layout.
    export ANSIBLE_COLLECTIONS_PATHS="${HOME}/.ansible"
    SHIPPABLE_RESULT_DIR="$(pwd)/shippable"
    TEST_DIR="${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/hetzner/hcloud"
    mkdir -p "${TEST_DIR}"
    # shellcheck disable=SC2153
    cp -aT "${SHIPPABLE_BUILD_DIR}" "${TEST_DIR}"
    cd "${TEST_DIR}"
else
    export ANSIBLE_COLLECTIONS_PATHS="${PWD}/../../../"
fi

# START: HACK install dependencies
retry pip install -r tests/integration/requirements.txt -c tests/constraints.txt
retry ansible-galaxy -vvv collection install -r tests/requirements.yml
retry pip install rstcheck
retry ansible-galaxy -vvv collection install community.internal_test_tools
# END: HACK

export PYTHONIOENCODING='utf-8'

# Nightly builds always run the complete test suite with coverage.
if [ "${JOB_TRIGGERED_BY_NAME:-}" == "nightly-trigger" ]; then
    COVERAGE=yes
    COMPLETE=yes
fi

if [ -n "${COVERAGE:-}" ]; then
    # on-demand coverage reporting triggered by setting the COVERAGE environment variable to a non-empty value
    export COVERAGE="--coverage"
elif [[ "${COMMIT_MESSAGE}" =~ ci_coverage ]]; then
    # on-demand coverage reporting triggered by having 'ci_coverage' in the latest commit message
    export COVERAGE="--coverage"
else
    # on-demand coverage reporting disabled (default behavior, always-on coverage reporting remains enabled)
    export COVERAGE="--coverage-check"
fi

if [ -n "${COMPLETE:-}" ]; then
    # disable change detection triggered by setting the COMPLETE environment variable to a non-empty value
    export CHANGED=""
elif [[ "${COMMIT_MESSAGE}" =~ ci_complete ]]; then
    # disable change detection triggered by having 'ci_complete' in the latest commit message
    export CHANGED=""
else
    # enable change detection (default behavior)
    export CHANGED="--changed"
fi

if [ "${IS_PULL_REQUEST:-}" == "true" ]; then
    # run unstable tests which are targeted by focused changes on PRs
    export UNSTABLE="--allow-unstable-changed"
else
    # do not run unstable tests outside PRs
    export UNSTABLE=""
fi

# remove empty core/extras module directories from PRs created prior to the repo-merge
find plugins -type d -empty -print -delete
# Publish coverage reports and test results into the Shippable result
# directory. Registered as an EXIT trap below, so it runs on every exit path.
function cleanup {
    # for complete on-demand coverage generate a report for all files with no coverage on the "sanity/5" job so we only have one copy
    if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/5" ]; then
        stub="--stub"
        # trigger coverage reporting for stubs even if no other coverage data exists
        mkdir -p tests/output/coverage/
    else
        stub=""
    fi

    if [ -d tests/output/coverage/ ]; then
        # Decide whether there is anything to process: real coverage files
        # (anything not starting with '.') or stub reporting being enabled.
        if find tests/output/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
            process_coverage='yes' # process existing coverage files
        elif [ "${stub}" ]; then
            process_coverage='yes' # process coverage when stubs are enabled
        else
            process_coverage=''
        fi

        if [ "${process_coverage}" ]; then
            # use python 3.7 for coverage to avoid running out of memory during coverage xml processing
            # only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job
            virtualenv --python /usr/bin/python3.7 ~/ansible-venv
            set +ux
            # shellcheck disable=SC1090
            . ~/ansible-venv/bin/activate
            set -ux

            # shellcheck disable=SC2086
            ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"}
            cp -a tests/output/reports/coverage=*.xml "$SHIPPABLE_RESULT_DIR/codecoverage/"

            if [ "${ansible_version}" != "2.9" ]; then
                # analyze and capture code coverage aggregated by integration test target
                ansible-test coverage analyze targets generate -v "$SHIPPABLE_RESULT_DIR/testresults/coverage-analyze-targets.json"
            fi

            # upload coverage report to codecov.io only when using complete on-demand coverage
            if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then
                for file in tests/output/reports/coverage=*.xml; do
                    # Derive codecov flags from the report file name.
                    flags="${file##*/coverage=}"
                    flags="${flags%-powershell.xml}"
                    flags="${flags%.xml}"
                    # remove numbered component from stub files when converting to tags
                    flags="${flags//stub-[0-9]*/stub}"
                    flags="${flags//=/,}"
                    flags="${flags//[^a-zA-Z0-9_,]/_}"

                    # Best-effort upload: failures are logged but do not fail the job.
                    bash <(curl -s https://ansible-ci-files.s3.us-east-1.amazonaws.com/codecov/codecov.sh) \
                        -f "${file}" \
                        -F "${flags}" \
                        -n "${test}" \
                        -t 8a86e979-f37b-4d5d-95a4-960c280d5eaa \
                        -X coveragepy \
                        -X gcov \
                        -X fix \
                        -X search \
                        -X xcode ||
                        echo "Failed to upload code coverage report to codecov.io: ${file}"
                done
            fi
        fi
    fi

    # Copy junit, data and bot results to where Shippable collects them.
    if [ -d tests/output/junit/ ]; then
        cp -aT tests/output/junit/ "$SHIPPABLE_RESULT_DIR/testresults/"
    fi

    if [ -d tests/output/data/ ]; then
        cp -a tests/output/data/ "$SHIPPABLE_RESULT_DIR/testresults/"
    fi

    if [ -d tests/output/bot/ ]; then
        cp -aT tests/output/bot/ "$SHIPPABLE_RESULT_DIR/testresults/"
    fi
}
# Only register the result-publishing trap when actually running on Shippable.
if [ "${SHIPPABLE_BUILD_ID:-}" ]; then trap cleanup EXIT; fi

# Coverage processing needs a longer timeout.
if [[ "${COVERAGE:-}" == "--coverage" ]]; then
    timeout=60
else
    timeout=45
fi

ansible-test env --dump --show --timeout "${timeout}" --color -v

# Verify the running job matrix matches shippable.yml, then dispatch to the
# per-test-type script (e.g. cloud.sh, sanity.sh, units.sh).
if [ "${SHIPPABLE_BUILD_ID:-}" ]; then "tests/utils/shippable/check_matrix.py"; fi
"tests/utils/shippable/${script}.sh" "${test}"

View file

@ -1,15 +0,0 @@
#!/usr/bin/env python3
"""Copy stdin to stdout, prefixing each line with MM:SS elapsed since start."""
import sys
import time

start = time.time()

# Pass undecodable bytes through instead of raising UnicodeDecodeError.
sys.stdin.reconfigure(errors="surrogateescape")
sys.stdout.reconfigure(errors="surrogateescape")

for line in sys.stdin:
    seconds = time.time() - start
    sys.stdout.write("%02d:%02d %s" % (seconds // 60, seconds % 60, line))
    # Flush per line so timing output streams in real time.
    sys.stdout.flush()

View file

@ -1,5 +0,0 @@
#!/usr/bin/env bash
# Run the given command, merging stderr into stdout and prefixing every
# output line with the elapsed time via the sibling timing.py script.
set -o pipefail -eu

"$@" 2>&1 | "$(dirname "$0")/timing.py"

View file

@ -1,17 +0,0 @@
#!/usr/bin/env bash
# Run ansible-test unit tests for the matrix entry given in "$1",
# formatted as <something>/<python_version>.
set -o pipefail -eux

declare -a args
IFS='/:' read -ra args <<< "$1"
python_version="${args[1]}"

ansible-test env --timeout 30 --color -v

# COVERAGE/CHANGED are optional extra flags exported by the caller.
# shellcheck disable=SC2086
ansible-test units --color -v \
    --docker default \
    --python "$python_version" \
    ${COVERAGE:+"$COVERAGE"} \
    ${CHANGED:+"$CHANGED"}