2
0
Fork 0
mirror of https://github.com/ansible-collections/hetzner.hcloud synced 2024-12-14 06:22:32 +00:00

Merge pull request from ansible-collections/master

update
This commit is contained in:
John R Barker 2021-03-05 15:59:27 +00:00 committed by GitHub
commit 6e8ca4af5d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
70 changed files with 882 additions and 293 deletions
.azure-pipelines
plugins
shippable.yml
tests
integration
constraints.txt
targets
hcloud_floating_ip
defaults
tasks
hcloud_floating_ip_info/defaults
hcloud_load_balancer/defaults
hcloud_load_balancer_info/defaults
hcloud_load_balancer_network/defaults
hcloud_load_balancer_service/defaults
hcloud_load_balancer_target/defaults
hcloud_network/defaults
hcloud_rdns/defaults
hcloud_route/defaults
hcloud_server/defaults
hcloud_server_info/defaults
hcloud_server_network/defaults
hcloud_ssh_key
defaults
tasks
hcloud_ssh_key_info/defaults
hcloud_subnetwork/defaults
hcloud_volume/defaults
hcloud_volume_info/defaults
sanity
utils

View file

@ -0,0 +1,3 @@
## Azure Pipelines Configuration
Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.

View file

@ -0,0 +1,123 @@
# Azure Pipelines configuration for the hetzner.hcloud collection.
# Builds run for pushes and pull requests against master, plus a nightly schedule.

trigger:
  batch: true
  branches:
    include:
      - master

pr:
  autoCancel: true
  branches:
    include:
      - master

schedules:
  # Nightly run at 09:00 (cron times are UTC in Azure Pipelines), even with no new commits.
  - cron: 0 9 * * *
    displayName: Nightly
    always: true
    branches:
      include:
        - master

variables:
  # Collections must be checked out below an ansible_collections directory.
  - name: checkoutPath
    value: ansible_collections/hetzner/hcloud
  # Branches for which scheduled builds collect code coverage (see scripts/run-tests.sh).
  - name: coverageBranches
    value: master
  # Coverage report type published to Azure Pipelines ("coverage" = Python).
  - name: pipelinesCoverage
    value: coverage
  # Script invoked by the test jobs (see templates/test.yml).
  - name: entryPoint
    value: tests/utils/shippable/shippable.sh
  # 0 = full clone.
  - name: fetchDepth
    value: 0

resources:
  containers:
    - container: default
      image: quay.io/ansible/azure-pipelines-test-container:1.8.0

pool: Standard

stages:
  ### Sanity
  - stage: Ansible_devel
    displayName: Sanity devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          targets:
            - name: Sanity
              test: 'devel/sanity/1'
  - stage: Ansible_2_10
    displayName: Sanity & Units 2.10
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          targets:
            - name: Sanity
              test: '2.10/sanity/1'
  - stage: Ansible_2_9
    displayName: Sanity 2.9
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          targets:
            - name: Sanity
              test: '2.9/sanity/1'
  ## Integration tests (remote)
  - stage: Hetzner_devel
    displayName: Hetzner devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          groups:
            - 1
            - 2
          targets:
            - name: hcloud
              test: 'devel/hcloud/3.8'
  - stage: Hetzner_2_10
    displayName: Hetzner 2.10
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          groups:
            - 1
            - 2
            - 3
          targets:
            - name: hcloud
              test: '2.10/hcloud/3.8'
  - stage: Hetzner_2_9
    displayName: Hetzner 2.9
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          groups:
            - 1
            - 2
            - 3
          targets:
            - name: hcloud
              test: '2.9/hcloud/3.8'
  ### Finally
  - stage: Summary
    # Runs after all test stages, regardless of their outcome.
    condition: succeededOrFailed()
    dependsOn:
      - Ansible_devel
      - Ansible_2_10
      - Ansible_2_9
      - Hetzner_devel
      - Hetzner_2_10
      - Hetzner_2_9
    jobs:
      - template: templates/coverage.yml

View file

@ -0,0 +1,20 @@
#!/usr/bin/env bash
# Aggregate code coverage results for later processing.

set -o pipefail -eu

agent_temp_directory="$1"  # Azure Pipelines Agent.TempDirectory; must not already contain a coverage/ subdirectory

# Prefer tools installed into the workspace "bin" directory, if any.
PATH="${PWD}/bin:${PATH}"

mkdir "${agent_temp_directory}/coverage/"

# Options shared by both ansible-test invocations below.
options=(--venv --venv-system-site-packages --color -v)

# Combine raw coverage data and export it for the downstream summary/coverage job.
ansible-test coverage combine --export "${agent_temp_directory}/coverage/" "${options[@]}"

if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
    # Only analyze coverage if the installed version of ansible-test supports it.
    # Doing so allows this script to work unmodified for multiple Ansible versions.
    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
fi

View file

@ -0,0 +1,60 @@
#!/usr/bin/env python
"""
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import shutil
import sys
def main():
"""Main program entry point."""
source_directory = sys.argv[1]
if '/ansible_collections/' in os.getcwd():
output_path = "tests/output"
else:
output_path = "test/results"
destination_directory = os.path.join(output_path, 'coverage')
if not os.path.exists(destination_directory):
os.makedirs(destination_directory)
jobs = {}
count = 0
for name in os.listdir(source_directory):
match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
label = match.group('label')
attempt = int(match.group('attempt'))
jobs[label] = max(attempt, jobs.get(label, 0))
for label, attempt in jobs.items():
name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
source = os.path.join(source_directory, name)
source_files = os.listdir(source)
for source_file in source_files:
source_path = os.path.join(source, source_file)
destination_path = os.path.join(destination_directory, source_file + '.' + label)
print('"%s" -> "%s"' % (source_path, destination_path))
shutil.copyfile(source_path, destination_path)
count += 1
print('Coverage file count: %d' % count)
print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
print('##vso[task.setVariable variable=outputPath]%s' % output_path)
if __name__ == '__main__':
main()

View file

@ -0,0 +1,24 @@
#!/usr/bin/env bash
# Inspect the test output directory and publish pipeline variables
# describing which kinds of results were produced.

set -o pipefail -eu

# Collections are tested from inside an ansible_collections tree, which uses
# a different results directory than ansible-core.
case "$PWD" in
    */ansible_collections/*)
        output_path="tests/output"
        ;;
    *)
        output_path="test/results"
        ;;
esac

echo "##vso[task.setVariable variable=outputPath]${output_path}"

# Set the given pipeline variable to "true" when the glob (relative to
# output_path) matches at least one existing path.
set_if_present() {
    local variable="$1"
    local pattern="$2"

    if compgen -G "${output_path}/${pattern}" > /dev/null; then
        echo "##vso[task.setVariable variable=${variable}]true"
    fi
}

set_if_present haveTestResults 'junit/*.xml'
set_if_present haveBotResults 'bot/ansible-test-*'
set_if_present haveCoverageData 'coverage/*'

View file

@ -0,0 +1,27 @@
#!/usr/bin/env bash
# Upload code coverage reports to codecov.io.
# Multiple coverage files from multiple languages are accepted and aggregated after upload.
# Python coverage, as well as PowerShell and Python stubs can all be uploaded.

set -o pipefail -eu

output_path="$1"  # directory containing the "reports" subdirectory with coverage XML files

# Fetch the codecov.io uploader script.
# --fail is required: without it curl exits 0 on an HTTP error, and the error
# page would be saved and then *executed* by "bash codecov.sh" below.
# --retry guards against transient network failures.
curl --silent --show-error --fail --retry 3 https://codecov.io/bash > codecov.sh

for file in "${output_path}"/reports/coverage*.xml; do
    # Derive the report name from the file name.
    name="${file}"
    name="${name##*/}" # remove path
    name="${name##coverage=}" # remove 'coverage=' prefix if present
    name="${name%.xml}" # remove '.xml' suffix

    # Upload exactly the supplied file, skipping the uploader's own
    # discovery/fixup passes (-X flags). A failed upload is reported
    # but does not fail the build.
    bash codecov.sh \
        -f "${file}" \
        -n "${name}" \
        -X coveragepy \
        -X gcov \
        -X fix \
        -X search \
        -X xcode \
        || echo "Failed to upload code coverage report to codecov.io: ${file}"
done

View file

@ -0,0 +1,15 @@
#!/usr/bin/env bash
# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.

set -o pipefail -eu

# Prefer an ansible-test installed into the workspace "bin" directory, if any.
PATH="${PWD}/bin:${PATH}"

if ! ansible-test --help >/dev/null 2>&1; then
    # Install the devel version of ansible-test for generating code coverage reports.
    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
    # Since a version of ansible-test is required that can work the output from multiple older releases, the devel version is used.
    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
fi

# Produce XML coverage reports (--stub adds entries for untested files) inside a virtualenv.
ansible-test coverage xml --stub --venv --venv-system-site-packages --color -v

View file

@ -0,0 +1,34 @@
#!/usr/bin/env bash
# Configure the test environment and run the tests.

set -o pipefail -eu

entry_point="$1"  # script to invoke (the pipeline's "entryPoint" variable)
test="$2"  # test identifier, e.g. "devel/sanity/1"
read -r -a coverage_branches <<< "$3" # space separated list of branches to run code coverage on for scheduled builds

# Export the environment the entry point script consumes.
export COMMIT_MESSAGE
export COMPLETE
export COVERAGE
export IS_PULL_REQUEST

if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
    IS_PULL_REQUEST=true
    # On PR builds HEAD is a merge commit; HEAD^2 is the PR branch head.
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
    IS_PULL_REQUEST=
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi

COMPLETE=
COVERAGE=

# Scheduled (nightly) builds run the complete test set, and enable code
# coverage when the built branch is in the coverage_branches list.
if [ "${BUILD_REASON}" = "Schedule" ]; then
    COMPLETE=yes

    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
        COVERAGE=yes
    fi
fi

# Prefix every output line with its elapsed time (see time-command.py).
"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"

View file

@ -0,0 +1,25 @@
#!/usr/bin/env python
"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import time
def main():
"""Main program entry point."""
start = time.time()
sys.stdin.reconfigure(errors='surrogateescape')
sys.stdout.reconfigure(errors='surrogateescape')
for line in sys.stdin:
seconds = time.time() - start
sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
sys.stdout.flush()
if __name__ == '__main__':
main()

View file

@ -0,0 +1,39 @@
# This template adds a job for processing code coverage data.
# It will upload results to Azure Pipelines and codecov.io.
# Use it from a job stage that completes after all other jobs have completed.
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.
jobs:
  - job: Coverage
    displayName: Code Coverage
    container: default
    workspace:
      clean: all
    steps:
      - checkout: self
        fetchDepth: $(fetchDepth)
        path: $(checkoutPath)
      - task: DownloadPipelineArtifact@2
        displayName: Download Coverage Data
        inputs:
          # Download the coverage artifacts from all jobs into coverage/.
          path: coverage/
          patterns: "Coverage */*=coverage.combined"
      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
        displayName: Combine Coverage Data
      # The remaining steps are skipped when no coverage data was produced
      # (combine-coverage.py sets the coverageFileCount variable).
      - bash: .azure-pipelines/scripts/report-coverage.sh
        displayName: Generate Coverage Report
        condition: gt(variables.coverageFileCount, 0)
      - task: PublishCodeCoverageResults@1
        inputs:
          codeCoverageTool: Cobertura
          # Azure Pipelines only accepts a single coverage data file.
          # That means only Python or PowerShell coverage can be uploaded, but not both.
          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
        displayName: Publish to Azure Pipelines
        condition: gt(variables.coverageFileCount, 0)
      - bash: .azure-pipelines/scripts/publish-codecov.sh "$(outputPath)"
        displayName: Publish to codecov.io
        condition: gt(variables.coverageFileCount, 0)
        # An upload failure should not fail the pipeline.
        continueOnError: true

View file

@ -0,0 +1,55 @@
# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
# If this matrix template does not provide the required functionality, consider using the test template directly instead.
parameters:
  # A required list of dictionaries, one per test target.
  # Each item in the list must contain a "test" or "name" key.
  # Both may be provided. If one is omitted, the other will be used.
  - name: targets
    type: object
  # An optional list of values which will be used to multiply the targets list into a matrix.
  # Values can be strings or numbers.
  - name: groups
    type: object
    default: []
  # An optional format string used to generate the job name.
  # - {0} is the name of an item in the targets list.
  - name: nameFormat
    type: string
    default: "{0}"
  # An optional format string used to generate the test name.
  # - {0} is the name of an item in the targets list.
  - name: testFormat
    type: string
    default: "{0}"
  # An optional format string used to add the group to the job name.
  # {0} is the formatted name of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: nameGroupFormat
    type: string
    default: "{0} - {{1}}"
  # An optional format string used to add the group to the test name.
  # {0} is the formatted test of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: testGroupFormat
    type: string
    default: "{0}/{{1}}"
jobs:
  - template: test.yml
    parameters:
      jobs:
        # Without groups: one job per target.
        - ${{ if eq(length(parameters.groups), 0) }}:
            - ${{ each target in parameters.targets }}:
                - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
                  test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
        # With groups: the cross product of targets and groups.
        - ${{ if not(eq(length(parameters.groups), 0)) }}:
            - ${{ each group in parameters.groups }}:
                - ${{ each target in parameters.targets }}:
                    - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
                      test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}

View file

@ -0,0 +1,45 @@
# This template uses the provided list of jobs to create test one or more test jobs.
# It can be used directly if needed, or through the matrix template.
parameters:
  # A required list of dictionaries, one per test job.
  # Each item in the list must contain a "job" and "name" key.
  - name: jobs
    type: object
jobs:
  - ${{ each job in parameters.jobs }}:
      # Derive a job identifier from the test name by replacing characters
      # ("/", ".", "-") that are not valid in Azure Pipelines job names.
      - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
        displayName: ${{ job.name }}
        container: default
        workspace:
          clean: all
        steps:
          - checkout: self
            fetchDepth: $(fetchDepth)
            path: $(checkoutPath)
          - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
            displayName: Run Tests
          - bash: .azure-pipelines/scripts/process-results.sh
            condition: succeededOrFailed()
            displayName: Process Results
          - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
            condition: eq(variables.haveCoverageData, 'true')
            displayName: Aggregate Coverage Data
          - task: PublishTestResults@2
            condition: eq(variables.haveTestResults, 'true')
            inputs:
              testResultsFiles: "$(outputPath)/junit/*.xml"
            displayName: Publish Test Results
          - task: PublishPipelineArtifact@1
            condition: eq(variables.haveBotResults, 'true')
            displayName: Publish Bot Results
            inputs:
              targetPath: "$(outputPath)/bot/"
              artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
          - task: PublishPipelineArtifact@1
            condition: eq(variables.haveCoverageData, 'true')
            displayName: Publish Coverage Data
            inputs:
              targetPath: "$(Agent.TempDirectory)/coverage/"
              # This name format is required by combine-coverage.py in the summary stage.
              artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"

View file

@ -102,8 +102,9 @@ from ansible.release import __version__
try:
from hcloud import hcloud
from hcloud import APIException
HAS_HCLOUD = True
except ImportError:
raise AnsibleError("The Hetzner Cloud dynamic inventory plugin requires hcloud-python.")
HAS_HCLOUD = False
class InventoryModule(BaseInventoryPlugin, Constructable):
@ -243,6 +244,10 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
def parse(self, inventory, loader, path, cache=True):
super(InventoryModule, self).parse(inventory, loader, path, cache)
if not HAS_HCLOUD:
raise AnsibleError("The Hetzner Cloud dynamic inventory plugin requires hcloud-python.")
self._read_config_data(path)
self._configure_hcloud_client()
self._test_hcloud_token()

View file

@ -165,7 +165,7 @@ class AnsibleHcloudCertificate(Hcloud):
self.module.params.get("name")
)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _create_certificate(self):
@ -182,27 +182,29 @@ class AnsibleHcloudCertificate(Hcloud):
if not self.module.check_mode:
try:
self.client.certificates.create(**params)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_certificate()
def _update_certificate(self):
name = self.module.params.get("name")
if name is not None and self.hcloud_certificate.name != name:
self.module.fail_on_missing_params(
required_params=["id"]
)
if not self.module.check_mode:
self.hcloud_certificate.update(name=name)
self._mark_as_changed()
labels = self.module.params.get("labels")
if labels is not None and self.hcloud_certificate.labels != labels:
if not self.module.check_mode:
self.hcloud_certificate.update(labels=labels)
self._mark_as_changed()
try:
name = self.module.params.get("name")
if name is not None and self.hcloud_certificate.name != name:
self.module.fail_on_missing_params(
required_params=["id"]
)
if not self.module.check_mode:
self.hcloud_certificate.update(name=name)
self._mark_as_changed()
labels = self.module.params.get("labels")
if labels is not None and self.hcloud_certificate.labels != labels:
if not self.module.check_mode:
self.hcloud_certificate.update(labels=labels)
self._mark_as_changed()
except Exception as e:
self.module.fail_json(msg=e.message)
self._get_certificate()
def present_certificate(self):
@ -216,7 +218,10 @@ class AnsibleHcloudCertificate(Hcloud):
self._get_certificate()
if self.hcloud_certificate is not None:
if not self.module.check_mode:
self.client.certificates.delete(self.hcloud_certificate)
try:
self.client.certificates.delete(self.hcloud_certificate)
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_certificate = None

View file

@ -134,7 +134,7 @@ class AnsibleHcloudCertificateInfo(Hcloud):
else:
self.hcloud_certificate_info = self.client.certificates.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -123,7 +123,7 @@ class AnsibleHcloudDatacenterInfo(Hcloud):
else:
self.hcloud_datacenter_info = self.client.datacenters.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -207,40 +207,41 @@ class AnsibleHcloudFloatingIP(Hcloud):
self.hcloud_floating_ip = self.client.floating_ips.get_by_name(
self.module.params.get("name")
)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _create_floating_ip(self):
self.module.fail_on_missing_params(
required_params=["type"]
)
try:
params = {
"description": self.module.params.get("description"),
"type": self.module.params.get("type"),
"name": self.module.params.get("name"),
}
if self.module.params.get("home_location") is not None:
params["home_location"] = self.client.locations.get_by_name(
self.module.params.get("home_location")
)
elif self.module.params.get("server") is not None:
params["server"] = self.client.servers.get_by_name(
self.module.params.get("server")
)
else:
self.module.fail_json(msg="one of the following is required: home_location, server")
params = {
"description": self.module.params.get("description"),
"type": self.module.params.get("type"),
"name": self.module.params.get("name"),
}
if self.module.params.get("home_location") is not None:
params["home_location"] = self.client.locations.get_by_name(
self.module.params.get("home_location")
)
elif self.module.params.get("server") is not None:
params["server"] = self.client.servers.get_by_name(
self.module.params.get("server")
)
else:
self.module.fail_json(msg="one of the following is required: home_location, server")
if self.module.params.get("labels") is not None:
params["labels"] = self.module.params.get("labels")
if not self.module.check_mode:
resp = self.client.floating_ips.create(**params)
self.hcloud_floating_ip = resp.floating_ip
delete_protection = self.module.params.get("delete_protection")
if delete_protection is not None:
self.hcloud_floating_ip.change_protection(delete=delete_protection).wait_until_finished()
if self.module.params.get("labels") is not None:
params["labels"] = self.module.params.get("labels")
if not self.module.check_mode:
resp = self.client.floating_ips.create(**params)
self.hcloud_floating_ip = resp.floating_ip
delete_protection = self.module.params.get("delete_protection")
if delete_protection is not None:
self.hcloud_floating_ip.change_protection(delete=delete_protection).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_floating_ip()
@ -290,7 +291,7 @@ class AnsibleHcloudFloatingIP(Hcloud):
self._mark_as_changed()
self._get_floating_ip()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def present_floating_ip(self):
@ -314,7 +315,7 @@ class AnsibleHcloudFloatingIP(Hcloud):
)
self._mark_as_changed()
self.hcloud_floating_ip = None
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -147,7 +147,7 @@ class AnsibleHcloudFloatingIPInfo(Hcloud):
else:
self.hcloud_floating_ip_info = self.client.floating_ips.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -158,7 +158,7 @@ class AnsibleHcloudImageInfo(Hcloud):
self.hcloud_image_info = self.client.images.get_all(**params)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -187,7 +187,7 @@ class AnsibleHcloudLoadBalancer(Hcloud):
self.hcloud_load_balancer = self.client.load_balancers.get_by_name(
self.module.params.get("name")
)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _create_load_balancer(self):
@ -195,33 +195,34 @@ class AnsibleHcloudLoadBalancer(Hcloud):
self.module.fail_on_missing_params(
required_params=["name", "load_balancer_type"]
)
try:
params = {
"name": self.module.params.get("name"),
"load_balancer_type": self.client.load_balancer_types.get_by_name(
self.module.params.get("load_balancer_type")
),
"labels": self.module.params.get("labels"),
}
params = {
"name": self.module.params.get("name"),
"load_balancer_type": self.client.load_balancer_types.get_by_name(
self.module.params.get("load_balancer_type")
),
"labels": self.module.params.get("labels"),
}
if self.module.params.get("location") is None and self.module.params.get("network_zone") is None:
self.module.fail_json(msg="one of the following is required: location, network_zone")
elif self.module.params.get("location") is not None and self.module.params.get("network_zone") is None:
params["location"] = self.client.locations.get_by_name(
self.module.params.get("location")
)
elif self.module.params.get("location") is None and self.module.params.get("network_zone") is not None:
params["network_zone"] = self.module.params.get("network_zone")
if self.module.params.get("location") is None and self.module.params.get("network_zone") is None:
self.module.fail_json(msg="one of the following is required: location, network_zone")
elif self.module.params.get("location") is not None and self.module.params.get("network_zone") is None:
params["location"] = self.client.locations.get_by_name(
self.module.params.get("location")
)
elif self.module.params.get("location") is None and self.module.params.get("network_zone") is not None:
params["network_zone"] = self.module.params.get("network_zone")
if not self.module.check_mode:
resp = self.client.load_balancers.create(**params)
resp.action.wait_until_finished(max_retries=1000)
delete_protection = self.module.params.get("delete_protection")
if delete_protection is not None:
self._get_load_balancer()
self.hcloud_load_balancer.change_protection(delete=delete_protection).wait_until_finished()
if not self.module.check_mode:
resp = self.client.load_balancers.create(**params)
resp.action.wait_until_finished(max_retries=1000)
delete_protection = self.module.params.get("delete_protection")
if delete_protection is not None:
self._get_load_balancer()
self.hcloud_load_balancer.change_protection(delete=delete_protection).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_load_balancer()
@ -261,7 +262,7 @@ class AnsibleHcloudLoadBalancer(Hcloud):
self._mark_as_changed()
self._get_load_balancer()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def present_load_balancer(self):
@ -279,7 +280,7 @@ class AnsibleHcloudLoadBalancer(Hcloud):
self.client.load_balancers.delete(self.hcloud_load_balancer)
self._mark_as_changed()
self.hcloud_load_balancer = None
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -370,7 +370,7 @@ class AnsibleHcloudLoadBalancerInfo(Hcloud):
self.hcloud_load_balancer_info = self.client.load_balancers.get_all(**params)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -125,7 +125,7 @@ class AnsibleHcloudLoadBalancerNetwork(Hcloud):
self.hcloud_network = self.client.networks.get_by_name(self.module.params.get("network"))
self.hcloud_load_balancer = self.client.load_balancers.get_by_name(self.module.params.get("load_balancer"))
self.hcloud_load_balancer_network = None
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _get_load_balancer_network(self):
@ -144,7 +144,7 @@ class AnsibleHcloudLoadBalancerNetwork(Hcloud):
if not self.module.check_mode:
try:
self.hcloud_load_balancer.attach_to_network(**params).wait_until_finished()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
@ -162,9 +162,13 @@ class AnsibleHcloudLoadBalancerNetwork(Hcloud):
self._get_load_balancer_network()
if self.hcloud_load_balancer_network is not None and self.hcloud_load_balancer is not None:
if not self.module.check_mode:
self.hcloud_load_balancer.detach_from_network(
self.hcloud_load_balancer_network.network).wait_until_finished()
self._mark_as_changed()
try:
self.hcloud_load_balancer.detach_from_network(
self.hcloud_load_balancer_network.network).wait_until_finished()
self._mark_as_changed()
except Exception as e:
self.module.fail_json(msg=e.message)
self.hcloud_load_balancer_network = None
@staticmethod

View file

@ -344,7 +344,7 @@ class AnsibleHcloudLoadBalancerService(Hcloud):
self.module.params.get("load_balancer")
)
self._get_load_balancer_service()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _create_load_balancer_service(self):
@ -377,7 +377,7 @@ class AnsibleHcloudLoadBalancerService(Hcloud):
try:
self.hcloud_load_balancer.add_service(LoadBalancerService(**params)).wait_until_finished(
max_retries=1000)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_load_balancer()
@ -403,11 +403,11 @@ class AnsibleHcloudLoadBalancerService(Hcloud):
hcloud_cert = self.client.certificates.get_by_name(
certificate
)
except APIException:
except Exception:
hcloud_cert = self.client.certificates.get_by_id(
certificate
)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
service_http.certificates.append(hcloud_cert)
@ -475,7 +475,7 @@ class AnsibleHcloudLoadBalancerService(Hcloud):
if not self.module.check_mode:
self.hcloud_load_balancer.update_service(LoadBalancerService(**params)).wait_until_finished(
max_retries=1000)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self._get_load_balancer()
@ -499,8 +499,11 @@ class AnsibleHcloudLoadBalancerService(Hcloud):
self._get_load_balancer()
if self.hcloud_load_balancer_service is not None:
if not self.module.check_mode:
self.hcloud_load_balancer.delete_service(self.hcloud_load_balancer_service).wait_until_finished(
max_retries=1000)
try:
self.hcloud_load_balancer.delete_service(self.hcloud_load_balancer_service).wait_until_finished(
max_retries=1000)
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_load_balancer_service = None
except APIException as e:

View file

@ -181,7 +181,7 @@ class AnsibleHcloudLoadBalancerTarget(Hcloud):
if self.module.params.get("type") == "server":
self.hcloud_server = self.client.servers.get_by_name(self.module.params.get("server"))
self.hcloud_load_balancer_target = None
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _get_load_balancer_target(self):
@ -226,7 +226,7 @@ class AnsibleHcloudLoadBalancerTarget(Hcloud):
if not self.module.check_mode:
try:
self.hcloud_load_balancer.add_target(**params).wait_until_finished()
except APIException as e:
except Exception as e:
if e.code == "locked" or e.code == "conflict":
self._create_load_balancer_target()
else:
@ -269,7 +269,10 @@ class AnsibleHcloudLoadBalancerTarget(Hcloud):
target = LoadBalancerTarget(type=self.module.params.get("type"),
ip=LoadBalancerTargetIP(ip=self.module.params.get("ip")),
use_private_ip=False)
self.hcloud_load_balancer.remove_target(target).wait_until_finished()
try:
self.hcloud_load_balancer.remove_target(target).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_load_balancer_target = None

View file

@ -132,7 +132,7 @@ class AnsibleHcloudLoadBalancerTypeInfo(Hcloud):
else:
self.hcloud_load_balancer_type_info = self.client.load_balancer_types.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -122,7 +122,7 @@ class AnsibleHcloudLocationInfo(Hcloud):
else:
self.hcloud_location_info = self.client.locations.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -144,7 +144,7 @@ class AnsibleHcloudNetwork(Hcloud):
self.hcloud_network = self.client.networks.get_by_name(
self.module.params.get("name")
)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _create_network(self):
@ -157,15 +157,16 @@ class AnsibleHcloudNetwork(Hcloud):
"ip_range": self.module.params.get("ip_range"),
"labels": self.module.params.get("labels"),
}
try:
if not self.module.check_mode:
self.client.networks.create(**params)
if not self.module.check_mode:
self.client.networks.create(**params)
delete_protection = self.module.params.get("delete_protection")
if delete_protection is not None:
self._get_network()
self.hcloud_network.change_protection(delete=delete_protection).wait_until_finished()
delete_protection = self.module.params.get("delete_protection")
if delete_protection is not None:
self._get_network()
self.hcloud_network.change_protection(delete=delete_protection).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_network()
@ -188,7 +189,7 @@ class AnsibleHcloudNetwork(Hcloud):
if not self.module.check_mode:
self.hcloud_network.change_protection(delete=delete_protection).wait_until_finished()
self._mark_as_changed()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self._get_network()
@ -206,7 +207,7 @@ class AnsibleHcloudNetwork(Hcloud):
if not self.module.check_mode:
self.client.networks.delete(self.hcloud_network)
self._mark_as_changed()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self.hcloud_network = None

View file

@ -219,12 +219,13 @@ class AnsibleHcloudNetworkInfo(Hcloud):
servers = []
for server in network.servers:
image = None if server.image is None else to_native(server.image.name)
prepared_server = {
"id": to_native(server.id),
"name": to_native(server.name),
"ipv4_address": to_native(server.public_net.ipv4.ip),
"ipv6": to_native(server.public_net.ipv6.ip),
"image": to_native(server.image.name),
"image": image,
"server_type": to_native(server.server_type.name),
"datacenter": to_native(server.datacenter.name),
"location": to_native(server.datacenter.location.name),
@ -263,7 +264,7 @@ class AnsibleHcloudNetworkInfo(Hcloud):
else:
self.hcloud_network_info = self.client.networks.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -147,7 +147,7 @@ class AnsibleHcloudReverseDNS(Hcloud):
self.hcloud_resource = self.client.floating_ips.get_by_name(
self.module.params.get("floating_ip")
)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _get_rdns(self):
@ -198,8 +198,10 @@ class AnsibleHcloudReverseDNS(Hcloud):
}
if not self.module.check_mode:
self.hcloud_resource.change_dns_ptr(**params).wait_until_finished()
try:
self.hcloud_resource.change_dns_ptr(**params).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_resource()
self._get_rdns()
@ -213,8 +215,10 @@ class AnsibleHcloudReverseDNS(Hcloud):
}
if not self.module.check_mode:
self.hcloud_resource.change_dns_ptr(**params).wait_until_finished()
try:
self.hcloud_resource.change_dns_ptr(**params).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_resource()
self._get_rdns()
@ -232,7 +236,10 @@ class AnsibleHcloudReverseDNS(Hcloud):
self._get_rdns()
if self.hcloud_rdns is not None:
if not self.module.check_mode:
self.hcloud_resource.change_dns_ptr(ip=self.hcloud_rdns['ip_address'], dns_ptr=None)
try:
self.hcloud_resource.change_dns_ptr(ip=self.hcloud_rdns['ip_address'], dns_ptr=None)
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_rdns = None

View file

@ -120,7 +120,7 @@ class AnsibleHcloudRoute(Hcloud):
try:
self.hcloud_network = self.client.networks.get_by_name(self.module.params.get("network"))
self.hcloud_route = None
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _get_route(self):
@ -139,7 +139,7 @@ class AnsibleHcloudRoute(Hcloud):
if not self.module.check_mode:
try:
self.hcloud_network.add_route(route=route).wait_until_finished()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
@ -157,7 +157,10 @@ class AnsibleHcloudRoute(Hcloud):
self._get_route()
if self.hcloud_route is not None and self.hcloud_network is not None:
if not self.module.check_mode:
self.hcloud_network.delete_route(self.hcloud_route).wait_until_finished()
try:
self.hcloud_network.delete_route(self.hcloud_route).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_route = None

View file

@ -297,7 +297,7 @@ class AnsibleHcloudServer(Hcloud):
self.hcloud_server = self.client.servers.get_by_name(
self.module.params.get("name")
)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _create_server(self):
@ -345,27 +345,30 @@ class AnsibleHcloudServer(Hcloud):
)
if not self.module.check_mode:
resp = self.client.servers.create(**params)
self.result["root_password"] = resp.root_password
resp.action.wait_until_finished(max_retries=1000)
[action.wait_until_finished() for action in resp.next_actions]
try:
resp = self.client.servers.create(**params)
self.result["root_password"] = resp.root_password
resp.action.wait_until_finished(max_retries=1000)
[action.wait_until_finished() for action in resp.next_actions]
rescue_mode = self.module.params.get("rescue_mode")
if rescue_mode:
self._get_server()
self._set_rescue_mode(rescue_mode)
rescue_mode = self.module.params.get("rescue_mode")
if rescue_mode:
self._get_server()
self._set_rescue_mode(rescue_mode)
backups = self.module.params.get("backups")
if backups:
self._get_server()
self.hcloud_server.enable_backup().wait_until_finished()
backups = self.module.params.get("backups")
if backups:
self._get_server()
self.hcloud_server.enable_backup().wait_until_finished()
delete_protection = self.module.params.get("delete_protection")
rebuild_protection = self.module.params.get("rebuild_protection")
if delete_protection is not None and rebuild_protection is not None:
self._get_server()
self.hcloud_server.change_protection(delete=delete_protection,
rebuild=rebuild_protection).wait_until_finished()
delete_protection = self.module.params.get("delete_protection")
rebuild_protection = self.module.params.get("rebuild_protection")
if delete_protection is not None and rebuild_protection is not None:
self._get_server()
self.hcloud_server.change_protection(delete=delete_protection,
rebuild=rebuild_protection).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_server()
@ -434,7 +437,7 @@ class AnsibleHcloudServer(Hcloud):
rebuild=rebuild_protection).wait_until_finished()
self._mark_as_changed()
self._get_server()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _set_rescue_mode(self, rescue_mode):
@ -456,7 +459,7 @@ class AnsibleHcloudServer(Hcloud):
self.client.servers.power_on(self.hcloud_server).wait_until_finished()
self._mark_as_changed()
self._get_server()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def stop_server(self):
@ -466,7 +469,7 @@ class AnsibleHcloudServer(Hcloud):
self.client.servers.power_off(self.hcloud_server).wait_until_finished()
self._mark_as_changed()
self._get_server()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def rebuild_server(self):
@ -480,7 +483,7 @@ class AnsibleHcloudServer(Hcloud):
self._mark_as_changed()
self._get_server()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def present_server(self):
@ -498,7 +501,7 @@ class AnsibleHcloudServer(Hcloud):
self.client.servers.delete(self.hcloud_server).wait_until_finished()
self._mark_as_changed()
self.hcloud_server = None
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -180,7 +180,7 @@ class AnsibleHcloudServerInfo(Hcloud):
else:
self.hcloud_server_info = self.client.servers.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -145,7 +145,7 @@ class AnsibleHcloudServerNetwork(Hcloud):
self.hcloud_network = self.client.networks.get_by_name(self.module.params.get("network"))
self.hcloud_server = self.client.servers.get_by_name(self.module.params.get("server"))
self.hcloud_server_network = None
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _get_server_network(self):
@ -166,7 +166,7 @@ class AnsibleHcloudServerNetwork(Hcloud):
if not self.module.check_mode:
try:
self.hcloud_server.attach_to_network(**params).wait_until_finished()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
@ -204,7 +204,10 @@ class AnsibleHcloudServerNetwork(Hcloud):
self._get_server_network()
if self.hcloud_server_network is not None and self.hcloud_server is not None:
if not self.module.check_mode:
self.hcloud_server.detach_from_network(self.hcloud_server_network.network).wait_until_finished()
try:
self.hcloud_server.detach_from_network(self.hcloud_server_network.network).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_server_network = None

View file

@ -140,7 +140,7 @@ class AnsibleHcloudServerTypeInfo(Hcloud):
else:
self.hcloud_server_type_info = self.client.server_types.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -156,7 +156,7 @@ class AnsibleHcloudSSHKey(Hcloud):
self.module.params.get("name")
)
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _create_ssh_key(self):
@ -170,7 +170,10 @@ class AnsibleHcloudSSHKey(Hcloud):
}
if not self.module.check_mode:
self.client.ssh_keys.create(**params)
try:
self.client.ssh_keys.create(**params)
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_ssh_key()
@ -203,7 +206,10 @@ class AnsibleHcloudSSHKey(Hcloud):
self._get_ssh_key()
if self.hcloud_ssh_key is not None:
if not self.module.check_mode:
self.client.ssh_keys.delete(self.hcloud_ssh_key)
try:
self.client.ssh_keys.delete(self.hcloud_ssh_key)
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_ssh_key = None

View file

@ -129,7 +129,7 @@ class AnsibleHcloudSSHKeyInfo(Hcloud):
else:
self.hcloud_ssh_key_info = self.client.ssh_keys.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -160,7 +160,7 @@ class AnsibleHcloudSubnetwork(Hcloud):
try:
self.hcloud_network = self.client.networks.get_by_name(self.module.params.get("network"))
self.hcloud_subnetwork = None
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _get_subnetwork(self):
@ -184,7 +184,7 @@ class AnsibleHcloudSubnetwork(Hcloud):
if not self.module.check_mode:
try:
self.hcloud_network.add_subnet(subnet=NetworkSubnet(**params)).wait_until_finished()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
@ -202,7 +202,10 @@ class AnsibleHcloudSubnetwork(Hcloud):
self._get_subnetwork()
if self.hcloud_subnetwork is not None and self.hcloud_network is not None:
if not self.module.check_mode:
self.hcloud_network.delete_subnet(self.hcloud_subnetwork).wait_until_finished()
try:
self.hcloud_network.delete_subnet(self.hcloud_subnetwork).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self.hcloud_subnetwork = None

View file

@ -206,7 +206,7 @@ class AnsibleHcloudVolume(Hcloud):
self.hcloud_volume = self.client.volumes.get_by_name(
self.module.params.get("name")
)
except hcloud.APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def _create_volume(self):
@ -228,14 +228,16 @@ class AnsibleHcloudVolume(Hcloud):
self.module.fail_json(msg="server or location is required")
if not self.module.check_mode:
resp = self.client.volumes.create(**params)
resp.action.wait_until_finished()
[action.wait_until_finished() for action in resp.next_actions]
delete_protection = self.module.params.get("delete_protection")
if delete_protection is not None:
self._get_volume()
self.hcloud_volume.change_protection(delete=delete_protection).wait_until_finished()
try:
resp = self.client.volumes.create(**params)
resp.action.wait_until_finished()
[action.wait_until_finished() for action in resp.next_actions]
delete_protection = self.module.params.get("delete_protection")
if delete_protection is not None:
self._get_volume()
self.hcloud_volume.change_protection(delete=delete_protection).wait_until_finished()
except Exception as e:
self.module.fail_json(msg=e.message)
self._mark_as_changed()
self._get_volume()
@ -277,7 +279,7 @@ class AnsibleHcloudVolume(Hcloud):
self._mark_as_changed()
self._get_volume()
except hcloud.APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
def present_volume(self):
@ -295,7 +297,7 @@ class AnsibleHcloudVolume(Hcloud):
self.client.volumes.delete(self.hcloud_volume)
self._mark_as_changed()
self.hcloud_volume = None
except hcloud.APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -147,7 +147,7 @@ class AnsibleHcloudVolumeInfo(Hcloud):
else:
self.hcloud_volume_info = self.client.volumes.get_all()
except APIException as e:
except Exception as e:
self.module.fail_json(msg=e.message)
@staticmethod

View file

@ -1,56 +0,0 @@
language: python
python: 3.7
env:
matrix:
- T=none
matrix:
exclude:
- env: T=none
include:
- env: T=devel/sanity/1
python: 3.7
- env: T=devel/sanity/2
python: 3.7
- env: T=devel/sanity/3
python: 3.7
- env: T=devel/sanity/4
python: 3.7
- env: T=devel/hcloud/3.8/1
- env: T=devel/hcloud/3.8/2
- env: T=2.10/sanity/1
- env: T=2.10/sanity/2
- env: T=2.10/sanity/3
- env: T=2.10/sanity/4
- env: T=2.10/hcloud/3.8/1
- env: T=2.10/hcloud/3.8/2
- env: T=2.10/hcloud/3.8/3
- env: T=2.9/sanity/1
- env: T=2.9/sanity/2
- env: T=2.9/sanity/3
- env: T=2.9/sanity/4
branches:
except:
- "*-patch-*"
- "revert-*-*"
build:
ci:
- tests/utils/shippable/timing.sh tests/utils/shippable/shippable.sh $T
integrations:
notifications:
- integrationName: email
type: email
on_success: never
on_failure: never
on_start: never
on_pull_request: never

View file

@ -0,0 +1 @@
hcloud >= 1.10.0 # minimum version

View file

@ -2,5 +2,5 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_floating_ip_name: "{{hcloud_prefix}}-integration"
hcloud_server_name: "{{hcloud_prefix}}-fip-tests"
hcloud_floating_ip_name: "{{hcloud_prefix}}-i"
hcloud_server_name: "{{hcloud_prefix}}-fip-t"

View file

@ -62,6 +62,33 @@
- result is failed
- 'result.msg == "one of the following is required: id, name"'
- name: test invalid type
hcloud_floating_ip:
name: "{{ hcloud_floating_ip_name }}"
type: ipv5
home_location: "fsn1"
register: result
ignore_errors: yes
- name: verify invalid type
assert:
that:
- result is failed
- 'result.msg == "value of type must be one of: ipv4, ipv6, got: ipv5"'
- name: test invalid location
hcloud_floating_ip:
name: "{{ hcloud_floating_ip_name }}"
type: ipv4
home_location: "abc"
register: result
ignore_errors: yes
- name: verify invalid location
assert:
that:
- result is failed
- result.msg == "invalid input in fields 'server', 'home_location'"
- name: test create Floating IP with check mode
hcloud_floating_ip:
name: "{{ hcloud_floating_ip_name }}"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_floating_ip_name: "{{hcloud_prefix}}-integration"
hcloud_floating_ip_name: "{{hcloud_prefix}}-i"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_load_balancer_name: "{{hcloud_prefix}}-integration"
hcloud_load_balancer_name: "{{hcloud_prefix}}-i"

View file

@ -2,5 +2,5 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_load_balancer_name: "{{hcloud_prefix}}-integration"
hcloud_server_name: "{{hcloud_prefix}}-lb-info"
hcloud_load_balancer_name: "{{hcloud_prefix}}-i"
hcloud_server_name: "{{hcloud_prefix}}-lb-i"

View file

@ -2,5 +2,5 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_network_name: "{{hcloud_prefix}}-load_balancer-network"
hcloud_load_balancer_name: "{{hcloud_prefix}}-load_balancer-network"
hcloud_network_name: "{{hcloud_prefix}}-lb-n"
hcloud_load_balancer_name: "{{hcloud_prefix}}-lb-n"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_load_balancer_name: "{{hcloud_prefix}}-load_balancer-target"
hcloud_load_balancer_name: "{{hcloud_prefix}}-lb-target"

View file

@ -2,6 +2,6 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_server_name: "{{hcloud_prefix}}-lb-target"
hcloud_load_balancer_name: "{{hcloud_prefix}}-load_balancer-target"
hcloud_server_name: "{{hcloud_prefix}}-lb-t"
hcloud_load_balancer_name: "{{hcloud_prefix}}-lb-target"
hcloud_testing_ip: "176.9.59.39"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_network_name: "{{hcloud_prefix}}-integ"
hcloud_network_name: "{{hcloud_prefix}}-i"

View file

@ -2,5 +2,5 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_server_name: "{{hcloud_prefix}}-rdns"
hcloud_floating_ip_name: "{{hcloud_prefix}}-rdns"
hcloud_server_name: "{{hcloud_prefix}}"
hcloud_floating_ip_name: "{{hcloud_prefix}}"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_network_name: "{{hcloud_prefix}}-routes"
hcloud_network_name: "{{hcloud_prefix}}-ro"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_server_name: "{{hcloud_prefix}}-integration"
hcloud_server_name: "{{hcloud_prefix}}-i"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_server_name: "{{hcloud_prefix}}-integration"
hcloud_server_name: "{{hcloud_prefix}}-ii"

View file

@ -2,5 +2,5 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_network_name: "{{hcloud_prefix}}-server-network"
hcloud_server_name: "{{hcloud_prefix}}-server-network"
hcloud_network_name: "{{hcloud_prefix}}-sn"
hcloud_server_name: "{{hcloud_prefix}}-sn"

View file

@ -2,7 +2,10 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_server_name: "{{hcloud_prefix}}-integration"
hcloud_ssh_key_name: "{{hcloud_prefix}}-integration"
hcloud_server_name: "{{hcloud_prefix}}"
hcloud_ssh_key_name: "{{hcloud_prefix}}"
hcloud_ssh_key_public_key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDnaTPfKaX1QKcRLOfr34buVLh5FhJAThI9NYB0xNdXsMd4Y0zLyyCQzHbx4eWCVZxym/s6csWSeLaAhO1GOHeAw3hQFMqf1oTBx6Y8g0pKpeotKPa/PDSUzdZF9Lc+DadtpQd8kFVHAu1Kd3zoEUnk1u6kP7I4qu4Z/6F9qBDF+M3aobiPVxdS7GwaVRW3nZu+FcQDLiBiNOjuRDyjHcDfEUkoh2SOu25RrFtGPzFu5mGmBJwotKpWAocLGfHzyn/fAHxgw3jKZVH/t+XWQFnl82Ie8yE3Z1EZ7oDkNRqFQT9AdXEQOLycTTYTQMJZpgeFTv3sAo6lPRCusiFmmLcf ci@ansible.hetzner.cloud"
hcloud_ssh_key_fingerprint: "56:89:c4:d6:a7:4a:79:82:f4:c2:58:9c:e1:d2:2d:4e"
hcloud_doubled_ssh_key_public_key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC1AiuN3UMQKzOs4tNudmlDSkSebC+savc6CivoHGflUKeli7nKb5pKgGiqH+zeWZc+8+flUa2BxsJWmi7d1nGJ++W4BnzmqW78ApelpJnGtuX8IKNcq/trhVTQyaShPiLluoBs7bXyyZpAKNGkk3jHrgwwYD/QQDN0CJnQUM18fjH5CUes2vmaG/kkhn7ctuVHDOvDcEy8KdBX3fYyrtXw5GgWDC5borG6yT1f3E9AXfRPL9OQjMTeC+G4FHscJAZjNnYav+jLrQLdV1xJ0JgbjRyBgTAfBszx9oKIjzCUPvpj4npju0WFGu10pIh0w7bluMoVn1tS6Y3gxE/Cepwt ci@ansible.hetzner.cloud"
hcloud_doubled_ssh_key_fingerprint: "f9:33:40:ff:77:f3:3e:85:f2:9e:8f:98:71:fd:a0:58"

View file

@ -113,7 +113,7 @@
hcloud_server:
name: "{{ hcloud_server_name }}"
server_type: cx11
image: "ubuntu-18.04"
image: "ubuntu-20.04"
ssh_keys:
- "{{ hcloud_ssh_key_name }}"
state: started
@ -128,11 +128,23 @@
id: "{{ sshKey.hcloud_ssh_key.id }}"
state: absent
register: result
- name: verify absent server
- name: verify absent sshkey
assert:
that:
- result is success
- name: test fail cleanly on double created ssh key
hcloud_ssh_key:
name: "{{ hcloud_ssh_key_name }}othername"
public_key: "{{ hcloud_doubled_ssh_key_public_key }}"
register: result
ignore_errors: yes
- name: verify failed correctly
assert:
that:
- result is failed
- 'result.msg == "SSH key with the same fingerprint already exists"'
- name: cleanup
hcloud_server:
name: "{{ hcloud_server_name }}"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_ssh_key_name: "{{hcloud_prefix}}-ssh_key_facts"
hcloud_ssh_key_name: "{{hcloud_prefix}}-f"

View file

@ -2,5 +2,5 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_network_name: "{{hcloud_prefix}}-subnet"
hcloud_network_name: "{{hcloud_prefix}}-s"
hetzner_vswitch_id: 15311

View file

@ -2,5 +2,5 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_volume_name: "{{hcloud_prefix}}-integ"
hcloud_server_name: "{{hcloud_prefix}}-volume-server"
hcloud_volume_name: "{{hcloud_prefix}}-i"
hcloud_server_name: "{{hcloud_prefix}}-vs"

View file

@ -2,4 +2,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
hcloud_prefix: "tests"
hcloud_volume_name: "{{hcloud_prefix}}-facts"
hcloud_volume_name: "{{hcloud_prefix}}-i"

View file

@ -0,0 +1,2 @@
tests/utils/shippable/check_matrix.py replace-urlopen
tests/utils/shippable/timing.py shebang

View file

@ -0,0 +1,2 @@
tests/utils/shippable/check_matrix.py replace-urlopen
tests/utils/shippable/timing.py shebang

View file

@ -0,0 +1,2 @@
tests/utils/shippable/check_matrix.py replace-urlopen
tests/utils/shippable/timing.py shebang

View file

@ -1,4 +0,0 @@
packaging # needed for update-bundled and changelog
sphinx ; python_version >= '3.5' # docs build requires python 3+
sphinx-notfound-page ; python_version >= '3.5' # docs build requires python 3+
straight.plugin ; python_version >= '3.5' # needed for hacking/build-ansible.py which will host changelog generation and requires python 3+

View file

@ -7,6 +7,7 @@ declare -a args
IFS='/:' read -ra args <<< "$1"
ansible_version="${args[0]}"
# shellcheck disable=SC2034
script="${args[1]}"
function join {
@ -21,6 +22,7 @@ python -V
function retry
{
# shellcheck disable=SC2034
for repetition in 1 2 3; do
set +e
"$@"
@ -29,9 +31,9 @@ function retry
if [ ${result} == 0 ]; then
return ${result}
fi
echo "$@ -> ${result}"
echo "@* -> ${result}"
done
echo "Command '$@' failed 3 times!"
echo "Command '@*' failed 3 times!"
exit -1
}
@ -44,6 +46,7 @@ else
retry pip install "https://github.com/ansible/ansible/archive/stable-${ansible_version}.tar.gz" --disable-pip-version-check
fi
export ANSIBLE_COLLECTIONS_PATHS="${HOME}/.ansible"
# shellcheck disable=SC2034
SHIPPABLE_RESULT_DIR="$(pwd)/shippable"
TEST_DIR="${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/hetzner/hcloud"
rm -rf "${TEST_DIR}"
@ -87,7 +90,7 @@ find plugins -type d -empty -print -delete
ansible-test env --dump --show --timeout "50" --color -v
group="${args[1]}"
echo $test
echo "$test"
if [[ "${test}" =~ hcloud ]]; then
group="${args[3]}"
bash tests/utils/gitlab/integration.sh "shippable/hcloud/group${group}/"

View file

@ -3,13 +3,18 @@
target="$1"
HCLOUD_TOKEN=$(cat hcloud_token.txt)
# shellcheck disable=SC2034,SC2154
changed_all_target="shippable/${cloud}/smoketest/"
# shellcheck disable=SC2046
echo "[default]
hcloud_api_token=${HCLOUD_TOKEN}
" >> $(pwd)/tests/integration/cloud-config-hcloud.ini
# shellcheck disable=SC2086
export SHIPPABLE="true"
# shellcheck disable=SC2155
export SHIPPABLE_BUILD_NUMBER="gl-$(cat prefix.txt)"
# shellcheck disable=SC2155,SC2002
export SHIPPABLE_JOB_NUMBER="$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 2 | head -n 1)"
ansible-test integration --color --local -vv "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"}

View file

@ -43,5 +43,5 @@ pip install pylint
# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
--base-branch "${base_branch}" \
--exclude shippable.yml --exclude tests/utils/ \
--exclude tests/utils/ \
"${options[@]}" --allow-disabled

View file

@ -14,31 +14,14 @@ else
fi
if [ "${group}" == "extra" ]; then
pip install antsibull-changelog
python ../../community/internal_test_tools/tools/run.py --color
# ansible-galaxy -vvv collection install community.internal_test_tools
git clone --single-branch --depth 1 https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
../internal_test_tools/tools/run.py --color
exit
fi
case "${group}" in
1) options=(--skip-test pylint --skip-test ansible-doc --skip-test validate-modules) ;;
2) options=( --test ansible-doc --test validate-modules) ;;
3) options=(--test pylint plugins/modules/) ;;
4) options=(--test pylint --exclude plugins/modules/) ;;
esac
# allow collection migration sanity tests for groups 3 and 4 to pass without updating this script during migration
network_path="lib/ansible/modules/network/"
if [ -d "${network_path}" ]; then
if [ "${group}" -eq 3 ]; then
options+=(--exclude "${network_path}")
elif [ "${group}" -eq 4 ]; then
options+=("${network_path}")
fi
fi
# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
--docker --base-branch "${base_branch}" \
--exclude shippable.yml --exclude tests/utils/ \
"${options[@]}" --allow-disabled
--allow-disabled

View file

@ -14,13 +14,16 @@ function join {
echo "$*";
}
# Ensure we can write other collections to this dir
sudo chown "$(whoami)" "${PWD}/../../"
test="$(join / "${args[@]:1}")"
docker images ansible/ansible
docker images quay.io/ansible/*
docker ps
for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v '^drydock/' | sed 's/^.* //'); do
for container in $(docker ps --format '{{.Image}} {{.ID}}' | grep -v -e '^drydock/' -e '^quay.io/ansible/azure-pipelines-test-container:' | sed 's/^.* //'); do
docker rm -f "${container}" || true # ignore errors
done
@ -35,6 +38,7 @@ python -V
function retry
{
# shellcheck disable=SC2034
for repetition in 1 2 3; do
set +e
"$@"
@ -43,10 +47,10 @@ function retry
if [ ${result} == 0 ]; then
return ${result}
fi
echo "$@ -> ${result}"
echo "@* -> ${result}"
done
echo "Command '$@' failed 3 times!"
exit -1
echo "Command '@*' failed 3 times!"
exit 1
}
command -v pip
@ -57,12 +61,17 @@ if [ "${ansible_version}" == "devel" ]; then
else
retry pip install "https://github.com/ansible/ansible/archive/stable-${ansible_version}.tar.gz" --disable-pip-version-check
fi
export ANSIBLE_COLLECTIONS_PATHS="${HOME}/.ansible"
SHIPPABLE_RESULT_DIR="$(pwd)/shippable"
TEST_DIR="${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/hetzner/hcloud"
mkdir -p "${TEST_DIR}"
cp -aT "${SHIPPABLE_BUILD_DIR}" "${TEST_DIR}"
cd "${TEST_DIR}"
if [ "${SHIPPABLE_BUILD_ID:-}" ]; then
export ANSIBLE_COLLECTIONS_PATHS="${HOME}/.ansible"
SHIPPABLE_RESULT_DIR="$(pwd)/shippable"
TEST_DIR="${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/hetzner/hcloud"
mkdir -p "${TEST_DIR}"
cp -aT "${SHIPPABLE_BUILD_DIR}" "${TEST_DIR}"
cd "${TEST_DIR}"
else
export ANSIBLE_COLLECTIONS_PATHS="${PWD}/../../../"
fi
# STAR: HACK install dependencies
retry ansible-galaxy -vvv collection install community.general
@ -76,9 +85,20 @@ retry ansible-galaxy -vvv collection install community.internal_test_tools
export PYTHONIOENCODING='utf-8'
if [ "${JOB_TRIGGERED_BY_NAME:-}" == "nightly-trigger" ]; then
COVERAGE=yes
COMPLETE=yes
fi
if [ -n "${COVERAGE:-}" ]; then
# on-demand coverage reporting triggered by setting the COVERAGE environment variable to a non-empty value
export COVERAGE="--coverage"
elif [[ "${COMMIT_MESSAGE}" =~ ci_coverage ]]; then
# on-demand coverage reporting triggered by having 'ci_coverage' in the latest commit message
export COVERAGE="--coverage"
else
# on-demand coverage reporting disabled (default behavior, always-on coverage reporting remains enabled)
export COVERAGE="--coverage-check"
fi
if [ -n "${COMPLETE:-}" ]; then
# disable change detection triggered by setting the COMPLETE environment variable to a non-empty value
@ -104,6 +124,68 @@ find plugins -type d -empty -print -delete
function cleanup
{
# for complete on-demand coverage generate a report for all files with no coverage on the "sanity/5" job so we only have one copy
if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/5" ]; then
stub="--stub"
# trigger coverage reporting for stubs even if no other coverage data exists
mkdir -p tests/output/coverage/
else
stub=""
fi
if [ -d tests/output/coverage/ ]; then
if find tests/output/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
process_coverage='yes' # process existing coverage files
elif [ "${stub}" ]; then
process_coverage='yes' # process coverage when stubs are enabled
else
process_coverage=''
fi
if [ "${process_coverage}" ]; then
# use python 3.7 for coverage to avoid running out of memory during coverage xml processing
# only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job
virtualenv --python /usr/bin/python3.7 ~/ansible-venv
set +ux
. ~/ansible-venv/bin/activate
set -ux
# shellcheck disable=SC2086
ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"}
cp -a tests/output/reports/coverage=*.xml "$SHIPPABLE_RESULT_DIR/codecoverage/"
if [ "${ansible_version}" != "2.9" ]; then
# analyze and capture code coverage aggregated by integration test target
ansible-test coverage analyze targets generate -v "$SHIPPABLE_RESULT_DIR/testresults/coverage-analyze-targets.json"
fi
# upload coverage report to codecov.io only when using complete on-demand coverage
if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then
for file in tests/output/reports/coverage=*.xml; do
flags="${file##*/coverage=}"
flags="${flags%-powershell.xml}"
flags="${flags%.xml}"
# remove numbered component from stub files when converting to tags
flags="${flags//stub-[0-9]*/stub}"
flags="${flags//=/,}"
flags="${flags//[^a-zA-Z0-9_,]/_}"
bash <(curl -s https://codecov.io/bash) \
-f "${file}" \
-F "${flags}" \
-n "${test}" \
-t 8a86e979-f37b-4d5d-95a4-960c280d5eaa \
-X coveragepy \
-X gcov \
-X fix \
-X search \
-X xcode \
|| echo "Failed to upload code coverage report to codecov.io: ${file}"
done
fi
fi
fi
if [ -d tests/output/junit/ ]; then
cp -aT tests/output/junit/ "$SHIPPABLE_RESULT_DIR/testresults/"
fi
@ -117,9 +199,15 @@ function cleanup
fi
}
trap cleanup EXIT
if [ "${SHIPPABLE_BUILD_ID:-}" ]; then trap cleanup EXIT; fi
ansible-test env --dump --show --timeout "50" --color -v
if [[ "${COVERAGE:-}" == "--coverage" ]]; then
timeout=60
else
timeout=45
fi
"tests/utils/shippable/check_matrix.py"
ansible-test env --dump --show --timeout "${timeout}" --color -v
if [ "${SHIPPABLE_BUILD_ID:-}" ]; then "tests/utils/shippable/check_matrix.py"; fi
"tests/utils/shippable/${script}.sh" "${test}"