Merge branch 'main' into main

Pavle 2024-10-30 18:53:28 +01:00
commit e04558d143
402 changed files with 4012 additions and 7040 deletions

View file

@ -2,5 +2,7 @@
profile: production
warn_list:
- galaxy[version-incorrect] # until collection gets bumped to 1.x.x
- name[casing] # https://github.com/ansible/ansible-lint/issues/4035#issuecomment-2116272270
skip_list:
- role-name # Allow underscore prefix in role name for internal role
- var-naming[no-role-prefix] # https://github.com/ansible/ansible-lint/pull/3422#issuecomment-1549584988

View file

@ -0,0 +1,106 @@
---
- name: Run local preparation
hosts: localhost
gather_facts: false
vars:
__role_name: "{{ lookup('ansible.builtin.env', 'MOLECULE_PROJECT_DIRECTORY') | basename }}"
__binary_name: "{{ __role_name }}"
__binary_url: "{{ lookup('ansible.builtin.vars', __role_name ~ '_binary_url', default='') }}"
__cache_path: "{{ lookup('ansible.builtin.vars', __role_name ~ '_local_cache_path', default='') }}"
__tls_server_config: "{{ lookup('ansible.builtin.vars', __role_name ~ '_tls_server_config', default={}) }}"
tasks:
- name: "Create local binary directory"
ansible.builtin.file:
path: "{{ __cache_path }}"
state: directory
mode: 0755
when: (__cache_path)
- name: "Fetch binary"
become: false
ansible.builtin.unarchive:
src: "{{ __binary_url }}"
dest: "{{ __cache_path }}"
remote_src: true
list_files: true
extra_opts:
- "--strip-components=1"
creates: "{{ __cache_path }}/{{ __binary_name }}"
check_mode: false
register: __download_binary
when: (__binary_url)
- name: Generate self signed certificates
when: "'cert_file' in __tls_server_config"
block:
- name: Install pyOpenSSL for certificate generation
ansible.builtin.pip:
name: "pyOpenSSL"
- name: Create private key
community.crypto.openssl_privatekey:
path: "/tmp/tls.key"
- name: Create CSR
community.crypto.openssl_csr:
path: "/tmp/tls.csr"
privatekey_path: "/tmp/tls.key"
- name: Create certificate
community.crypto.x509_certificate:
path: "/tmp/tls.cert"
csr_path: "/tmp/tls.csr"
privatekey_path: "/tmp/tls.key"
provider: selfsigned
- name: Filter out incompatible distro/ansible version combos
ansible.builtin.add_host:
name: "{{ item }}"
groups: target_hosts
loop: >-
{{
groups['all']
| map('extract', hostvars)
| rejectattr('exclude_ansible_vers', 'defined')
| map(attribute='inventory_hostname')
| list
| union(
groups['all']
| map('extract', hostvars)
| selectattr('exclude_ansible_vers', 'defined')
| rejectattr('exclude_ansible_vers', 'search', ansible_version.major ~ '.' ~ ansible_version.minor)
| map(attribute='inventory_hostname')
| list
)
}}
when: item not in groups['target_hosts']
changed_when: false
- name: Run target preparation
hosts: target_hosts
any_errors_fatal: true
vars:
__role_name: "{{ lookup('ansible.builtin.env', 'MOLECULE_PROJECT_DIRECTORY') | basename }}"
__tls_server_config: "{{ lookup('ansible.builtin.vars', __role_name ~ '_tls_server_config', default={}) }}"
tasks:
- name: Copy self signed certificates
when: "'cert_file' in __tls_server_config"
block:
- name: "Create cert dir"
ansible.builtin.file:
path: "{{ __tls_server_config.cert_file | dirname }}"
state: directory
owner: root
group: root
mode: u+rwX,g+rwX,o=rX
- name: "Copy cert and key"
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "{{ item.mode | default('0644') }}"
loop:
- src: "/tmp/tls.cert"
dest: "{{ __tls_server_config.cert_file }}"
- src: "/tmp/tls.key"
dest: "{{ __tls_server_config.key_file }}"

View file

@ -17,48 +17,12 @@ platforms:
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: centos-7
image: dokken/centos-7
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /usr/lib/systemd/systemd
- name: centos-stream-8
image: dokken/centos-stream-8
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: centos-stream-9
image: dokken/centos-stream-9
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: debian-10
image: dokken/debian-10
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: debian-11
image: dokken/debian-11
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: fedora-37
image: dokken/fedora-37
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: fedora-38
image: dokken/fedora-38
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: ubuntu-20.04
image: dokken/ubuntu-20.04
pre_build_image: true
@ -71,8 +35,31 @@ platforms:
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
- name: ubuntu-24.04
image: dokken/ubuntu-24.04
pre_build_image: true
privileged: true
cgroup_parent: docker.slice
command: /lib/systemd/systemd
verifier:
name: testinfra
additional_files_or_dirs:
- "../../../../../.testinfra/testinfra_helpers.py"
env:
PYTHONPATH: "${MOLECULE_PROJECT_DIRECTORY}/../../.testinfra:${PYTHONPATH}"
provisioner:
playbooks:
converge: "${MOLECULE_PROJECT_DIRECTORY}/../../.config/molecule/converge.yml"
inventory:
hosts:
target_hosts:
hosts: {}
host_vars:
almalinux-8:
exclude_ansible_vers:
- "2.17"
ubuntu-24.04:
exclude_ansible_vers:
- "2.9"
- "2.10"
- "2.11"

View file

@ -1,6 +1,33 @@
---
- name: Filter and add hosts to ansible_compatible_hosts
hosts: localhost
gather_facts: false
tasks:
- name: Filter out incompatible distro/ansible version combos
ansible.builtin.add_host:
name: "{{ item }}"
groups: target_hosts
loop: >-
{{
groups['all']
| map('extract', hostvars)
| rejectattr('exclude_ansible_vers', 'defined')
| map(attribute='inventory_hostname')
| list
| union(
groups['all']
| map('extract', hostvars)
| selectattr('exclude_ansible_vers', 'defined')
| rejectattr('exclude_ansible_vers', 'search', ansible_version.major ~ '.' ~ ansible_version.minor)
| map(attribute='inventory_hostname')
| list
)
}}
when: item not in groups['target_hosts']
changed_when: false
- name: Converge
hosts: all
hosts: target_hosts
any_errors_fatal: true
tasks:
- name: "Run role"

View file

@ -3,9 +3,16 @@
# Description: Discover the upstream repo from each role's default vars.
result=$(
for defaults_file in roles/*/vars/main.yml ; do
role="$(echo "${defaults_file}" | cut -f2 -d'/')"
yq eval "[{\"repo\": ._${role}_repo, \"role\": \"${role}\", \"type\": ._${role}_repo_type // \"github\"}]" "${defaults_file}"
for role_dir in roles/*/ ; do
role="$(basename "${role_dir}")"
role_repo=$(yq eval "._${role}_repo" "${role_dir}/vars/main.yml" 2>/dev/null)
yq eval "[{
\"repo\": \"${role_repo}\",
\"role\": \"${role}\",
\"type\": (.${role}_binary_url | split(\"/\")[2] | split(\".\")[0] // \"github\")
}]" "${role_dir}/defaults/main.yml" 2>/dev/null
done | yq -o json -I=0
)
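
For illustration, the loop above emits a single-line JSON array roughly of this shape (values taken from the alertmanager role elsewhere in this diff; the type falls back to "github" when the binary URL is hosted on github.com):

# example output of the role discovery loop
[{"repo": "prometheus/alertmanager", "role": "alertmanager", "type": "github"}]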

View file

@ -90,12 +90,12 @@ esac
echo_green "New ${source_repo} version is: ${version}"
# Download destination repository
if grep "_version: ${version}" "roles/${role}/defaults/main.yml"; then
if grep -Eq "^${role}_version: ${version}" "roles/${role}/defaults/main.yml"; then
echo_green "Newest version is used."
exit 0
fi
sed -i "s/_version:.*$/_version: ${version}/" "roles/${role}/defaults/main.yml"
sed -i -r "s/_version.*[0-9]+\.[0-9]+\.[0-9]+/_version\` | ${version}/" "roles/${role}/README.md"
sed -i "s/^${role}_version:.*$/${role}_version: ${version}/" "roles/${role}/defaults/main.yml"
sed -i -r "s/${role}_version.*[0-9]+\.[0-9]+\.[0-9]+/${role}_version\` | ${version}/" "roles/${role}/README.md"
yq eval -i ".argument_specs.main.options.${role}_version.default = \"${version}\"" "roles/${role}/meta/argument_specs.yml"
update_branch="autoupdate/${role}/${version}"

View file

@ -24,7 +24,7 @@ jobs:
ansible-lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Lint collection
uses: ansible/ansible-lint@main
@ -34,16 +34,23 @@ jobs:
container:
image: quay.io/prometheus/golang-builder:base
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Lint arguments spec
run: ./.github/scripts/lint_arguments_spec.sh
check-unused-variables:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run action
uses: hoo29/little-timmy@v2-action
discover-ansible-versions:
runs-on: ubuntu-latest
outputs:
versions: ${{ steps.supported-ansible-versions.outputs.result }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Get Ansible versions that the collection supports
id: supported-ansible-versions
@ -53,7 +60,6 @@ jobs:
ansible-test-sanity:
uses: ./.github/workflows/ansible-test-sanity.yml
needs:
- ansible-lint
- discover-ansible-versions
with:
ansible-core-versions: ${{ needs.discover-ansible-versions.outputs.versions }}
@ -63,24 +69,21 @@ jobs:
needs:
- ansible-lint
outputs:
molecule-tests: ${{ steps.set-molecule-tests.outputs.tests }}
integration-tests: ${{ steps.set-integration-tests.outputs.tests }}
ansible-roles: ${{ steps.set-ansible-roles.outputs.roles }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Discover role tests
id: set-molecule-tests
- name: Discover ansible roles
id: set-ansible-roles
env:
LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
run: |
roles=$(echo $LABELS | jq -r '.[]' | grep '^roles/' | sed 's|^roles/||')
echo tests="[`for role in $roles; do
for test in $(find tests/integration/targets -maxdepth 1 -mindepth 1 -type d -iname "molecule-${role}-*" -printf "%f\n"); do
echo '{"test":\"'"${test}"'\","name":\"'"${test#*-}\"'"}';
done
done | tr '\n' ',' | sed '$s/,$//'`]" >> $GITHUB_OUTPUT
roles=$(echo $LABELS | jq -c '[.[] | select(startswith("roles/")) | ltrimstr("roles/")]')
echo $roles
echo "roles=$roles" >> $GITHUB_OUTPUT
- name: Discover integration tests
id: set-integration-tests
@ -89,17 +92,20 @@ jobs:
echo '{"test":\"'"${test}"'\","name":\"'"${test}\"'"}';
done | tr '\n' ',' | sed '$s/,$//'`]" >> $GITHUB_OUTPUT
ansible-test-molecule:
uses: ./.github/workflows/ansible-test-integration.yml
molecule:
uses: ./.github/workflows/ansible-test-molecule.yml
needs:
- discover-ansible-tests
- discover-ansible-versions
if: needs.discover-ansible-tests.outputs.molecule-tests != '[]' &&
needs.discover-ansible-tests.outputs.molecule-tests != ''
if: needs.discover-ansible-tests.outputs.ansible-roles != '[]' &&
needs.discover-ansible-tests.outputs.ansible-roles != ''
with:
targets: ${{ needs.discover-ansible-tests.outputs.molecule-tests }}
role: ${{ matrix.role }}
ansible-core-versions: ${{ needs.discover-ansible-versions.outputs.versions }}
coverage: never
strategy:
fail-fast: false
matrix:
role: ${{ fromJson(needs.discover-ansible-tests.outputs.ansible-roles) }}
ansible-test-integration:
uses: ./.github/workflows/ansible-test-integration.yml
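
The "Discover ansible roles" step above reduces the pull request labels to bare role names with jq before fanning them out as a matrix. A sketch of the transformation, assuming a PR carrying one role label and one unrelated label:

# LABELS='["roles/alertmanager", "bugfix"]'
# jq -c '[.[] | select(startswith("roles/")) | ltrimstr("roles/")]'
# roles=["alertmanager"]   # written to $GITHUB_OUTPUT and consumed by the molecule matrix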

View file

@ -24,6 +24,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
fail-fast: false
max-parallel: 10
matrix:
targets: ${{ fromJson(inputs.targets) }}
ansible-core-versions: ${{ fromJson(inputs.ansible-core-versions) }}
@ -41,5 +42,7 @@ jobs:
testing-type: integration
target: ${{ matrix.targets.test }}
coverage: ${{ inputs.coverage }}
ansible-core-github-repository-slug: ${{ contains(fromJson('["stable-2.9", "stable-2.10", "stable-2.11"]'), matrix.ansible-core-versions) &&
'ansible-community/eol-ansible' || 'ansible/ansible' }}
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

View file

@ -0,0 +1,43 @@
---
name: Ansible Molecule
on:
workflow_call:
inputs:
role:
required: true
type: string
ansible-core-versions:
required: false
default: '["stable-2.14"]'
type: string
jobs:
discover-molecule-scenarios:
runs-on: ubuntu-latest
outputs:
molecule-tests: ${{ steps.set-molecule-tests.outputs.tests }}
name: "${{ inputs.role }}-discover-molecule-scenarios"
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: "Discover ${{ inputs.role }} molecule scenarios"
id: set-molecule-tests
run: |
role=${{ inputs.role }}
echo tests="[`for test in $(find tests/integration/targets -maxdepth 1 -mindepth 1 -type d -iname "molecule-${role}-*" -printf "%f\n"); do
echo '{"test":\"'"${test}"'\","name":\"'"${test#*-}\"'"}';
done | tr '\n' ',' | sed '$s/,$//'`]" >> $GITHUB_OUTPUT
ansible-test-integration-molecule:
uses: ./.github/workflows/ansible-test-integration.yml
needs:
- discover-molecule-scenarios
if: needs.discover-molecule-scenarios.outputs.molecule-tests != '[]' &&
needs.discover-molecule-scenarios.outputs.molecule-tests != ''
with:
targets: ${{ needs.discover-molecule-scenarios.outputs.molecule-tests }}
ansible-core-versions: ${{ inputs.ansible-core-versions }}
coverage: never
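
The discover step above lists every tests/integration/targets/molecule-<role>-* directory for the requested role and emits them as a JSON matrix. Roughly, for the alertmanager role, assuming it ships default and alternative scenarios:

# role=alertmanager
# tests=[{"test":"molecule-alertmanager-default","name":"alertmanager-default"},
#        {"test":"molecule-alertmanager-alternative","name":"alertmanager-alternative"}]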

View file

@ -23,3 +23,5 @@ jobs:
with:
ansible-core-version: ${{ matrix.ansible-core-versions }}
testing-type: sanity
ansible-core-github-repository-slug: ${{ contains(fromJson('["stable-2.9", "stable-2.10", "stable-2.11"]'), matrix.ansible-core-versions) &&
'ansible-community/eol-ansible' || 'ansible/ansible' }}

View file

@ -16,7 +16,9 @@ jobs:
- name: "Confirm correct pull request title"
uses: mmubeen/action-pr-title@master # until PR gets merged https://github.com/deepakputhraya/action-pr-title/pull/29
with:
allowed_prefixes: 'feat,feature,fix,major,breaking,minor,enhancement,deprecated,removed,security,bug,bugfix,docs,packaging,test,refactor,refactoring,skip-release,skip_changelog,patch'
allowed_prefixes: "breaking,chore,feat,feature,fix,major,minor,enhancement,\
deprecated,removed,security,bug,bugfix,docs,packaging,\
test,refactor,refactoring,skip-release,skip_changelog,patch"
- name: "Apply label"
if: github.event.pull_request.labels.length == 0
@ -43,7 +45,8 @@ jobs:
"refactoring": "trivial",
"skip-release": "skip_changelog",
"skip_changelog": "skip_changelog",
"patch": "trivial"
"patch": "trivial",
"chore": "trivial"
}
role-label:
@ -51,11 +54,11 @@ jobs:
needs: pr-label
if: github.event.pull_request.labels.length == 0
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Get changed roles
id: changed-roles
uses: tj-actions/changed-files@v35
uses: tj-actions/changed-files@v45
with:
path: "roles"
diff_relative: "true"
@ -65,12 +68,23 @@ jobs:
sha: ${{ github.event.pull_request.head.sha }}
- name: Add changed roles labels
uses: actions/github-script@v6
uses: actions/github-script@v7
if: |
steps.changed-roles.outputs.all_changed_and_modified_files
with:
script: |
const labels = '${{ steps.changed-roles.outputs.all_changed_and_modified_files }}'.split(' ').map(i => 'roles/' + i);
const changedRoles = '${{ steps.changed-roles.outputs.all_changed_and_modified_files }}'.split(' ');
let labels = changedRoles.map(i => 'roles/' + i);
if (changedRoles.includes('_common')) {
const allLabels = await github.paginate(github.rest.issues.listLabelsForRepo, {
owner: context.repo.owner,
repo: context.repo.repo,
});
const roleLabels = allLabels.map(label => label.name).filter(name => name.startsWith('roles/'));
labels = [...new Set([...labels, ...roleLabels])];
}
github.rest.issues.addLabels({
issue_number: context.issue.number,
owner: context.repo.owner,

View file

@ -1,5 +1,5 @@
---
name: Collection Docs
name: Collection Docs (PR)
concurrency:
group: docs-pr-${{ github.head_ref }}
cancel-in-progress: true
@ -11,10 +11,28 @@ env:
GHP_BASE_URL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}
jobs:
get-tags:
runs-on: ubuntu-latest
outputs:
latest: ${{ steps.get-latest-tag.outputs.latest_tag }}
steps:
- name: Get the latest tag
id: get-latest-tag
uses: actions/github-script@v7
with:
script: |
const latestTag = await github.rest.repos.listTags({
owner: context.repo.owner,
repo: context.repo.repo,
per_page: 1
});
core.setOutput('latest_tag', latestTag.data[0].name);
build-docs:
name: Build Ansible Docs
needs: get-tags
permissions:
contents: read
name: Build Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-pr.yml@main
with:
collection-name: prometheus.prometheus
@ -26,7 +44,7 @@ jobs:
init-title: Prometheus.Prometheus Collection Documentation
init-html-short-title: Prometheus.Prometheus Collection Docs
init-extra-html-theme-options: |
documentation_home_url=https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/branch/main/
documentation_home_url=https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/tag/${{ needs.get-tags.outputs.latest }}
render-file-line:
'> * `$<status>`
[$<path_tail>](https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/pr/${{ github.event.number }}/$<path_tail>)'
@ -36,11 +54,14 @@ jobs:
if: github.repository == 'prometheus-community/ansible'
permissions:
contents: write
pages: write
id-token: write
needs: [build-docs]
name: Publish Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-publish-gh-pages.yml@main
with:
artifact-name: ${{ needs.build-docs.outputs.artifact-name }}
publish-gh-pages-branch: true
action: ${{ (github.event.action == 'closed' || needs.build-docs.outputs.changed != 'true') && 'teardown' || 'publish' }}
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@ -56,7 +77,6 @@ jobs:
uses: ansible-community/github-docs-build/actions/ansible-docs-build-comment@main
with:
body-includes: '## Docs Build'
reactions: heart
action: ${{ needs.build-docs.outputs.changed != 'true' && 'remove' || '' }}
on-closed-body: |
## Docs Build 📝

View file

@ -16,10 +16,28 @@ on:
workflow_dispatch:
jobs:
get-tags:
runs-on: ubuntu-latest
outputs:
latest: ${{ steps.get-latest-tag.outputs.latest_tag }}
steps:
- name: Get the latest tag
id: get-latest-tag
uses: actions/github-script@v7
with:
script: |
const latestTag = await github.rest.repos.listTags({
owner: context.repo.owner,
repo: context.repo.repo,
per_page: 1
});
core.setOutput('latest_tag', latestTag.data[0].name);
build-docs:
name: Build Ansible Docs
needs: get-tags
permissions:
contents: read
name: Build Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-push.yml@main
with:
collection-name: prometheus.prometheus
@ -31,17 +49,20 @@ jobs:
init-title: Prometheus.Prometheus Collection Documentation
init-html-short-title: Prometheus.Prometheus Collection Docs
init-extra-html-theme-options: |
documentation_home_url=https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/branch/main/
documentation_home_url=https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/tag/${{ needs.get-tags.outputs.latest }}
publish-docs-gh-pages:
# for now we won't run this on forks
if: github.repository == 'prometheus-community/ansible'
permissions:
contents: write
pages: write
id-token: write
needs: [build-docs]
name: Publish Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-publish-gh-pages.yml@main
with:
artifact-name: ${{ needs.build-docs.outputs.artifact-name }}
publish-gh-pages-branch: true
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View file

@ -10,7 +10,7 @@ jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: "Install tools"
run: "python -m pip install ansible-base --disable-pip-version-check"

View file

@ -14,12 +14,12 @@ jobs:
permissions:
contents: write
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: "Install tools"
run: "python -m pip install ansible-base antsibull-changelog --disable-pip-version-check"
run: "python -m pip install --upgrade ansible-base antsibull-changelog --disable-pip-version-check"
- name: "Calculate next version"
id: version
@ -31,7 +31,7 @@ jobs:
run: antsibull-changelog release -v --version "${{ steps.version.outputs.next-version }}"
- name: "Run antsichaut"
uses: ansible-community/antsichaut@main
uses: gardar/antsichaut@fix-missing-changes
with:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
since_version: "${{ steps.version.outputs.current-version }}"
@ -45,19 +45,19 @@ jobs:
cmd: yq -i '.version = "${{ steps.version.outputs.next-version }}"' 'galaxy.yml'
- name: "Write changelog and version"
uses: stefanzweifel/git-auto-commit-action@v4
uses: stefanzweifel/git-auto-commit-action@v5
with:
branch: ${{ github.event.pull_request.base.ref }}
commit_message: "chore: update version"
push_options: --force
- name: "Checkout updated branch"
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.base.ref }}
- name: "Publish release"
id: release-publish
uses: release-drafter/release-drafter@v5
uses: release-drafter/release-drafter@v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View file

@ -16,7 +16,7 @@ jobs:
container:
image: quay.io/prometheus/golang-builder:base
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Get repos for each role
id: discover
@ -36,5 +36,5 @@ jobs:
matrix:
include: ${{ fromJson(needs.discover-role-repos.outputs.role-repos) }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: ./.github/scripts/version_updater.sh ${{ matrix.repo }} ${{ matrix.role }} ${{ matrix.type }}

View file

@ -0,0 +1,70 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra
import testinfra.utils.ansible_runner
def get_ansible_version():
"""
Get the current Ansible version from localhost using the 'debug' module.
Returns:
str: The current Ansible version (major.minor).
"""
localhost = testinfra.get_host(
"ansible://localhost?ansible_inventory=localhost,"
)
local_ansible_version = localhost.ansible("debug", "var=ansible_version")
ansible_version = '.'.join(
local_ansible_version['ansible_version']['full'].split('.')[:2]
)
return ansible_version
def filter_compatible_hosts(inventory, ansible_version):
"""
Filter hosts based on Ansible version compatibility.
Args:
inventory (str): The inventory file path.
ansible_version (str): The current Ansible version (major.minor).
Returns:
list: A list of compatible hosts that do not have the current Ansible
version in their exclude_ansible_vers hostvars.
"""
ansible_runner = testinfra.utils.ansible_runner.AnsibleRunner(inventory)
all_hosts = ansible_runner.get_hosts('all')
compatible_hosts = []
for host in all_hosts:
# Get host variables
host_vars = ansible_runner.get_variables(host)
# Check if the host should be excluded
if 'exclude_ansible_vers' in host_vars:
excluded_versions = host_vars['exclude_ansible_vers']
if ansible_version in excluded_versions:
continue
compatible_hosts.append(host)
return compatible_hosts
def get_target_hosts():
"""
Get the filtered target hosts based on the current Ansible version.
Returns:
list: A list of hosts that are compatible with
the current Ansible version.
"""
# Get current Ansible version
current_ansible_version = get_ansible_version()
# Get the inventory file from environment
inventory_file = os.environ['MOLECULE_INVENTORY_FILE']
# Filter the hosts based on the exclusion criteria
return filter_compatible_hosts(inventory_file, current_ansible_version)

View file

@ -4,6 +4,105 @@ Prometheus.Prometheus Release Notes
.. contents:: Topics
v0.22.0
=======
Minor Changes
-------------
- feat: Add nvidia_gpu_exporter (https://github.com/prometheus-community/ansible/pull/441)
Bugfixes
--------
- bugfix: update default home directory (https://github.com/prometheus-community/ansible/pull/442)
v0.21.0
=======
Bugfixes
--------
- fix: remove unused variable (https://github.com/prometheus-community/ansible/pull/437)
v0.20.3
=======
v0.20.1
=======
Bugfixes
--------
- fix: alertmanager deprecated PIDFile= path warning (https://github.com/prometheus-community/ansible/pull/155)
v0.20.0
=======
Minor Changes
-------------
- minor(ci): detect unused ansible variables (https://github.com/prometheus-community/ansible/pull/422)
v0.19.0
=======
Minor Changes
-------------
- minor: bump support for ansible from v2.16.* to v2.17.* (https://github.com/prometheus-community/ansible/pull/414)
Bugfixes
--------
- fix: remove EOL platforms for current ones (https://github.com/prometheus-community/ansible/pull/395)
v0.18.1
=======
Bugfixes
--------
- fix: remove EOL platforms for current ones (https://github.com/prometheus-community/ansible/pull/395)
v0.18.0
=======
Minor Changes
-------------
- enhancement: adding env/container labels options (https://github.com/prometheus-community/ansible/pull/407)
Bugfixes
--------
- fix: ansible-community/eol-ansible for 2.9/2.10/2.11 tests (https://github.com/prometheus-community/ansible/pull/416)
v0.17.2
=======
Bugfixes
--------
- fix: ansible-community/eol-ansible for 2.9/2.10/2.11 tests (https://github.com/prometheus-community/ansible/pull/416)
v0.17.1
=======
Bugfixes
--------
- fix(ci): limit number of parallel tests to avoid github api limits (https://github.com/prometheus-community/ansible/pull/397)
v0.17.0
=======
v0.16.4
=======
v0.16.3
=======
v0.16.2
=======

View file

@ -93,4 +93,4 @@ To accomplish this all roles need to support:
- current and at least one previous ansible version
- systemd as the only available process manager
- at least latest debian and CentOS distributions
- at least latest Debian/Ubuntu and EL distributions

View file

@ -13,4 +13,4 @@ plugins:
shell: {}
strategy: {}
vars: {}
version: 0.16.2
version: 0.22.0

View file

@ -1,8 +1,98 @@
---
ancestor:
releases:
0.22.0:
changes:
bugfixes:
- 'bugfix: update default home directory (https://github.com/prometheus-community/ansible/pull/442)'
minor_changes:
- 'feat: Add nvidia_gpu_exporter (https://github.com/prometheus-community/ansible/pull/441)'
release_date: '2024-10-29'
0.21.0:
changes:
bugfixes:
- 'fix: remove unused variable (https://github.com/prometheus-community/ansible/pull/437)'
trivial:
- 'chore(deps): update hoo29/little-timmy action to v2 (https://github.com/prometheus-community/ansible/pull/436)'
release_date: '2024-10-18'
0.20.3:
release_date: '2024-10-21'
0.20.1:
changes:
bugfixes:
- 'fix: alertmanager deprecated PIDFile= path warning (https://github.com/prometheus-community/ansible/pull/155)'
release_date: '2024-10-17'
0.20.0:
changes:
minor_changes:
- 'minor(ci): detect unused ansible variables (https://github.com/prometheus-community/ansible/pull/422)'
trivial:
- 'refactor: consolidate common tasks (https://github.com/prometheus-community/ansible/pull/425)'
release_date: '2024-10-02'
0.19.0:
changes:
bugfixes:
- 'fix: remove EOL platforms for current ones (https://github.com/prometheus-community/ansible/pull/395)'
minor_changes:
- 'minor: bump support for ansible from v2.16.* to v2.17.* (https://github.com/prometheus-community/ansible/pull/414)'
trivial:
- 'patch: New percona/mongodb_exporter upstream release 0.41.1! (https://github.com/prometheus-community/ansible/pull/420)'
- 'patch: New prometheus/pushgateway upstream release 1.10.0! (https://github.com/prometheus-community/ansible/pull/418)'
release_date: '2024-09-30'
0.18.1:
changes:
bugfixes:
- 'fix: remove EOL platforms for current ones (https://github.com/prometheus-community/ansible/pull/395)'
trivial:
- 'patch: New percona/mongodb_exporter upstream release 0.41.1! (https://github.com/prometheus-community/ansible/pull/420)'
- 'patch: New prometheus/pushgateway upstream release 1.10.0! (https://github.com/prometheus-community/ansible/pull/418)'
release_date: '2024-09-20'
0.18.0:
changes:
bugfixes:
- 'fix: ansible-community/eol-ansible for 2.9/2.10/2.11 tests (https://github.com/prometheus-community/ansible/pull/416)'
minor_changes:
- 'enhancement: adding env/container labels options (https://github.com/prometheus-community/ansible/pull/407)'
trivial:
- 'patch: New percona/mongodb_exporter upstream release 0.41.0! (https://github.com/prometheus-community/ansible/pull/417)'
- 'patch: New oliver006/redis_exporter upstream release 1.63.0! (https://github.com/prometheus-community/ansible/pull/415)'
- 'patch: New prometheus/prometheus upstream release 2.54.1! (https://github.com/prometheus-community/ansible/pull/413)'
- 'chore(deps): update tj-actions/changed-files action to v45 (https://github.com/prometheus-community/ansible/pull/412)'
- 'patch: New nginxinc/nginx-prometheus-exporter upstream release 1.3.0! (https://github.com/prometheus-community/ansible/pull/405)'
- 'patch: New ncabatoff/process-exporter upstream release 0.8.3! (https://github.com/prometheus-community/ansible/pull/404)'
- 'patch: New prometheus/node_exporter upstream release 1.8.2! (https://github.com/prometheus-community/ansible/pull/401)'
- 'patch: New prometheus/memcached_exporter upstream release 0.14.4! (https://github.com/prometheus-community/ansible/pull/388)'
release_date: '2024-09-13'
0.17.2:
changes:
bugfixes:
- 'fix: ansible-community/eol-ansible for 2.9/2.10/2.11 tests (https://github.com/prometheus-community/ansible/pull/416)'
trivial:
- 'patch: New oliver006/redis_exporter upstream release 1.63.0! (https://github.com/prometheus-community/ansible/pull/415)'
- 'patch: New prometheus/prometheus upstream release 2.54.1! (https://github.com/prometheus-community/ansible/pull/413)'
- 'chore(deps): update tj-actions/changed-files action to v45 (https://github.com/prometheus-community/ansible/pull/412)'
- 'patch: New nginxinc/nginx-prometheus-exporter upstream release 1.3.0! (https://github.com/prometheus-community/ansible/pull/405)'
- 'patch: New ncabatoff/process-exporter upstream release 0.8.3! (https://github.com/prometheus-community/ansible/pull/404)'
- 'patch: New prometheus/node_exporter upstream release 1.8.2! (https://github.com/prometheus-community/ansible/pull/401)'
- 'patch: New prometheus/memcached_exporter upstream release 0.14.4! (https://github.com/prometheus-community/ansible/pull/388)'
release_date: '2024-09-13'
0.17.1:
changes:
bugfixes:
- 'fix(ci): limit number of parallel tests to avoid github api limits (https://github.com/prometheus-community/ansible/pull/397)'
trivial:
- 'refactor(ci): split up molecule matrices to avoid job limit (https://github.com/prometheus-community/ansible/pull/396)'
release_date: '2024-07-02'
0.17.0:
release_date: '2024-05-29'
0.16.4:
release_date: '2024-04-22'
0.16.3:
changes:
trivial:
- 'patch: New ncabatoff/process-exporter upstream release 0.8.1! (https://github.com/prometheus-community/ansible/pull/336)'
release_date: '2024-04-18'
0.16.2:
changes: {}
release_date: '2024-04-15'
0.16.1:
changes:

View file

@ -31,3 +31,5 @@ sections:
title: Prometheus.Prometheus
trivial_section_name: trivial
use_fqcn: true
changelog_nice_yaml: true
changelog_sort: "version_reversed"

View file

@ -1,7 +1,7 @@
---
namespace: prometheus
name: prometheus
version: 0.16.2
version: 0.22.0
readme: README.md
authors:
- "Ben Kochie (https://github.com/SuperQ)"

View file

@ -1,2 +1,2 @@
---
requires_ansible: ">=2.9.0,<=2.16.99"
requires_ansible: ">=2.9.0,<=2.17.99"

renovate.json Normal file
View file

@ -0,0 +1,6 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:recommended"
]
}

roles/_common/README.md Normal file
View file

@ -0,0 +1,3 @@
---
# Internal use only
This role is for common tasks shared between roles and should not be used directly.
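
A minimal sketch of how a collection role is expected to consume it, assuming the install entry point is named install.yml (analogous to the configure.yml entry point used by the alertmanager role later in this diff); the parent role name my_exporter and its variables are hypothetical:

- name: "Install binaries via the shared _common role"
  ansible.builtin.include_role:
    name: prometheus.prometheus._common
    tasks_from: install.yml
  vars:
    _common_binaries: ["my_exporter"]  # hypothetical binary name
    _common_binary_url: "{{ my_exporter_binary_url }}"
    _common_binary_install_dir: "/usr/local/bin"
    _common_local_cache_path: "/tmp/my_exporter"
    _common_system_user: "{{ my_exporter_system_user }}"
    _common_system_group: "{{ my_exporter_system_group }}"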

View file

@ -0,0 +1,15 @@
---
- name: "Restart {{ _common_service_name }}"
# listen: "restart_service"
become: true
ansible.builtin.service:
daemon_reload: true
name: "{{ _common_service_name }}"
state: restarted
- name: "Reload {{ _common_service_name }}"
# listen: "reload_service"
become: true
ansible.builtin.service:
name: "{{ _common_service_name }}"
state: reloaded

View file

@ -0,0 +1,91 @@
---
argument_specs:
configure:
short_description: "Internal only - common configuration tasks"
description: "Internal only - selinux requirements"
author:
- "Prometheus Community"
options:
_common_service_name:
description:
- "Name of the system service (systemd)"
- "Usually matches the role name"
default: "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
_common_config_dir:
description: "Path to directory to install configuration."
default: ""
_common_system_user:
description: "System user for running the service."
default: ""
_common_system_group:
description: "User group for the system user."
default: ""
_common_tls_server_config:
description: "Configuration for TLS authentication."
default: ""
_common_http_server_config:
description: "Configuration for HTTP/2 support."
default: ""
_common_basic_auth_users:
description: "Dictionary of users and passwords for basic authentication. Passwords are automatically hashed with bcrypt."
default: ""
install:
short_description: "Internal only - common installation tasks"
description: "Internal only - selinux requirements"
author:
- "Prometheus Community"
options:
_common_binaries:
description: "List of binaries to install"
default: []
type: "list"
elements: "str"
_common_binary_install_dir:
description: "Directory to install binaries"
default: ""
_common_binary_name:
description: "Name of main binary"
default: "{{ __common_binary_basename }}"
_common_binary_unarchive_opts:
description: "Extra options to pass to binary unarchive task"
default: []
type: "list"
elements: "str"
_common_binary_url:
description: "URL of the binaries to install"
default: ""
_common_checksums_url:
description: "URL of the checksums file for the binaries"
default: ""
_common_config_dir:
description: "Path to the configuration dir"
default: ""
_common_local_cache_path:
description: "Local path to stash the archive and its extraction"
default: ""
_common_system_user:
description: "System user for running the service."
default: ""
_common_system_group:
description: "User group for the system user."
default: ""
preflight:
short_description: "Internal only - common preflight tasks"
description: "Internal only - selinux requirements"
author:
- "Prometheus Community"
options:
_common_dependencies:
description: "Package dependencies to install"
default: "{% if (ansible_pkg_mgr == 'apt') %}\
{{ ('python-apt' if ansible_python_version is version('3', '<') else 'python3-apt') }}
{% else %}\
{% endif %}"
selinux:
short_description: "Internal only - common selinux configuration tasks"
description: "Internal only - selinux requirements"
author:
- "Prometheus Community"
options:
_common_selinux_port:
description: "Port to allow in SELinux"

View file

@ -0,0 +1,6 @@
---
galaxy_info:
author: "Prometheus Community"
description: "Internal role for common tasks shared between roles"
license: "Apache"
min_ansible_version: "2.9"

View file

@ -0,0 +1,70 @@
---
- name: "Validate invocation of _common role"
ansible.builtin.assert:
that:
- "ansible_parent_role_names is defined"
- "ansible_parent_role_names | default() | length > 0"
fail_msg: "Error: The '_common' role is a internal role and cannot be invoked directly."
tags:
- always
- name: "Create systemd service unit {{ _common_service_name }}"
ansible.builtin.template:
src: "{{ _common_service_name }}.service.j2"
dest: "/etc/systemd/system/{{ _common_service_name }}.service"
owner: root
group: root
mode: 0644
become: true
notify:
- "{{ ansible_parent_role_names | first }} : Restart {{ _common_service_name }}"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- name: "Create config dir {{ _common_config_dir }}"
ansible.builtin.file:
path: "{{ _common_config_dir }}"
state: directory
owner: "{{ _common_system_user }}"
group: "{{ _common_system_group }}"
mode: u+rwX,g+rwX,o=rX
become: true
notify:
- "{{ ansible_parent_role_names | first }} : Restart {{ _common_service_name }}"
when: (_common_config_dir)
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- name: "Install web config for {{ _common_service_name }}"
ansible.builtin.template:
src: "web_config.yml.j2"
dest: "{{ _common_config_dir }}/web_config.yml"
owner: "{{ _common_system_user }}"
group: "{{ _common_system_group }}"
mode: 0644
become: true
notify:
- "{{ ansible_parent_role_names | first }} : Restart {{ _common_service_name }}"
when: "[_common_tls_server_config, _common_http_server_config, _common_basic_auth_users] | map('length') | select('>', 0) | list is any"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
#
# - name: "Configure {{ _common_service_name }}"
# ansible.builtin.template:
# # src: "{{ ansible_parent_role_paths | first }}/templates/{{ _common_service_name }}.yml.j2"
# src: "{{ _config_template | default(ansible_parent_role_paths | first ~ '/templates/' ~ _common_service_name ~ '.yml.j2') }}"
# # dest: "/etc/{{ _common_service_name }}.yml"
# dest: "{{ _config_dest | default('/etc/' ~ _common_service_name ~ '.yml') }}"
# owner: "{{ _system_user }}"
# group: "{{ _system_group }}"
# mode: 0644
# notify:
# - reload_service
# when: (ansible_parent_role_paths | first '/templates/' _common_service_name '.yml.j2')

View file

@ -0,0 +1,139 @@
---
- name: "Validate invocation of _common role"
ansible.builtin.assert:
that:
- "ansible_parent_role_names is defined"
- "ansible_parent_role_names | default() | length > 0"
fail_msg: "Error: The '_common' role is a internal role and cannot be invoked directly."
tags:
- always
- name: "Gather system user and group facts"
ansible.builtin.getent:
database: "{{ item }}"
loop:
- passwd
- group
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"
- name: "Create system group {{ _common_system_group }}"
ansible.builtin.group:
name: "{{ _common_system_group }}"
system: true
state: present
become: true
when: _common_system_group not in ansible_facts.getent_group
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"
- name: "Create system user {{ _common_system_user }}"
ansible.builtin.user:
name: "{{ _common_system_user }}"
system: true
shell: "/usr/sbin/nologin"
group: "{{ _common_system_group }}"
home: "{{ _common_config_dir | default('/', true) }}"
create_home: false
become: true
when: _common_system_user not in ansible_facts.getent_passwd
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"
- name: "Create localhost binary cache path"
ansible.builtin.file:
path: "{{ _common_local_cache_path }}"
state: directory
mode: 0755
delegate_to: localhost
check_mode: false
become: false
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"
- download
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_download"
- name: "Download binary {{ __common_binary_basename }}"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"
- download
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_download"
become: false
block:
- name: "Get checksum list for {{ __common_binary_basename }}"
ansible.builtin.set_fact:
__common_binary_checksums: "{{ dict(lookup('url', _common_checksums_url, headers=__common_github_api_headers, wantlist=True)
| map('regex_replace', '^([a-fA-F0-9]+)\\s+', 'sha256:\\1 ')
| map('regex_findall', '^(sha256:[a-fA-F0-9]+)\\s+(.+)$') | map('flatten') | map('reverse')) }}"
run_once: true
when: (_common_checksums_url)
- name: "Download {{ __common_binary_basename }}"
ansible.builtin.get_url:
url: "{{ _common_binary_url }}"
dest: "{{ _common_local_cache_path }}/{{ _common_binary_name | default(__common_binary_basename) }}"
headers: "{{ __common_github_api_headers }}"
checksum: "{{ __common_binary_checksums[__common_binary_basename] | default(omit) }}"
mode: 0644
register: __common_download
until: __common_download is succeeded
retries: 5
delay: 2
# run_once: true # <-- this can't be set due to multi-arch support
delegate_to: localhost
check_mode: false
- name: "Unpack binary archive {{ __common_binary_basename }}"
ansible.builtin.unarchive:
src: "{{ _common_local_cache_path }}/{{ __common_binary_basename }}"
dest: "{{ _common_local_cache_path }}"
mode: 0755
list_files: true
extra_opts: "{{ _common_binary_unarchive_opts | default(omit, true) }}"
delegate_to: localhost
check_mode: false
when: __common_binary_basename is search('\.zip$|\.tar\.gz$')
- name: "Check existence of binary install dir"
ansible.builtin.stat:
path: "{{ _common_binary_install_dir }}"
register: __common_binary_install_dir
- name: "Make sure binary install dir exists"
ansible.builtin.file:
path: "{{ _common_binary_install_dir }}"
mode: 0755
owner: root
group: root
become: true
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"
when: not __common_binary_install_dir.stat.exists
- name: "Propagate binaries"
ansible.builtin.copy:
src: "{{ _common_local_cache_path }}/{{ item }}"
dest: "{{ _common_binary_install_dir }}/{{ item }}"
mode: 0755
owner: root
group: root
loop: "{{ _common_binaries }}"
become: true
notify:
- "{{ ansible_parent_role_names | first }} : Restart {{ _common_service_name }}"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"

View file

@ -0,0 +1,76 @@
---
- name: "Validate invocation of _common role"
ansible.builtin.assert:
that:
- "ansible_parent_role_names is defined"
- "ansible_parent_role_names | default() | length > 0"
fail_msg: "Error: The '_common' role is a internal role and cannot be invoked directly."
tags:
- always
- name: "Check for deprecated skip_install variable"
ansible.builtin.assert:
that:
- __common_parent_role_short_name ~ '_skip_install' not in vars
fail_msg: "The variable {{ __common_parent_role_short_name ~ '_skip_install' }} is deprecated.
Please use `--skip-tags {{ __common_parent_role_short_name }}_install` instead to skip the installation."
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- name: "Check for deprecated binary_local_dir variable"
ansible.builtin.assert:
that:
- __common_parent_role_short_name ~ '_binary_local_dir' not in vars
fail_msg: "The variable {{ __common_parent_role_short_name ~ '_binary_local_dir' }} is deprecated.
Please use the variable {{ __common_parent_role_short_name ~ '_local_cache_path' }} instead"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- name: "Check for deprecated archive_path variable"
ansible.builtin.assert:
that:
- __common_parent_role_short_name ~ '_archive_path' not in vars
fail_msg: "The variable {{ __common_parent_role_short_name ~ '_archive_path' }} is deprecated.
Please use the variable {{ __common_parent_role_short_name ~ '_local_cache_path' }} instead"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- name: Assert usage of systemd as an init system
ansible.builtin.assert:
that: ansible_service_mgr == 'systemd'
msg: "This module only works with systemd"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"
- name: Install dependencies
become: true
ansible.builtin.package:
name: "{{ _common_dependencies }}"
state: present
when: (_common_dependencies)
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"
- name: Gather package facts
ansible.builtin.package_facts:
when: "not 'packages' in ansible_facts"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- install
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_install"

View file

@ -0,0 +1,58 @@
---
- name: "Validate invocation of _common role"
ansible.builtin.assert:
that:
- "ansible_parent_role_names is defined"
- "ansible_parent_role_names | default() | length > 0"
fail_msg: "Error: The '_common' role is a internal role and cannot be invoked directly."
tags:
- always
- name: Install selinux python packages [RedHat]
ansible.builtin.package:
name: "{{ ['libselinux-python', 'policycoreutils-python']
if ansible_python_version is version('3', '<') else
['python3-libselinux', 'python3-policycoreutils'] }}"
state: present
register: __common_install_selinux_packages
until: __common_install_selinux_packages is success
retries: 5
delay: 2
become: true
when: ansible_os_family | lower == "redhat"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- name: Install selinux python packages [clearlinux]
ansible.builtin.package:
name: sysadmin-basic
state: present
register: __common_install_selinux_packages
until: __common_install_selinux_packages is success
retries: 5
delay: 2
become: true
when:
- ansible_distribution | lower == "clearlinux"
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"
- name: Allow port in SELinux
community.general.seport:
ports: "{{ _common_selinux_port }}"
proto: tcp
setype: http_port_t
state: present
become: true
when:
- ansible_version.full is version_compare('2.4', '>=')
- ansible_selinux.status == "enabled"
- (_common_selinux_port)
tags:
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
- configure
- "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}_configure"

View file

@ -0,0 +1,18 @@
---
{{ ansible_managed | comment }}
{% if _common_tls_server_config | length > 0 %}
tls_server_config:
{{ _common_tls_server_config | to_nice_yaml | indent(2, true) }}
{% endif %}
{% if _common_http_server_config | length > 0 %}
http_server_config:
{{ _common_http_server_config | to_nice_yaml | indent(2, true) }}
{% endif %}
{% if _common_basic_auth_users | length > 0 %}
basic_auth_users:
{% for k, v in _common_basic_auth_users.items() %}
{{ k }}: {{ v | string | password_hash('bcrypt', ('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890' | shuffle(seed=inventory_hostname) | join)[:22], rounds=9) }}
{% endfor %}
{% endif %}
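
For illustration, with a TLS server config and a single basic-auth user the template renders roughly as follows (the user name, file paths, and the truncated bcrypt digest are placeholders, not values from this diff):

---
# Ansible managed
tls_server_config:
  cert_file: /etc/alertmanager/tls.cert
  key_file: /etc/alertmanager/tls.key
basic_auth_users:
  admin: $2b$09$...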

View file

@ -0,0 +1,24 @@
---
_common_local_cache_path: ""
_common_binaries: []
_common_binary_name: "{{ __common_binary_basename }}"
_common_binary_install_dir:
_common_config_dir: ""
_common_binary_url: ""
_common_checksums_url: ""
_common_selinux_port: ""
_common_service_name: "{{ __common_parent_role_short_name }}"
_common_system_user: ""
_common_system_group: ""
_common_dependencies: "{% if (ansible_pkg_mgr == 'apt') %}\
{{ ('python-apt' if ansible_python_version is version('3', '<') else 'python3-apt') }}
{% else %}\
{% endif %}"
_common_binary_unarchive_opts: ""
_common_tls_server_config: {}
_common_http_server_config: {}
_common_basic_auth_users: {}
# Variables that should not be overwritten
__common_binary_basename: "{{ _common_binary_url | urlsplit('path') | basename }}"
__common_github_api_headers: "{{ {'GITHUB_TOKEN': lookup('ansible.builtin.env', 'GITHUB_TOKEN')} if (lookup('ansible.builtin.env', 'GITHUB_TOKEN')) else {} }}"
__common_parent_role_short_name: "{{ ansible_parent_role_names | first | regex_replace(ansible_collection_name ~ '.', '') }}"
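
As a concrete example of the derived variables above, using the alertmanager defaults from this diff:

# _common_binary_url:  https://github.com/prometheus/alertmanager/releases/download/v0.27.0/alertmanager-0.27.0.linux-amd64.tar.gz
# __common_binary_basename:        alertmanager-0.27.0.linux-amd64.tar.gz   (urlsplit('path') | basename)
# __common_parent_role_short_name: alertmanager                             (prometheus.prometheus.alertmanager minus the collection prefix)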

View file

@ -1,10 +1,8 @@
---
alertmanager_version: 0.27.0
alertmanager_binary_local_dir: ''
alertmanager_binary_url: "https://github.com/{{ _alertmanager_repo }}/releases/download/v{{ alertmanager_version }}/\
alertmanager-{{ alertmanager_version }}.linux-{{ go_arch }}.tar.gz"
alertmanager-{{ alertmanager_version }}.{{ ansible_system | lower }}-{{ _alertmanager_go_ansible_arch }}.tar.gz"
alertmanager_checksums_url: "https://github.com/{{ _alertmanager_repo }}/releases/download/v{{ alertmanager_version }}/sha256sums.txt"
alertmanager_skip_install: false
alertmanager_config_dir: /etc/alertmanager
alertmanager_db_dir: /var/lib/alertmanager
@ -135,5 +133,10 @@ alertmanager_amtool_config_alertmanager_url: "{{ alertmanager_web_external_url }
# Extended output of `amtool` commands, use '' for less verbosity
alertmanager_amtool_config_output: 'extended'
alertmanager_binary_install_dir: '/usr/local/bin'
# Local path to stash the archive and its extraction
alertmanager_archive_path: /tmp
alertmanager_local_cache_path: "/tmp/alertmanager-{{ ansible_system | lower }}-{{ _alertmanager_go_ansible_arch }}/{{ alertmanager_version }}"
alertmanager_system_user: alertmanager
alertmanager_system_group: "{{ alertmanager_system_user }}"

View file

@ -11,18 +11,9 @@ argument_specs:
alertmanager_version:
description: "Alertmanager package version. Also accepts `latest` as parameter."
default: 0.27.0
alertmanager_skip_install:
description: "Alertmanager installation tasks gets skipped when set to true."
type: bool
default: false
alertmanager_binary_local_dir:
description:
- "Allows to use local packages instead of ones distributed on github."
- "As parameter it takes a directory where C(alertmanager) AND C(amtool) binaries are stored on host on which ansible is ran."
- "This overrides I(alertmanager_version) parameter"
alertmanager_binary_url:
description: "URL of the alertmanager binaries .tar.gz file"
default: "https://github.com/{{ _alertmanager_repo }}/releases/download/v{{ alertmanager_version }}/alertmanager-{{ alertmanager_version }}.linux-{{ go_arch }}.tar.gz"
default: "https://github.com/{{ _alertmanager_repo }}/releases/download/v{{ alertmanager_version }}/alertmanager-{{ alertmanager_version }}.{{ ansible_system | lower }}-{{ _alertmanager_go_ansible_arch }}.tar.gz"
alertmanager_checksums_url:
description: "URL of the alertmanager checksums file"
default: "https://github.com/{{ _alertmanager_repo }}/releases/download/v{{ alertmanager_version }}/sha256sums.txt"
@ -32,6 +23,11 @@ argument_specs:
alertmanager_web_external_url:
description: "External address on which alertmanager is available. Useful when behind reverse proxy. Ex. example.org/alertmanager"
default: "http://localhost:9093/"
alertmanager_binary_install_dir:
description:
- "I(Advanced)"
- "Directory to install binaries"
default: "/usr/local/bin"
alertmanager_config_dir:
description: "Path to directory with alertmanager configuration"
default: "/etc/alertmanager"
@ -110,6 +106,16 @@ argument_specs:
alertmanager_amtool_config_output:
description: 'Extended output, use C("") for simple output.'
default: "extended"
alertmanager_archive_path:
alertmanager_local_cache_path:
description: 'Local path to stash the archive and its extraction'
default: "/tmp"
default: "/tmp/alertmanager-{{ ansible_system | lower }}-{{ _alertmanager_go_ansible_arch }}/{{ alertmanager_version }}"
alertmanager_system_user:
description:
- "I(Advanced)"
- "alertmanager system user"
default: alertmanager
alertmanager_system_group:
description:
- "I(Advanced)"
- "System group for alertmanager"
default: alertmanager

View file

@ -9,19 +9,14 @@ galaxy_info:
versions:
- "focal"
- "jammy"
- "noble"
- name: "Debian"
versions:
- "bullseye"
- "buster"
- name: "EL"
versions:
- "7"
- "8"
- "9"
- name: "Fedora"
versions:
- "37"
- '38'
galaxy_tags:
- "monitoring"
- "prometheus"

View file

@ -1,9 +1,12 @@
---
provisioner:
playbooks:
prepare: "${MOLECULE_PROJECT_DIRECTORY}/../../.config/molecule/alternative/prepare.yml"
inventory:
group_vars:
all:
alertmanager_binary_local_dir: '/tmp/alertmanager-linux-amd64'
alertmanager_version: 0.25.0
alertmanager_local_cache_path: "/tmp/alertmanager-linux-amd64/{{ alertmanager_version }}"
alertmanager_config_dir: /opt/am/etc
alertmanager_db_dir: /opt/am/lib
alertmanager_web_listen_address:
@ -33,4 +36,5 @@ provisioner:
peers:
- "127.0.0.1:6783"
- "alertmanager.demo.do.prometheus.io:6783"
alertmanager_version: 0.25.0
alertmanager_binary_url: "https://github.com/prometheus/alertmanager/releases/download/v{{ alertmanager_version\
\ }}/alertmanager-{{ alertmanager_version }}.linux-amd64.tar.gz"

View file

@ -1,33 +0,0 @@
---
- name: Run localhost preparation
hosts: localhost
gather_facts: false
tasks:
- name: Download alertmanager binary to local folder
become: false
ansible.builtin.get_url:
url: "https://github.com/prometheus/alertmanager/releases/download/v{{ alertmanager_version\
\ }}/alertmanager-{{ alertmanager_version }}.linux-amd64.tar.gz"
dest: "/tmp/alertmanager-{{ alertmanager_version }}.linux-amd64.tar.gz"
mode: 0644
register: _download_archive
until: _download_archive is succeeded
retries: 5
delay: 2
check_mode: false
- name: Unpack alertmanager binaries
become: false
ansible.builtin.unarchive:
src: "/tmp/alertmanager-{{ alertmanager_version }}.linux-amd64.tar.gz"
dest: "/tmp"
creates: "/tmp/alertmanager-{{ alertmanager_version }}.linux-amd64/alertmanager"
check_mode: false
- name: Link to alertmanager binaries directory
become: false
ansible.builtin.file:
src: "/tmp/alertmanager-{{ alertmanager_version }}.linux-amd64"
dest: "/tmp/alertmanager-linux-amd64"
state: link
check_mode: false

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
@pytest.mark.parametrize("dirs", [

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
import pytest
from testinfra_helpers import get_target_hosts
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
@pytest.mark.parametrize("dirs", [

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
@pytest.mark.parametrize("files", [

View file

@ -1,44 +1,79 @@
---
- name: "Common configure"
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: configure.yml
vars:
_common_system_user: "{{ alertmanager_system_user }}"
_common_system_group: "{{ alertmanager_system_group }}"
_common_config_dir: "{{ alertmanager_config_dir }}"
tags:
- alertmanager
- configure
- alertmanager_configure
- name: Create alertmanager directories
ansible.builtin.file:
path: "{{ item }}"
state: directory
owner: "{{ alertmanager_system_user }}"
group: "{{ alertmanager_system_group }}"
mode: 0755
loop:
- "{{ alertmanager_config_dir }}/templates"
- "{{ alertmanager_db_dir }}"
- "{{ _alertmanager_amtool_config_dir }}"
become: true
tags:
- alertmanager
- configure
- alertmanager_configure
- name: Copy amtool config
ansible.builtin.template:
force: true
src: "{{ alertmanager_amtool_config_file }}"
dest: "{{ _alertmanager_amtool_config_dir }}/config.yml"
owner: alertmanager
group: alertmanager
owner: "{{ alertmanager_system_user }}"
group: "{{ alertmanager_system_group }}"
mode: 0644
become: true
tags:
- alertmanager
- configure
- alertmanager_configure
- name: Copy alertmanager config
ansible.builtin.template:
force: true
src: "{{ alertmanager_config_file }}"
dest: "{{ alertmanager_config_dir }}/alertmanager.yml"
owner: alertmanager
group: alertmanager
owner: "{{ alertmanager_system_user }}"
group: "{{ alertmanager_system_group }}"
mode: 0644
validate: "{{ _alertmanager_binary_install_dir }}/amtool check-config %s"
validate: "{{ alertmanager_binary_install_dir }}/amtool check-config %s"
no_log: "{{ false if (lookup('env', 'CI')) or (lookup('env', 'MOLECULE_PROVISIONER_NAME')) else true }}"
become: true
notify:
- restart alertmanager
- name: Create systemd service unit
ansible.builtin.template:
src: alertmanager.service.j2
dest: /etc/systemd/system/alertmanager.service
owner: root
group: root
mode: 0644
notify:
- restart alertmanager
tags:
- alertmanager
- configure
- alertmanager_configure
- name: Copy alertmanager template files
ansible.builtin.copy:
src: "{{ item }}"
dest: "{{ alertmanager_config_dir }}/templates/"
force: true
owner: alertmanager
group: alertmanager
owner: "{{ alertmanager_system_user }}"
group: "{{ alertmanager_system_group }}"
mode: 0644
with_fileglob: "{{ alertmanager_template_files }}"
become: true
notify:
- restart alertmanager
tags:
- alertmanager
- configure
- alertmanager_configure
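The last task above copies every file matching the glob(s) in alertmanager_template_files into the role's templates directory. A minimal sketch of setting that variable in inventory, assuming the usual *.tmpl glob convention (the role's actual default is not shown in this diff):

alertmanager_template_files:
  - alertmanager/templates/*.tmpl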

View file

@ -1,87 +0,0 @@
---
- name: Create alertmanager system group
ansible.builtin.group:
name: alertmanager
system: true
state: present
- name: Create alertmanager system user
ansible.builtin.user:
name: alertmanager
system: true
shell: "/usr/sbin/nologin"
group: alertmanager
createhome: false
- name: Create alertmanager directories
ansible.builtin.file:
path: "{{ item }}"
state: directory
owner: alertmanager
group: alertmanager
mode: 0755
with_items:
- "{{ alertmanager_config_dir }}"
- "{{ alertmanager_config_dir }}/templates"
- "{{ alertmanager_db_dir }}"
- "{{ _alertmanager_amtool_config_dir }}"
- name: Get binary
when:
- alertmanager_binary_local_dir | length == 0
- not alertmanager_skip_install
block:
- name: Download alertmanager binary to local folder
become: false
ansible.builtin.get_url:
url: "{{ alertmanager_binary_url }}"
dest: "{{ alertmanager_archive_path }}/alertmanager-{{ alertmanager_version }}.linux-{{ go_arch }}.tar.gz"
checksum: "sha256:{{ __alertmanager_checksum }}"
mode: 0644
register: _download_archive
until: _download_archive is succeeded
retries: 5
delay: 2
# run_once: true # <-- this can't be set due to multi-arch support
delegate_to: localhost
check_mode: false
- name: Unpack alertmanager binaries
become: false
ansible.builtin.unarchive:
src: "{{ alertmanager_archive_path }}/alertmanager-{{ alertmanager_version }}.linux-{{ go_arch }}.tar.gz"
dest: "{{ alertmanager_archive_path }}"
mode: 0755
creates: "{{ alertmanager_archive_path }}/alertmanager-{{ alertmanager_version }}.linux-{{ go_arch }}/alertmanager"
delegate_to: localhost
check_mode: false
- name: Propagate official alertmanager and amtool binaries
ansible.builtin.copy:
src: "{{ alertmanager_archive_path }}/alertmanager-{{ alertmanager_version }}.linux-{{ go_arch }}/{{ item }}"
dest: "{{ _alertmanager_binary_install_dir }}/{{ item }}"
mode: 0755
owner: root
group: root
with_items:
- alertmanager
- amtool
notify:
- restart alertmanager
- name: Propagate locally distributed alertmanager and amtool binaries
ansible.builtin.copy:
src: "{{ alertmanager_binary_local_dir }}/{{ item }}"
dest: "{{ _alertmanager_binary_install_dir }}/{{ item }}"
mode: 0755
owner: root
group: root
with_items:
- alertmanager
- amtool
when:
- alertmanager_binary_local_dir | length > 0
- not alertmanager_skip_install
notify:
- restart alertmanager

View file

@ -2,56 +2,48 @@
- name: Preflight
ansible.builtin.include_tasks:
file: preflight.yml
apply:
tags:
- alertmanager_install
- alertmanager_configure
- alertmanager_run
tags:
- alertmanager
- install
- configure
- alertmanager_install
- alertmanager_configure
- alertmanager_run
- name: Install
ansible.builtin.include_tasks:
file: install.yml
apply:
become: true
tags:
- alertmanager_install
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: install.yml
vars:
_common_local_cache_path: "{{ alertmanager_local_cache_path }}"
_common_binaries: "{{ _alertmanager_binaries }}"
_common_binary_install_dir: "{{ alertmanager_binary_install_dir }}"
_common_binary_url: "{{ alertmanager_binary_url }}"
_common_checksums_url: "{{ alertmanager_checksums_url }}"
_common_system_group: "{{ alertmanager_system_group }}"
_common_system_user: "{{ alertmanager_system_user }}"
_common_config_dir: "{{ alertmanager_config_dir }}"
_common_binary_unarchive_opts: ['--strip-components=1']
tags:
- alertmanager
- install
- alertmanager_install
- name: SELinux
ansible.builtin.include_tasks:
file: selinux.yml
apply:
become: true
tags:
- alertmanager_configure
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: selinux.yml
vars:
_common_selinux_port: "{{ alertmanager_web_listen_address | urlsplit('port') }}"
when: ansible_selinux.status == "enabled"
tags:
- alertmanager
- configure
- alertmanager_configure
- name: Configure
ansible.builtin.include_tasks:
file: configure.yml
apply:
become: true
tags:
- alertmanager_configure
tags:
- alertmanager
- configure
- alertmanager_configure
- name: Ensure alertmanager service is started and enabled
become: true
ansible.builtin.systemd:
daemon_reload: true
name: alertmanager
state: started
enabled: true
tags:
- alertmanager_run
- name: Flush alertmanager handlers after run.

ansible.builtin.meta: flush_handlers
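With install, SELinux and configure handed off to prometheus.prometheus._common, a consuming playbook stays unchanged; only the role-level variables seen in the molecule configuration earlier drive the new cache and download behaviour. A minimal sketch, with the host group name being hypothetical:

- hosts: alertmanager_servers   # hypothetical group name
  become: true
  roles:
    - prometheus.prometheus.alertmanager
  vars:
    alertmanager_version: 0.25.0
    alertmanager_local_cache_path: "/tmp/alertmanager-linux-amd64/{{ alertmanager_version }}"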

View file

@ -1,24 +1,8 @@
---
- name: Assert usage of systemd as an init system
ansible.builtin.assert:
that: ansible_service_mgr == 'systemd'
msg: "This module only works with systemd"
- name: Install package fact dependencies
become: true
ansible.builtin.package:
name: "{{ _pkg_fact_req }}"
state: present
when: (_pkg_fact_req)
vars:
_pkg_fact_req: "{% if (ansible_pkg_mgr == 'apt') %}\
{{ ('python-apt' if ansible_python_version is version('3', '<') else 'python3-apt') }}
{% else %}\
{% endif %}"
- name: Gather package facts
ansible.builtin.package_facts:
when: "not 'packages' in ansible_facts"
- name: Common preflight
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: preflight.yml
- name: Assert that used version supports listen address type
ansible.builtin.assert:
@ -50,28 +34,12 @@
retries: 10
when:
- alertmanager_version == "latest"
- alertmanager_binary_local_dir | length == 0
- not alertmanager_skip_install
- name: Get alertmanager binary checksum
when:
- alertmanager_binary_local_dir | length == 0
- not alertmanager_skip_install
block:
- name: "Get checksum list"
ansible.builtin.set_fact:
__alertmanager_checksums: "{{ lookup('url', alertmanager_checksums_url, headers=_github_api_headers, wantlist=True) | list }}"
run_once: true
until: __alertmanager_checksums is search('linux-' + go_arch + '.tar.gz')
retries: 10
- name: "Get checksum for {{ go_arch }}"
ansible.builtin.set_fact:
__alertmanager_checksum: "{{ item.split(' ')[0] }}"
with_items: "{{ __alertmanager_checksums }}"
when:
- "('linux-' + go_arch + '.tar.gz') in item"
tags:
- alertmanager
- install
- alertmanager_install
- download
- alertmanager_download
- name: Fail when extra config flags are duplicating ansible variables
ansible.builtin.fail:

View file

@ -1,23 +0,0 @@
---
- name: Install selinux python packages [RedHat]
ansible.builtin.package:
name: "{{ ['libselinux-python', 'policycoreutils-python']
if ansible_python_version is version('3', '<') else
['python3-libselinux', 'python3-policycoreutils'] }}"
state: present
register: _install_selinux_packages
until: _install_selinux_packages is success
retries: 5
delay: 2
when: ansible_os_family | lower == "redhat"
- name: Install selinux python packages [clearlinux]
ansible.builtin.package:
name: sysadmin-basic
state: present
register: _install_selinux_packages
until: _install_selinux_packages is success
retries: 5
delay: 2
when:
- ansible_distribution | lower == "clearlinux"

View file

@ -17,11 +17,15 @@ StartLimitIntervalSec=0
[Service]
Type=simple
{% if (ansible_facts.packages.systemd | first).version is version('239', '>=') %}
PIDFile=/run/alertmanager.pid
{% else %}
PIDFile=/var/run/alertmanager.pid
{% endif %}
User=alertmanager
Group=alertmanager
ExecReload=/bin/kill -HUP $MAINPID
ExecStart={{ _alertmanager_binary_install_dir }}/alertmanager \
ExecStart={{ alertmanager_binary_install_dir }}/alertmanager \
{% for option, value in (alertmanager_cluster.items() | sort) %}
{% if option == "peers" %}
{% for peer in value %}

View file

@ -1,15 +1,14 @@
---
go_arch_map:
i386: '386'
x86_64: 'amd64'
aarch64: 'arm64'
armv7l: 'armv7'
armv6l: 'armv6'
go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}"
_alertmanager_binary_install_dir: '/usr/local/bin'
_alertmanager_go_ansible_arch: "{{ {'i386': '386',
'x86_64': 'amd64',
'aarch64': 'arm64',
'armv7l': 'armv7',
'armv6l': 'armv6'}.get(ansible_architecture, ansible_architecture) }}"
# The expected location of the amtool configuration file
_alertmanager_amtool_config_dir: '/etc/amtool'
_alertmanager_repo: "prometheus/alertmanager"
_github_api_headers: "{{ {'GITHUB_TOKEN': lookup('ansible.builtin.env', 'GITHUB_TOKEN')} if (lookup('ansible.builtin.env', 'GITHUB_TOKEN')) else {} }}"
_alertmanager_binaries:
- alertmanager
- amtool

View file

@ -1,10 +1,8 @@
---
bind_exporter_version: 0.7.0
bind_exporter_binary_local_dir: ""
bind_exporter_binary_url: "https://github.com/{{ _bind_exporter_repo }}/releases/download/v{{ bind_exporter_version }}/\
bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch }}.tar.gz"
bind_exporter-{{ bind_exporter_version }}.{{ ansible_system | lower }}-{{ _bind_exporter_go_ansible_arch }}.tar.gz"
bind_exporter_checksums_url: "https://github.com/{{ _bind_exporter_repo }}/releases/download/v{{ bind_exporter_version }}/sha256sums.txt"
bind_exporter_skip_install: false
bind_exporter_web_listen_address: "0.0.0.0:9119"
bind_exporter_web_telemetry_path: "/metrics"
@ -20,7 +18,9 @@ bind_exporter_http_server_config: {}
bind_exporter_basic_auth_users: {}
# Internal variables.
bind_exporter_binary_install_dir: "/usr/local/bin"
bind_exporter_config_dir: "/etc/bind_exporter"
bind_exporter_system_group: "bind-exp"
bind_exporter_system_user: "{{ bind_exporter_system_group }}"
bind_exporter_binary_install_dir: "/usr/local/bin"
bind_exporter_config_dir: "/etc/bind_exporter"
bind_exporter_local_cache_path: "/tmp/bind_exporter-{{ ansible_system | lower }}-{{ _bind_exporter_go_ansible_arch }}/{{ bind_exporter_version }}"
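These defaults derive the download URL and the controller-side cache directory from the target's facts. As a worked example, on an x86_64 Linux host (ansible_system | lower is "linux", _bind_exporter_go_ansible_arch is "amd64") they expand to:

bind_exporter_binary_url: "https://github.com/prometheus-community/bind_exporter/releases/download/v0.7.0/bind_exporter-0.7.0.linux-amd64.tar.gz"
bind_exporter_local_cache_path: "/tmp/bind_exporter-linux-amd64/0.7.0"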

View file

@ -11,18 +11,9 @@ argument_specs:
bind_exporter_version:
description: "BIND exporter package version. Also accepts latest as parameter."
default: "0.7.0"
bind_exporter_skip_install:
description: "BIND installation tasks gets skipped when set to true."
type: bool
default: false
bind_exporter_binary_local_dir:
description:
- "Enables the use of local packages instead of those distributed on github."
- "The parameter may be set to a directory where the C(bind_exporter) binary is stored on the host where ansible is run."
- "This overrides the I(bind_exporter_version) parameter"
bind_exporter_binary_url:
description: "URL of the bind_exporter binaries .tar.gz file"
default: "https://github.com/{{ _bind_exporter_repo }}/download/v{{ bind_exporter_version }}/bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch }}.tar.gz"
default: "https://github.com/{{ _bind_exporter_repo }}/releases/download/v{{ bind_exporter_version }}/bind_exporter-{{ bind_exporter_version }}.{{ ansible_system | lower }}-{{ _bind_exporter_go_ansible_arch }}.tar.gz"
bind_exporter_checksums_url:
description: "URL of the bind_exporter checksums file"
default: "https://github.com/{{ _bind_exporter_repo }}/releases/download/v{{ bind_exporter_version }}/sha256sums.txt"
@ -33,7 +24,7 @@ argument_specs:
description: "Path under which to expose metrics"
default: "/metrics"
bind_exporter_config_dir:
description: "The path where exporter configuration is stored"
description: "Path to directory with bind_exporter configuration"
default: "/etc/bind_exporter"
bind_exporter_stats_url:
description: "HTTP XML API address of BIND server"
@ -88,3 +79,6 @@ argument_specs:
- "I(Advanced)"
- "BIND Exporter user"
default: "bind-exp"
bind_exporter_local_cache_path:
description: "Local path to stash the archive and its extraction"
default: "/tmp/bind_exporter-{{ ansible_system | lower }}-{{ _bind_exporter_go_ansible_arch }}/{{ bind_exporter_version }}"

View file

@ -9,19 +9,14 @@ galaxy_info:
versions:
- "focal"
- "jammy"
- "noble"
- name: "Debian"
versions:
- "bullseye"
- "buster"
- name: "EL"
versions:
- "7"
- "8"
- "9"
- name: "Fedora"
versions:
- "37"
- '38'
galaxy_tags:
- "monitoring"
- "prometheus"

View file

@ -1,9 +1,12 @@
---
provisioner:
playbooks:
prepare: "${MOLECULE_PROJECT_DIRECTORY}/../../.config/molecule/alternative/prepare.yml"
inventory:
group_vars:
all:
bind_exporter_binary_local_dir: "/tmp/bind_exporter-linux-amd64"
bind_exporter_version: 0.7.0
bind_exporter_local_cache_path: "/tmp/bind_exporter-linux-amd64/{{ bind_exporter_version }}"
bind_exporter_web_listen_address:
- '127.0.0.1:8080'
- '127.0.1.1:8080'
@ -17,5 +20,6 @@ provisioner:
http2: true
bind_exporter_basic_auth_users:
randomuser: examplepassword
go_arch: amd64
bind_exporter_version: 0.7.0
bind_exporter_binary_url: "https://github.com/prometheus-community/bind_exporter/releases/download/v{{\
\ bind_exporter_version }}/bind_exporter-{{ bind_exporter_version\
\ }}.linux-amd64.tar.gz"

View file

@ -1,80 +0,0 @@
---
- name: Run local preparation
hosts: localhost
gather_facts: false
tasks:
- name: Download bind_exporter binary to local folder
become: false
ansible.builtin.get_url:
url: "https://github.com/prometheus-community/bind_exporter/releases/download/v{{\
\ bind_exporter_version }}/bind_exporter-{{ bind_exporter_version\
\ }}.linux-{{ go_arch }}.tar.gz"
dest: "/tmp/bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch\
\ }}.tar.gz"
mode: 0644
register: _download_binary
until: _download_binary is succeeded
retries: 5
delay: 2
check_mode: false
- name: Unpack bind_exporter binary
become: false
ansible.builtin.unarchive:
src: "/tmp/bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch\
\ }}.tar.gz"
dest: "/tmp"
creates: "/tmp/bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch\
\ }}/bind_exporter"
check_mode: false
- name: Link to bind_exporter binaries directory
become: false
ansible.builtin.file:
src: "/tmp/bind_exporter-{{ bind_exporter_version }}.linux-amd64"
dest: "/tmp/bind_exporter-linux-amd64"
state: link
check_mode: false
- name: Install pyOpenSSL for certificate generation
ansible.builtin.pip:
name: "pyOpenSSL"
- name: Create private key
community.crypto.openssl_privatekey:
path: "/tmp/tls.key"
- name: Create CSR
community.crypto.openssl_csr:
path: "/tmp/tls.csr"
privatekey_path: "/tmp/tls.key"
- name: Create certificate
community.crypto.x509_certificate:
path: "/tmp/tls.cert"
csr_path: "/tmp/tls.csr"
privatekey_path: "/tmp/tls.key"
provider: selfsigned
- name: Run target preparation
hosts: all
any_errors_fatal: true
tasks:
- name: Create bind_exporter cert dir
ansible.builtin.file:
path: "{{ bind_exporter_tls_server_config.cert_file | dirname }}"
state: directory
owner: root
group: root
mode: u+rwX,g+rwX,o=rX
- name: Copy cert and key
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "{{ item.mode | default('0644') }}"
loop:
- src: "/tmp/tls.cert"
dest: "{{ bind_exporter_tls_server_config.cert_file }}"
- src: "/tmp/tls.key"
dest: "{{ bind_exporter_tls_server_config.key_file }}"

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
def test_service(host):

View file

@ -1,11 +1,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
def test_directories(host):
@ -48,7 +46,6 @@ def test_user(host):
assert host.group("bind-exp").exists
assert "bind-exp" in host.user("bind-exp").groups
assert host.user("bind-exp").shell == "/usr/sbin/nologin"
assert host.user("bind-exp").home == "/"
def test_service(host):

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
@pytest.mark.parametrize("files", [

View file

@ -1,42 +0,0 @@
---
- name: Copy the bind_exporter systemd service file
ansible.builtin.template:
src: bind_exporter.service.j2
dest: /etc/systemd/system/bind_exporter.service
owner: root
group: root
mode: '0644'
notify: restart bind_exporter
- name: Create bind_exporter config directory
ansible.builtin.file:
path: "{{ bind_exporter_config_dir }}"
state: directory
owner: root
group: root
mode: u+rwX,g+rwX,o=rX
- name: Configure bind_exporter web config
when:
( bind_exporter_tls_server_config | length > 0 ) or
( bind_exporter_http_server_config | length > 0 ) or
( bind_exporter_basic_auth_users | length > 0 )
block:
- name: Copy the bind_exporter web config file
ansible.builtin.template:
src: web_config.yaml.j2
dest: "{{ bind_exporter_config_dir }}/web_config.yaml"
owner: root
group: '{{ bind_exporter_system_group }}'
mode: '0640'
notify: restart bind_exporter
- name: Allow bind_exporter port in SELinux on RedHat OS family
community.general.seport:
ports: "{{ bind_exporter_web_listen_address.split(':')[-1] }}"
proto: tcp
setype: http_port_t
state: present
when:
- ansible_version.full is version_compare('2.4', '>=')
- ansible_selinux.status == "enabled"

View file

@ -1,71 +0,0 @@
---
- name: Create the bind_exporter group
ansible.builtin.group:
name: "{{ bind_exporter_system_group }}"
state: present
system: true
when: bind_exporter_system_group not in ["root"]
- name: Create the bind_exporter user
ansible.builtin.user:
name: "{{ bind_exporter_system_user }}"
groups: "{{ bind_exporter_system_group }}"
append: true
shell: /usr/sbin/nologin
system: true
create_home: false
home: /
when: bind_exporter_system_user not in ["root"]
- name: Get binary
when:
- bind_exporter_binary_local_dir | length == 0
- not bind_exporter_skip_install
block:
- name: Download bind_exporter binary to local folder
become: false
ansible.builtin.get_url:
url: "{{ bind_exporter_binary_url }}"
dest: "/tmp/bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch }}.tar.gz"
checksum: "sha256:{{ __bind_exporter_checksum }}"
mode: '0644'
register: _download_binary
until: _download_binary is succeeded
retries: 5
delay: 2
delegate_to: localhost
check_mode: false
- name: Unpack bind_exporter binary
become: false
ansible.builtin.unarchive:
src: "/tmp/bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch }}.tar.gz"
dest: "/tmp"
creates: "/tmp/bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch }}/bind_exporter"
extra_opts:
- --no-same-owner
delegate_to: localhost
check_mode: false
- name: Propagate bind_exporter binaries
ansible.builtin.copy:
src: "/tmp/bind_exporter-{{ bind_exporter_version }}.linux-{{ go_arch }}/bind_exporter"
dest: "{{ bind_exporter_binary_install_dir }}/bind_exporter"
mode: '0755'
owner: root
group: root
notify: restart bind_exporter
when: not ansible_check_mode
- name: Propagate locally distributed bind_exporter binary
ansible.builtin.copy:
src: "{{ bind_exporter_binary_local_dir }}/bind_exporter"
dest: "{{ bind_exporter_binary_install_dir }}/bind_exporter"
mode: '0755'
owner: root
group: root
when:
- bind_exporter_binary_local_dir | length > 0
- not bind_exporter_skip_install
notify: restart bind_exporter

View file

@ -2,51 +2,50 @@
- name: Preflight
ansible.builtin.include_tasks:
file: preflight.yml
apply:
tags:
- bind_exporter_install
- bind_exporter_configure
- bind_exporter_run
tags:
- bind_exporter_install
- bind_exporter_configure
- bind_exporter_run
- name: Install
ansible.builtin.include_tasks:
file: install.yml
apply:
become: true
tags:
- bind_exporter_install
when:
( not __bind_exporter_is_installed.stat.exists ) or
( (__bind_exporter_current_version_output.stderr_lines | length > 0)
and (__bind_exporter_current_version_output.stderr_lines[0].split(" ")[2] != bind_exporter_version) ) or
( (__bind_exporter_current_version_output.stdout_lines | length > 0)
and (__bind_exporter_current_version_output.stdout_lines[0].split(" ")[2] != bind_exporter_version) ) or
( bind_exporter_binary_local_dir | length > 0 )
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: install.yml
vars:
_common_local_cache_path: "{{ bind_exporter_local_cache_path }}"
_common_binaries: "{{ _bind_exporter_binaries }}"
_common_binary_install_dir: "{{ bind_exporter_binary_install_dir }}"
_common_binary_url: "{{ bind_exporter_binary_url }}"
_common_checksums_url: "{{ bind_exporter_checksums_url }}"
_common_system_group: "{{ bind_exporter_system_group }}"
_common_system_user: "{{ bind_exporter_system_user }}"
_common_config_dir: "{{ bind_exporter_config_dir }}"
_common_binary_unarchive_opts: ['--strip-components=1']
tags:
- bind_exporter_install
- name: SELinux
ansible.builtin.include_tasks:
file: selinux.yml
apply:
become: true
tags:
- bind_exporter_configure
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: selinux.yml
vars:
_common_selinux_port: "{{ bind_exporter_web_listen_address | urlsplit('port') }}"
when: ansible_selinux.status == "enabled"
tags:
- bind_exporter_configure
- name: Configure
ansible.builtin.include_tasks:
file: configure.yml
apply:
become: true
tags:
- bind_exporter_configure
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: configure.yml
vars:
_common_system_user: "{{ bind_exporter_system_user }}"
_common_system_group: "{{ bind_exporter_system_group }}"
_common_config_dir: "{{ bind_exporter_config_dir }}"
_common_tls_server_config: "{{ bind_exporter_tls_server_config }}"
_common_http_server_config: "{{ bind_exporter_http_server_config }}"
_common_basic_auth_users: "{{ bind_exporter_basic_auth_users }}"
tags:
- bind_exporter_configure
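The configure step only produces a web config when at least one of the TLS, HTTP server or basic-auth dictionaries is non-empty (assuming the shared configure tasks keep the same condition as the role-local ones they replace). A minimal sketch of inventory values that would trigger it, with the certificate paths being illustrative placeholders:

bind_exporter_tls_server_config:
  cert_file: /etc/bind_exporter/tls.cert   # illustrative path
  key_file: /etc/bind_exporter/tls.key     # illustrative path
bind_exporter_http_server_config:
  http2: true
bind_exporter_basic_auth_users:
  randomuser: examplepassword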

View file

@ -1,24 +1,8 @@
---
- name: Assert usage of systemd as an init system
ansible.builtin.assert:
that: ansible_service_mgr == 'systemd'
msg: "This role only works with systemd"
- name: Install package fact dependencies
become: true
ansible.builtin.package:
name: "{{ _pkg_fact_req }}"
state: present
when: (_pkg_fact_req)
vars:
_pkg_fact_req: "{% if (ansible_pkg_mgr == 'apt') %}\
{{ ('python-apt' if ansible_python_version is version('3', '<') else 'python3-apt') }}
{% else %}\
{% endif %}"
- name: Gather package facts
ansible.builtin.package_facts:
when: "not 'packages' in ansible_facts"
- name: Common preflight
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: preflight.yml
- name: Naive assertion of proper listen address
ansible.builtin.assert:
@ -55,23 +39,6 @@
- "__bind_exporter_cert_file.stat.exists"
- "__bind_exporter_key_file.stat.exists"
- name: Check if bind_exporter is installed
ansible.builtin.stat:
path: "{{ bind_exporter_binary_install_dir }}/bind_exporter"
register: __bind_exporter_is_installed
check_mode: false
tags:
- bind_exporter_install
- name: Gather currently installed bind_exporter version (if any)
ansible.builtin.command: "{{ bind_exporter_binary_install_dir }}/bind_exporter --version"
changed_when: false
register: __bind_exporter_current_version_output
check_mode: false
when: __bind_exporter_is_installed.stat.exists
tags:
- bind_exporter_install
- name: Discover latest version
ansible.builtin.set_fact:
bind_exporter_version: "{{ (lookup('url', 'https://api.github.com/repos/{{ _bind_exporter_repo }}/releases/latest', headers=_github_api_headers,
@ -81,24 +48,9 @@
retries: 10
when:
- bind_exporter_version == "latest"
- bind_exporter_binary_local_dir | length == 0
- not bind_exporter_skip_install
- name: Get bind_exporter binary checksum
when:
- bind_exporter_binary_local_dir | length == 0
- not bind_exporter_skip_install
block:
- name: Get checksum list from github
ansible.builtin.set_fact:
__bind_exporter_checksums: "{{ lookup('url', bind_exporter_checksums_url, headers=_github_api_headers, wantlist=True) | list }}"
run_once: true
until: __bind_exporter_checksums is search('linux-' + go_arch + '.tar.gz')
retries: 10
- name: "Get checksum for {{ go_arch }}"
ansible.builtin.set_fact:
__bind_exporter_checksum: "{{ item.split(' ')[0] }}"
with_items: "{{ __bind_exporter_checksums }}"
when:
- "('linux-' + go_arch + '.tar.gz') in item"
tags:
- bind_exporter
- install
- bind_exporter_install
- download
- bind_exporter_download

View file

@ -1,23 +0,0 @@
---
- name: Install selinux python packages [RedHat]
ansible.builtin.package:
name: "{{ ['libselinux-python', 'policycoreutils-python']
if ansible_python_version is version('3', '<') else
['python3-libselinux', 'python3-policycoreutils'] }}"
state: present
register: _install_selinux_packages
until: _install_selinux_packages is success
retries: 5
delay: 2
when: ansible_os_family | lower == "redhat"
- name: Install selinux python packages [clearlinux]
ansible.builtin.package:
name: sysadmin-basic
state: present
register: _install_selinux_packages
until: _install_selinux_packages is success
retries: 5
delay: 2
when:
- ansible_distribution | lower == "clearlinux"

View file

@ -13,7 +13,7 @@ ExecStart={{ bind_exporter_binary_install_dir }}/bind_exporter \
--bind.stats-groups="{{ bind_exporter_stats_groups | join(',') }}" \
{% endif %}
{% if bind_exporter_tls_server_config | length > 0 or bind_exporter_http_server_config | length > 0 or bind_exporter_basic_auth_users | length > 0 %}
--web.config.file={{ bind_exporter_config_dir }}/web_config.yaml \
--web.config.file={{ bind_exporter_config_dir }}/web_config.yml \
{% endif %}
--bind.stats-url="{{ bind_exporter_stats_url }}" \
--bind.timeout="{{ bind_exporter_timeout }}" \

View file

@ -1,18 +0,0 @@
---
{{ ansible_managed | comment }}
{% if bind_exporter_tls_server_config | length > 0 %}
tls_server_config:
{{ bind_exporter_tls_server_config | to_nice_yaml | indent(2, true) }}
{% endif %}
{% if bind_exporter_http_server_config | length > 0 %}
http_server_config:
{{ bind_exporter_http_server_config | to_nice_yaml | indent(2, true) }}
{% endif %}
{% if bind_exporter_basic_auth_users | length > 0 %}
basic_auth_users:
{% for k, v in bind_exporter_basic_auth_users.items() %}
{{ k }}: {{ v | string | password_hash('bcrypt', ('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890' | shuffle(seed=inventory_hostname) | join)[:22], rounds=9) }}
{% endfor %}
{% endif %}

View file

@ -1,12 +1,9 @@
---
go_arch_map:
i386: '386'
x86_64: 'amd64'
aarch64: 'arm64'
armv7l: 'armv7'
armv6l: 'armv6'
go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}"
_bind_exporter_go_ansible_arch: "{{ {'i386': '386',
'x86_64': 'amd64',
'aarch64': 'arm64',
'armv7l': 'armv7',
'armv6l': 'armv6'}.get(ansible_architecture, ansible_architecture) }}"
_bind_exporter_repo: "prometheus-community/bind_exporter"
_github_api_headers: "{{ {'GITHUB_TOKEN': lookup('ansible.builtin.env', 'GITHUB_TOKEN')} if (lookup('ansible.builtin.env', 'GITHUB_TOKEN')) else {} }}"
_bind_exporter_binaries: ['bind_exporter']

View file

@ -1,16 +1,13 @@
---
blackbox_exporter_version: 0.25.0
blackbox_exporter_binary_local_dir: ""
blackbox_exporter_binary_url: "https://github.com/{{ _blackbox_exporter_repo }}/releases/download/v{{ blackbox_exporter_version }}/\
blackbox_exporter-{{ blackbox_exporter_version }}.linux-{{ go_arch_map[ansible_architecture] |
default(ansible_architecture) }}.tar.gz"
blackbox_exporter-{{ blackbox_exporter_version }}.{{ ansible_system | lower }}-{{ _blackbox_exporter_go_ansible_arch }}.tar.gz"
blackbox_exporter_checksums_url: "https://github.com/{{ _blackbox_exporter_repo }}/releases/download/v{{ blackbox_exporter_version }}/sha256sums.txt"
blackbox_exporter_skip_install: false
blackbox_exporter_web_listen_address: "0.0.0.0:9115"
blackbox_exporter_user: blackbox-exp
blackbox_exporter_group: "{{ blackbox_exporter_user }}"
blackbox_exporter_system_user: blackbox-exp
blackbox_exporter_system_group: "{{ blackbox_exporter_system_user }}"
blackbox_exporter_cli_flags: {}
# blackbox_exporter_cli_flags:
@ -74,7 +71,8 @@ blackbox_exporter_configuration_modules:
# Where to put the blackbox_exporter.yml main configuration file
blackbox_exporter_config_dir: /etc/blackbox_exporter
blackbox_exporter_binary_install_dir: "/usr/local/bin"
# Local path to stash the archive and its extraction
blackbox_exporter_archive_path: /tmp
blackbox_exporter_local_cache_path: "/tmp/blackbox_exporter-{{ ansible_system | lower }}-{{ _blackbox_exporter_go_ansible_arch }}/\
{{ blackbox_exporter_version }}"
blackbox_exporter_binary_install_dir: "/usr/local/bin"

View file

@ -11,18 +11,9 @@ argument_specs:
blackbox_exporter_version:
description: "Blackbox exporter package version. Also accepts latest as parameter."
default: "0.25.0"
blackbox_exporter_skip_install:
description: "Blackbox exporter installation tasks gets skipped when set to true."
type: bool
default: false
blackbox_exporter_binary_local_dir:
description:
- "Enables the use of local packages instead of those distributed on github."
- "The parameter may be set to a directory where the C(blackbox_exporter) binary is stored on the host where ansible is run."
- "This overrides the I(blackbox_exporter_version) parameter"
blackbox_exporter_binary_url:
description: "URL of the blackbox_exporter binaries .tar.gz file"
default: "https://github.com/{{ _blackbox_exporter_repo }}/releases/download/v{{ blackbox_exporter_version }}/blackbox_exporter-{{ blackbox_exporter_version }}.linux-{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}.tar.gz"
default: "https://github.com/{{ _blackbox_exporter_repo }}/releases/download/v{{ blackbox_exporter_version }}/blackbox_exporter-{{ blackbox_exporter_version }}.{{ ansible_system | lower }}-{{ _blackbox_exporter_go_ansible_arch }}.tar.gz"
blackbox_exporter_checksums_url:
description: "URL of the blackbox exporter checksums file"
default: "https://github.com/{{ _blackbox_exporter_repo }}/releases/download/v{{ blackbox_exporter_version }}/sha256sums.txt"
@ -43,19 +34,19 @@ argument_specs:
method: GET
valid_status_codes: []
blackbox_exporter_config_dir:
description: "Directory where the blackbox exporter configuration file is placed"
default: "/etc"
description: "Path to directory with blackbox_exporter configuration"
default: "/etc/blackbox_exporter"
blackbox_exporter_binary_install_dir:
description:
- "I(Advanced)"
- "Directory to install blackbox_exporter binary"
default: "/usr/local/bin"
blackbox_exporter_user:
blackbox_exporter_system_user:
description: "The user the exporter runs as"
default: "blackbox-exp"
blackbox_exporter_group:
blackbox_exporter_system_group:
description: "The group the exporter runs as"
default: "blackbox-exp"
blackbox_exporter_archive_path:
blackbox_exporter_local_cache_path:
description: 'Local path to stash the archive and its extraction'
default: "/tmp"
default: "/tmp/blackbox_exporter-{{ ansible_system | lower }}-{{ _blackbox_exporter_go_ansible_arch }}/{{ blackbox_exporter_version }}"

View file

@ -9,19 +9,14 @@ galaxy_info:
versions:
- "focal"
- "jammy"
- "noble"
- name: "Debian"
versions:
- "bullseye"
- "buster"
- name: "EL"
versions:
- "7"
- "8"
- "9"
- name: "Fedora"
versions:
- "37"
- '38'
galaxy_tags:
- "exporter"
- "monitoring"

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
@pytest.mark.parametrize("files", [

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
@pytest.mark.parametrize("files", [

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
@pytest.mark.parametrize("files", [

View file

@ -1,28 +1,50 @@
---
- name: Create systemd service unit
ansible.builtin.template:
src: blackbox_exporter.service.j2
dest: /etc/systemd/system/blackbox_exporter.service
owner: root
group: root
mode: '0644'
notify:
- restart blackbox_exporter
- name: "Common configure"
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: configure.yml
vars:
_common_system_user: "{{ blackbox_exporter_system_user }}"
_common_system_group: "{{ blackbox_exporter_system_group }}"
_common_config_dir: "{{ blackbox_exporter_config_dir }}"
tags:
- blackbox_exporter
- configure
- blackbox_exporter_configure
- name: Create blackbox_exporter config directory
ansible.builtin.file:
path: "{{ blackbox_exporter_config_dir }}"
state: directory
owner: root
group: root
mode: u+rwX,g+rwX,o=rX
- name: Ensure blackbox exporter binary has cap_net_raw capability
community.general.capabilities:
path: "{{ blackbox_exporter_binary_install_dir }}/blackbox_exporter"
capability: cap_net_raw=ep
state: present
become: true
when: not ansible_check_mode
tags:
- blackbox_exporter
- configure
- blackbox_exporter_configure
- molecule-idempotence-notest
- name: Check Debug Message
ansible.builtin.debug:
msg: "The capabilities module is skipped during check mode, as the file may not exist, causing execution to fail."
when: ansible_check_mode
tags:
- blackbox_exporter
- configure
- blackbox_exporter_configure
- name: Configure blackbox exporter
ansible.builtin.template:
src: blackbox_exporter.yml.j2
dest: "{{ blackbox_exporter_config_dir }}/blackbox_exporter.yml"
owner: root
group: "{{ blackbox_exporter_group }}"
owner: "{{ blackbox_exporter_system_user }}"
group: "{{ blackbox_exporter_system_group }}"
mode: '0644'
become: true
notify:
- reload blackbox_exporter
tags:
- blackbox_exporter
- configure
- blackbox_exporter_configure

View file

@ -1,90 +0,0 @@
---
- name: Create blackbox_exporter system group
ansible.builtin.group:
name: "{{ blackbox_exporter_group }}"
system: true
state: present
when: blackbox_exporter_group != 'root'
- name: Create blackbox_exporter system user
ansible.builtin.user:
name: "{{ blackbox_exporter_user }}"
system: true
shell: "/usr/sbin/nologin"
group: "{{ blackbox_exporter_group }}"
createhome: false
when: blackbox_exporter_user != 'root'
- name: Get binary
when:
- blackbox_exporter_binary_local_dir | length == 0
- not blackbox_exporter_skip_install
block:
- name: Download blackbox_exporter binary to local folder
become: false
ansible.builtin.get_url:
url: "{{ blackbox_exporter_binary_url }}"
dest: "{{ blackbox_exporter_archive_path }}/blackbox_exporter-{{ blackbox_exporter_version }}.linux-{{ go_arch }}.tar.gz"
checksum: "sha256:{{ __blackbox_exporter_checksum }}"
mode: '0644'
register: _download_binary
until: _download_binary is succeeded
retries: 5
delay: 2
delegate_to: localhost
check_mode: false
- name: Unpack blackbox_exporter binary
become: false
ansible.builtin.unarchive:
src: "{{ blackbox_exporter_archive_path }}/blackbox_exporter-{{ blackbox_exporter_version }}.linux-{{ go_arch }}.tar.gz"
dest: "{{ blackbox_exporter_archive_path }}"
creates: "{{ blackbox_exporter_archive_path }}/blackbox_exporter-{{ blackbox_exporter_version }}.linux-{{ go_arch }}/blackbox_exporter"
delegate_to: localhost
check_mode: false
- name: Propagate blackbox_exporter binaries
ansible.builtin.copy:
src: "{{ blackbox_exporter_archive_path }}/blackbox_exporter-{{ blackbox_exporter_version }}.linux-{{ go_arch }}/blackbox_exporter"
dest: "{{ blackbox_exporter_binary_install_dir }}/blackbox_exporter"
mode: 0755
owner: root
group: root
notify: restart blackbox_exporter
when: not ansible_check_mode
- name: Propagate locally distributed blackbox_exporter binary
ansible.builtin.copy:
src: "{{ blackbox_exporter_binary_local_dir }}/blackbox_exporter"
dest: "{{ blackbox_exporter_binary_install_dir }}/blackbox_exporter"
mode: '0755'
owner: root
group: root
when:
- blackbox_exporter_binary_local_dir | length > 0
- not blackbox_exporter_skip_install
notify: restart blackbox_exporter
- name: Install libcap on Debian systems
ansible.builtin.package:
name: "libcap2-bin"
state: present
register: _download_packages
until: _download_packages is succeeded
retries: 5
delay: 2
when: ansible_os_family | lower == "debian"
- name: Ensure blackbox exporter binary has cap_net_raw capability
community.general.capabilities:
path: '/usr/local/bin/blackbox_exporter'
capability: cap_net_raw+ep
state: present
when: not ansible_check_mode
changed_when: "'molecule-idempotence-notest' not in ansible_skip_tags"
- name: Check Debug Message
ansible.builtin.debug:
msg: "The capabilities module is skipped during check mode, as the file may not exist, causing execution to fail."
when: ansible_check_mode

View file

@ -2,33 +2,41 @@
- name: Preflight
ansible.builtin.include_tasks:
file: preflight.yml
apply:
tags:
- blackbox_exporter_install
- blackbox_exporter_configure
- blackbox_exporter_run
tags:
- blackbox_exporter_install
- blackbox_exporter_configure
- blackbox_exporter_run
- name: Install
ansible.builtin.include_tasks:
file: install.yml
apply:
become: true
tags:
- blackbox_exporter_install
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: install.yml
vars:
_common_local_cache_path: "{{ blackbox_exporter_local_cache_path }}"
_common_binaries: "{{ _blackbox_exporter_binaries }}"
_common_binary_install_dir: "{{ blackbox_exporter_binary_install_dir }}"
_common_binary_url: "{{ blackbox_exporter_binary_url }}"
_common_checksums_url: "{{ blackbox_exporter_checksums_url }}"
_common_system_group: "{{ blackbox_exporter_system_group }}"
_common_system_user: "{{ blackbox_exporter_system_user }}"
_common_config_dir: "{{ blackbox_exporter_config_dir }}"
_common_binary_unarchive_opts: ['--strip-components=1']
tags:
- blackbox_exporter_install
- name: SELinux
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: selinux.yml
vars:
_common_selinux_port: "{{ blackbox_exporter_web_listen_address | urlsplit('port') }}"
when: ansible_selinux.status == "enabled"
tags:
- blackbox_exporter_configure
- name: Configure
ansible.builtin.include_tasks:
file: configure.yml
apply:
become: true
tags:
- blackbox_exporter_configure
tags:
- blackbox_exporter_configure

View file

@ -1,24 +1,10 @@
---
- name: Assert usage of systemd as an init system
ansible.builtin.assert:
that: ansible_service_mgr == 'systemd'
msg: "This role only works with systemd"
- name: Install package fact dependencies
become: true
ansible.builtin.package:
name: "{{ _pkg_fact_req }}"
state: present
when: (_pkg_fact_req)
- name: Common preflight
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: preflight.yml
vars:
_pkg_fact_req: "{% if (ansible_pkg_mgr == 'apt') %}\
{{ ('python-apt' if ansible_python_version is version('3', '<') else 'python3-apt') }}
{% else %}\
{% endif %}"
- name: Gather package facts
ansible.builtin.package_facts:
when: "not 'packages' in ansible_facts"
_common_dependencies: "{{ _blackbox_exporter_dependencies }}"
- name: Assert that used version supports listen address type
ansible.builtin.assert:
@ -44,30 +30,15 @@
- name: Discover latest version
ansible.builtin.set_fact:
blackbox_exporter_version: "{{ (lookup('url', 'https://api.github.com/repos/{{ _blackbox_exporter_repo }}/releases/latest', headers=_github_api_headers,
split_lines=False) | from_json).get('tag_name') | replace('v', '') }}"
split_lines=False) | from_json).get('tag_name') | replace('v', '') }}"
run_once: true
until: blackbox_exporter_version is version('0.0.0', '>=')
retries: 10
when:
- blackbox_exporter_version == "latest"
- blackbox_exporter_binary_local_dir | length == 0
- not blackbox_exporter_skip_install
- name: Get blackbox_exporter binary checksum
when:
- blackbox_exporter_binary_local_dir | length == 0
- not blackbox_exporter_skip_install
block:
- name: Get checksum list from github
ansible.builtin.set_fact:
__blackbox_exporter_checksums: "{{ lookup('url', blackbox_exporter_checksums_url, headers=_github_api_headers, wantlist=True) | list }}"
run_once: true
until: __blackbox_exporter_checksums is search('linux-' + go_arch + '.tar.gz')
retries: 10
- name: "Get checksum for {{ go_arch }}"
ansible.builtin.set_fact:
__blackbox_exporter_checksum: "{{ item.split(' ')[0] }}"
with_items: "{{ __blackbox_exporter_checksums }}"
when:
- "('linux-' + go_arch + '.tar.gz') in item"
tags:
- blackbox_exporter
- install
- blackbox_exporter_install
- download
- blackbox_exporter_download

View file

@ -7,11 +7,11 @@ StartLimitIntervalSec=0
[Service]
Type=simple
User={{ blackbox_exporter_user }}
Group={{ blackbox_exporter_group }}
User={{ blackbox_exporter_system_user }}
Group={{ blackbox_exporter_system_group }}
PermissionsStartOnly=true
ExecReload=/bin/kill -HUP $MAINPID
ExecStart=/usr/local/bin/blackbox_exporter \
ExecStart={{ blackbox_exporter_binary_install_dir }}/blackbox_exporter \
--config.file={{ blackbox_exporter_config_dir }}/blackbox_exporter.yml \
{% for flag, flag_value in blackbox_exporter_cli_flags.items() -%}
--{{ flag }}={{ flag_value }} \
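Each key/value pair in blackbox_exporter_cli_flags is rendered as an extra "--<flag>=<value>" argument on the ExecStart line above. An illustrative (hypothetical) setting:

blackbox_exporter_cli_flags:
  log.level: warn    # rendered as --log.level=warn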

View file

@ -1,11 +1,13 @@
---
go_arch_map:
i386: '386'
x86_64: 'amd64'
aarch64: 'arm64'
armv7l: 'armv7'
armv6l: 'armv6'
go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}"
_blackbox_exporter_go_ansible_arch: "{{ {'i386': '386',
'x86_64': 'amd64',
'aarch64': 'arm64',
'armv7l': 'armv7',
'armv6l': 'armv6'}.get(ansible_architecture, ansible_architecture) }}"
_blackbox_exporter_repo: "prometheus/blackbox_exporter"
_github_api_headers: "{{ {'GITHUB_TOKEN': lookup('ansible.builtin.env', 'GITHUB_TOKEN')} if (lookup('ansible.builtin.env', 'GITHUB_TOKEN')) else {} }}"
_blackbox_exporter_binaries: ['blackbox_exporter']
_blackbox_exporter_dependencies: "{% if (ansible_pkg_mgr == 'apt') %}\
{{ (['python-apt', 'libcap2-bin'] if ansible_python_version is version('3', '<') else ['python3-apt', 'libcap2-bin']) }}
{% else %}\
{% endif %}"

View file

@ -1,9 +1,7 @@
---
cadvisor_version: 0.49.1
cadvisor_binary_local_dir: ""
cadvisor_binary_url: "https://github.com/{{ _cadvisor_repo }}/releases/download/v{{ cadvisor_version }}/\
cadvisor-v{{ cadvisor_version }}-linux-{{ go_arch }}"
cadvisor_skip_install: false
cadvisor-v{{ cadvisor_version }}-{{ ansible_system | lower }}-{{ _cadvisor_go_ansible_arch }}"
cadvisor_listen_ip: "0.0.0.0"
cadvisor_port: "8080"
@ -11,11 +9,15 @@ cadvisor_prometheus_endpoint: "/metrics"
cadvisor_enable_metrics: []
cadvisor_disable_metrics: []
cadvisor_env_metadata_whitelist: []
cadvisor_whitelisted_container_labels: []
cadvisor_store_container_labels: true
cadvisor_docker_only: false
cadvisor_binary_install_dir: "/usr/local/bin"
cadvisor_system_group: "root"
cadvisor_system_user: "{{ cadvisor_system_group }}"
# Local path to stash the archive and its extraction
cadvisor_archive_path: /tmp
cadvisor_local_cache_path: "/tmp/cadvisor-{{ ansible_system | lower }}-{{ _cadvisor_go_ansible_arch }}/{{ cadvisor_version }}"
cadvisor_binary_install_dir: "/usr/local/bin"

View file

@ -11,18 +11,9 @@ argument_specs:
cadvisor_version:
description: "cAdvisor package version. Also accepts latest as parameter."
default: "0.49.1"
cadvisor_skip_install:
description: "cAdvisor installation tasks gets skipped when set to true."
type: bool
default: false
cadvisor_binary_local_dir:
description:
- "Enables the use of local packages instead of those distributed on github."
- "The parameter may be set to a directory where the C(cadvisor) binary is stored on the host where ansible is run."
- "This overrides the I(cadvisor_version) parameter"
cadvisor_binary_url:
description: "URL of the cadvisor binary file"
default: "https://github.com/{{ _cadvisor_repo }}/releases/download/v{{ cadvisor_version }}/cadvisor-{{ cadvisor_version }}-linux-{{ go_arch }}"
default: "https://github.com/{{ _cadvisor_repo }}/releases/download/v{{ cadvisor_version }}/cadvisor-v{{ cadvisor_version }}-{{ ansible_system | lower }}-{{ _cadvisor_go_ansible_arch }}"
cadvisor_listen_ip:
description: "Address on which cadvisor will listen"
default: "0.0.0.0"
@ -64,15 +55,22 @@ argument_specs:
default: []
type: "list"
choices: *metrics_choices
cadvisor_store_container_labels:
description: "store all container labels"
type: "bool"
default: true
cadvisor_whitelisted_container_labels:
description: "comma-separated list of container labels to be used as labels on prometheus metrics"
default: []
type: "list"
cadvisor_env_metadata_whitelist:
description: "comma-separated list of env variables to be used as labels on prometheus metrics"
default: []
type: "list"
cadvisor_docker_only:
description: "do not report raw cgroup metrics, except the root cgroup"
type: "bool"
default: false
cadvisor_binary_install_dir:
description:
- "I(Advanced)"
- "Directory to install cadvisor binary"
default: "/usr/local/bin"
cadvisor_system_group:
description:
- "I(Advanced)"
@ -83,6 +81,11 @@ argument_specs:
- "I(Advanced)"
- "cAdvisor user"
default: "root"
cadvisor_archive_path:
cadvisor_binary_install_dir:
description:
- "I(Advanced)"
- "Directory to install binaries"
default: "/usr/local/bin"
cadvisor_local_cache_path:
description: 'Local path to stash the archive and its extraction'
default: "/tmp"
default: "/tmp/cadvisor-{{ ansible_system | lower }}-{{ _cadvisor_go_ansible_arch }}/{{ cadvisor_version }}"

View file

@ -9,19 +9,14 @@ galaxy_info:
versions:
- "focal"
- "jammy"
- "noble"
- name: "Debian"
versions:
- "bullseye"
- "buster"
- name: "EL"
versions:
- "7"
- "8"
- "9"
- name: "Fedora"
versions:
- "37"
- '38'
galaxy_tags:
- "monitoring"
- "prometheus"

View file

@ -1,11 +1,11 @@
---
provisioner:
playbooks:
prepare: "${MOLECULE_PROJECT_DIRECTORY}/../../.config/molecule/alternative/prepare.yml"
inventory:
group_vars:
all:
cadvisor_binary_local_dir: "/tmp"
cadvisor_port: "8000"
go_arch: amd64
cadvisor_version: 0.47.0
cadvisor_enable_metrics:
- tcp
@ -13,3 +13,6 @@ provisioner:
- network
- cpu
cadvisor_docker_only: true
cadvisor_whitelisted_container_labels: [ "com.docker.compose.image" ]
cadvisor_env_metadata_whitelist: [ "PATH" ]
cadvisor_store_container_labels: false

View file

@ -1,24 +0,0 @@
---
- name: Run local preparation
hosts: localhost
gather_facts: false
tasks:
- name: Download cadvisor binary to local folder
become: false
ansible.builtin.get_url:
url: "https://github.com/google/cadvisor/releases/download/v{{ cadvisor_version }}/cadvisor-v{{ cadvisor_version }}-linux-{{ go_arch }}"
dest: "/tmp/cadvisor-v{{ cadvisor_version }}-linux-{{ go_arch }}"
mode: 0644
register: _download_binary
until: _download_binary is succeeded
retries: 5
delay: 2
check_mode: false
- name: Link to cadvisor binaries directory
become: false
ansible.builtin.file:
src: "/tmp/cadvisor-v{{ cadvisor_version }}-linux-{{ go_arch }}"
dest: "/tmp/cadvisor"
state: link
check_mode: false

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
def test_service(host):

View file

@ -1,11 +1,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
def test_files(host):

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
@pytest.mark.parametrize("files", [

View file

@ -1,19 +0,0 @@
---
- name: Copy the cadvisor systemd service file
ansible.builtin.template:
src: cadvisor.service.j2
dest: /etc/systemd/system/cadvisor.service
owner: root
group: root
mode: 0644
notify: restart cadvisor
- name: Allow cadvisor port in SELinux on RedHat OS family
community.general.seport:
ports: "{{ cadvisor_port }}"
proto: tcp
setype: http_port_t
state: present
when:
- ansible_version.full is version_compare('2.4', '>=')
- ansible_selinux.status == "enabled"

View file

@ -1,59 +0,0 @@
---
- name: Create the cadvisor group
ansible.builtin.group:
name: "{{ cadvisor_system_group }}"
state: present
system: true
when: cadvisor_system_group != "root"
- name: Create the cadvisor user
ansible.builtin.user:
name: "{{ cadvisor_system_user }}"
groups: "{{ cadvisor_system_group }}"
append: true
shell: /usr/sbin/nologin
system: true
create_home: false
home: /
when: cadvisor_system_user != "root"
- name: Get binary
when:
- cadvisor_binary_local_dir | length == 0
- not cadvisor_skip_install
block:
- name: Download cadvisor binary to local folder
become: false
ansible.builtin.get_url:
url: "{{ cadvisor_binary_url }}"
dest: "{{ cadvisor_archive_path }}/cadvisor-v{{ cadvisor_version }}-linux-{{ go_arch }}"
mode: '0644'
register: _download_binary
until: _download_binary is succeeded
retries: 5
delay: 2
delegate_to: localhost
check_mode: false
- name: Propagate cadvisor binaries
ansible.builtin.copy:
src: "{{ cadvisor_archive_path }}/cadvisor-v{{ cadvisor_version }}-linux-{{ go_arch }}"
dest: "{{ cadvisor_binary_install_dir }}/cadvisor"
mode: 0755
owner: root
group: root
notify: restart cadvisor
when: not ansible_check_mode
- name: Propagate locally distributed cadvisor binary
ansible.builtin.copy:
src: "{{ cadvisor_binary_local_dir }}/cadvisor"
dest: "{{ cadvisor_binary_install_dir }}/cadvisor"
mode: 0755
owner: root
group: root
when:
- cadvisor_binary_local_dir | length > 0
- not cadvisor_skip_install
notify: restart cadvisor

View file

@ -2,51 +2,41 @@
- name: Preflight
ansible.builtin.include_tasks:
file: preflight.yml
apply:
tags:
- cadvisor_install
- cadvisor_configure
- cadvisor_run
tags:
- cadvisor_install
- cadvisor_configure
- cadvisor_run
- name: Install
ansible.builtin.include_tasks:
file: install.yml
apply:
become: true
tags:
- cadvisor_install
when:
( not __cadvisor_is_installed.stat.exists ) or
( (__cadvisor_current_version_output.stderr_lines | length > 0)
and ((__cadvisor_current_version_output.stderr_lines[0].split(" ")[2] | replace('v', '')) != cadvisor_version) ) or
( (__cadvisor_current_version_output.stdout_lines | length > 0)
and ((__cadvisor_current_version_output.stdout_lines[0].split(" ")[2] | replace('v', '')) != cadvisor_version) ) or
( cadvisor_binary_local_dir | length > 0 )
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: install.yml
vars:
_common_local_cache_path: "{{ cadvisor_local_cache_path }}"
_common_binary_name: "{{ _cadvisor_binaries | first }}"
_common_binaries: "{{ _cadvisor_binaries }}"
_common_binary_install_dir: "{{ cadvisor_binary_install_dir }}"
_common_binary_url: "{{ cadvisor_binary_url }}"
_common_system_group: "{{ cadvisor_system_group }}"
_common_system_user: "{{ cadvisor_system_user }}"
_common_config_dir: "{{ cadvisor_config_dir }}"
tags:
- cadvisor_install
- name: SELinux
ansible.builtin.include_tasks:
file: selinux.yml
apply:
become: true
tags:
- cadvisor_configure
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: selinux.yml
vars:
_common_selinux_port: "{{ cadvisor_port }}"
when: ansible_selinux.status == "enabled"
tags:
- cadvisor_configure
- name: Configure
ansible.builtin.include_tasks:
file: configure.yml
apply:
become: true
tags:
- cadvisor_configure
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: configure.yml
tags:
- cadvisor_configure

View file

@ -1,41 +1,8 @@
---
- name: Assert usage of systemd as an init system
ansible.builtin.assert:
that: ansible_service_mgr == 'systemd'
msg: "This role only works with systemd"
- name: Install package fact dependencies
become: true
ansible.builtin.package:
name: "{{ _pkg_fact_req }}"
state: present
when: (_pkg_fact_req)
vars:
_pkg_fact_req: "{% if (ansible_pkg_mgr == 'apt') %}\
{{ ('python-apt' if ansible_python_version is version('3', '<') else 'python3-apt') }}
{% else %}\
{% endif %}"
- name: Gather package facts
ansible.builtin.package_facts:
when: "not 'packages' in ansible_facts"
- name: Check if cadvisor is installed
ansible.builtin.stat:
path: "{{ cadvisor_binary_install_dir }}/cadvisor"
register: __cadvisor_is_installed
check_mode: false
tags:
- cadvisor_install
- name: Gather currently installed cadvisor version (if any)
ansible.builtin.command: "{{ cadvisor_binary_install_dir }}/cadvisor --version"
changed_when: false
register: __cadvisor_current_version_output
check_mode: false
when: __cadvisor_is_installed.stat.exists
tags:
- cadvisor_install
- name: Common preflight
ansible.builtin.include_role:
name: prometheus.prometheus._common
tasks_from: preflight.yml
- name: Discover latest version
ansible.builtin.set_fact:
@ -46,5 +13,9 @@
retries: 10
when:
- cadvisor_version == "latest"
- cadvisor_binary_local_dir | length == 0
- not cadvisor_skip_install
tags:
- cadvisor
- install
- cadvisor_install
- download
- cadvisor_download

View file

@ -1,23 +0,0 @@
---
- name: Install selinux python packages [RedHat]
ansible.builtin.package:
name: "{{ ['libselinux-python', 'policycoreutils-python']
if ansible_python_version is version('3', '<') else
['python3-libselinux', 'python3-policycoreutils'] }}"
state: present
register: _install_selinux_packages
until: _install_selinux_packages is success
retries: 5
delay: 2
when: ansible_os_family | lower == "redhat"
- name: Install selinux python packages [clearlinux]
ansible.builtin.package:
name: sysadmin-basic
state: present
register: _install_selinux_packages
until: _install_selinux_packages is success
retries: 5
delay: 2
when:
- ansible_distribution | lower == "clearlinux"

View file

@ -18,6 +18,13 @@ ExecStart={{ cadvisor_binary_install_dir }}/cadvisor \
{% if cadvisor_docker_only %}
'--docker_only={{ cadvisor_docker_only | lower }}' \
{% endif -%}
{% if cadvisor_whitelisted_container_labels | length > 0 %}
'--whitelisted_container_labels={{ cadvisor_whitelisted_container_labels | join(',') }}' \
{% endif -%}
{% if cadvisor_env_metadata_whitelist | length > 0 %}
'--env_metadata_whitelist={{ cadvisor_env_metadata_whitelist | join(',') }}' \
{% endif %}
'--store_container_labels={{ cadvisor_store_container_labels | lower }}' \
'--listen_ip={{ cadvisor_listen_ip }}' \
'--port={{ cadvisor_port }}' \
'--prometheus_endpoint={{ cadvisor_prometheus_endpoint }}'
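The three new conditionals map directly onto the new role variables; each list is joined with commas before being passed to cadvisor. A sketch of group_vars that exercises them, mirroring the molecule scenario above:

cadvisor_store_container_labels: false
cadvisor_whitelisted_container_labels:
  - "com.docker.compose.image"
cadvisor_env_metadata_whitelist:
  - "PATH"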

View file

@ -1,10 +1,9 @@
---
go_arch_map:
x86_64: 'amd64'
aarch64: 'arm64'
armv7l: 'arm'
armv6l: 'arm'
go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}"
_cadvisor_go_ansible_arch: "{{ {'i386': '386',
'x86_64': 'amd64',
'aarch64': 'arm64',
'armv7l': 'armv7',
'armv6l': 'armv6'}.get(ansible_architecture, ansible_architecture) }}"
_cadvisor_repo: "google/cadvisor"
_github_api_headers: "{{ {'GITHUB_TOKEN': lookup('ansible.builtin.env', 'GITHUB_TOKEN')} if (lookup('ansible.builtin.env', 'GITHUB_TOKEN')) else {} }}"
_cadvisor_binaries: ['cadvisor']
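The _github_api_headers expression resolves to a one-key dict when a GITHUB_TOKEN environment variable is set on the controller, and to an empty dict otherwise. Roughly, as a sketch:

# with GITHUB_TOKEN present in the controller environment
_github_api_headers:
  GITHUB_TOKEN: "<value of the GITHUB_TOKEN env var>"
# with the variable unset or empty, the lookup is falsy and the result is {}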

View file

@ -1,10 +1,8 @@
---
chrony_exporter_version: 0.9.2
chrony_exporter_binary_local_dir: ""
chrony_exporter_version: 0.10.1
chrony_exporter_binary_url: "https://github.com/{{ _chrony_exporter_repo }}/releases/download/v{{ chrony_exporter_version }}/\
chrony_exporter-{{ chrony_exporter_version }}.linux-{{ go_arch }}.tar.gz"
chrony_exporter-{{ chrony_exporter_version }}.{{ ansible_system | lower }}-{{ _chrony_exporter_go_ansible_arch }}.tar.gz"
chrony_exporter_checksums_url: "https://github.com/{{ _chrony_exporter_repo }}/releases/download/v{{ chrony_exporter_version }}/sha256sums.txt"
chrony_exporter_skip_install: false
chrony_exporter_web_listen_address: "0.0.0.0:9123"
chrony_exporter_web_telemetry_path: "/metrics"
@ -21,9 +19,12 @@ chrony_exporter_enabled_collectors: []
chrony_exporter_disabled_collectors: []
chrony_exporter_binary_install_dir: "/usr/local/bin"
chrony_exporter_system_group: "chrony-exp"
chrony_exporter_system_user: "{{ chrony_exporter_system_group }}"
chrony_exporter_system_user: "chrony-exp"
chrony_exporter_system_group: "{{ chrony_exporter_system_user }}"
# Local path to stash the archive and its extraction
chrony_exporter_archive_path: /tmp
chrony_exporter_local_cache_path: "/tmp/chrony_exporter-{{ ansible_system | lower }}-{{ _chrony_exporter_go_ansible_arch }}/{{ chrony_exporter_version }}"
chrony_exporter_binary_install_dir: "/usr/local/bin"
chrony_exporter_config_dir: "/etc/chrony_exporter"
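With the per-role arch variable replacing the shared go_arch, the download URL and cache path now resolve per host. Assuming _chrony_exporter_repo is superq/chrony_exporter (as the molecule configs below use) and that the chrony_exporter arch map sends x86_64 to amd64 like the cadvisor map above, an x86_64 Linux target would resolve to:

# resolved values on an x86_64 Linux host (sketch)
chrony_exporter_binary_url: "https://github.com/superq/chrony_exporter/releases/download/v0.10.1/chrony_exporter-0.10.1.linux-amd64.tar.gz"
chrony_exporter_local_cache_path: "/tmp/chrony_exporter-linux-amd64/0.10.1"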

View file

@ -10,19 +10,10 @@ argument_specs:
options:
chrony_exporter_version:
description: "Chrony exporter package version. Also accepts latest as parameter."
default: "0.9.2"
chrony_exporter_skip_install:
description: "Chrony exporter installation tasks gets skipped when set to true."
type: bool
default: false
chrony_exporter_binary_local_dir:
description:
- "Enables the use of local packages instead of those distributed on github."
- "The parameter may be set to a directory where the C(chrony_exporter) binary is stored on the host where ansible is run."
- "This overrides the I(chrony_exporter_version) parameter"
default: "0.10.1"
chrony_exporter_binary_url:
description: "URL of the chrony_exporter binaries .tar.gz file"
default: "https://github.com/{{ _chrony_exporter_repo }}/releases/download/v{{ chrony_exporter_version }}/chrony_exporter-{{ chrony_exporter_version }}.linux-{{ go_arch }}.tar.gz"
default: "https://github.com/{{ _chrony_exporter_repo }}/releases/download/v{{ chrony_exporter_version }}/chrony_exporter-{{ chrony_exporter_version }}.{{ ansible_system | lower }}-{{ _chrony_exporter_go_ansible_arch }}.tar.gz"
chrony_exporter_checksums_url:
description: "URL of the chrony_exporter checksums file"
default: "https://github.com/{{ _chrony_exporter_repo }}/releases/download/v{{ chrony_exporter_version }}/sha256sums.txt"
@ -58,11 +49,6 @@ argument_specs:
chrony_exporter_basic_auth_users:
description: "Dictionary of users and password for basic authentication. Passwords are automatically hashed with bcrypt."
type: "dict"
chrony_exporter_binary_install_dir:
description:
- "I(Advanced)"
- "Directory to install chrony_exporter binary"
default: "/usr/local/bin"
chrony_exporter_system_group:
description:
- "I(Advanced)"
@ -73,6 +59,14 @@ argument_specs:
- "I(Advanced)"
- "Chrony exporter user"
default: "chrony-exp"
chrony_exporter_archive_path:
chrony_exporter_binary_install_dir:
description:
- "I(Advanced)"
- "Directory to install binaries"
default: "/usr/local/bin"
chrony_exporter_local_cache_path:
description: 'Local path to stash the archive and its extraction'
default: "/tmp"
default: "/tmp/chrony_exporter-{{ ansible_system | lower }}-{{ _chrony_exporter_go_ansible_arch }}/{{ chrony_exporter_version }}"
chrony_exporter_config_dir:
description: "Path to directory with chrony_exporter configuration"
default: "/etc/chrony_exporter"

View file

@ -9,19 +9,14 @@ galaxy_info:
versions:
- "focal"
- "jammy"
- "noble"
- name: "Debian"
versions:
- "bullseye"
- "buster"
- name: "EL"
versions:
- "7"
- "8"
- "9"
- name: "Fedora"
versions:
- "37"
- '38'
galaxy_tags:
- "monitoring"
- "prometheus"

View file

@ -1,9 +1,12 @@
---
provisioner:
playbooks:
prepare: "${MOLECULE_PROJECT_DIRECTORY}/../../.config/molecule/alternative/prepare.yml"
inventory:
group_vars:
all:
chrony_exporter_binary_local_dir: "/tmp/chrony_exporter-linux-amd64"
chrony_exporter_version: 0.6.0
chrony_exporter_local_cache_path: "/tmp/chrony_exporter-linux-amd64/{{ chrony_exporter_version }}"
chrony_exporter_web_listen_address:
- '127.0.0.1:8080'
- '127.0.1.1:8080'
@ -11,7 +14,6 @@ provisioner:
- sources
chrony_exporter_disabled_collectors:
- tracking
chrony_exporter_tls_server_config:
cert_file: /etc/chrony_exporter/tls.cert
key_file: /etc/chrony_exporter/tls.key
@ -19,5 +21,5 @@ provisioner:
http2: true
chrony_exporter_basic_auth_users:
randomuser: examplepassword
go_arch: amd64
chrony_exporter_version: 0.6.0
chrony_exporter_binary_url: "https://github.com/superq/chrony_exporter/releases/download/v{{ chrony_exporter_version\
\ }}/chrony_exporter-{{ chrony_exporter_version }}.linux-amd64.tar.gz"

View file

@ -1,79 +0,0 @@
---
- name: Run local preparation
hosts: localhost
gather_facts: false
tasks:
- name: Download chrony_exporter binary to local folder
become: false
ansible.builtin.get_url:
url: "https://github.com/superq/chrony_exporter/releases/download/v{{ chrony_exporter_version\
\ }}/chrony_exporter-{{ chrony_exporter_version }}.linux-{{ go_arch }}.tar.gz"
dest: "/tmp/chrony_exporter-{{ chrony_exporter_version }}.linux-{{ go_arch\
\ }}.tar.gz"
mode: 0644
register: _download_binary
until: _download_binary is succeeded
retries: 5
delay: 2
check_mode: false
- name: Unpack chrony_exporter binary
become: false
ansible.builtin.unarchive:
src: "/tmp/chrony_exporter-{{ chrony_exporter_version }}.linux-{{ go_arch\
\ }}.tar.gz"
dest: "/tmp"
creates: "/tmp/chrony_exporter-{{ chrony_exporter_version }}.linux-{{ go_arch\
\ }}/chrony_exporter"
check_mode: false
- name: Link to chrony_exporter binaries directory
become: false
ansible.builtin.file:
src: "/tmp/chrony_exporter-{{ chrony_exporter_version }}.linux-amd64"
dest: "/tmp/chrony_exporter-linux-amd64"
state: link
check_mode: false
- name: Install pyOpenSSL for certificate generation
ansible.builtin.pip:
name: "pyOpenSSL"
- name: Create private key
community.crypto.openssl_privatekey:
path: "/tmp/tls.key"
- name: Create CSR
community.crypto.openssl_csr:
path: "/tmp/tls.csr"
privatekey_path: "/tmp/tls.key"
- name: Create certificate
community.crypto.x509_certificate:
path: "/tmp/tls.cert"
csr_path: "/tmp/tls.csr"
privatekey_path: "/tmp/tls.key"
provider: selfsigned
- name: Run target preparation
hosts: all
any_errors_fatal: true
tasks:
- name: Create chrony_exporter cert dir
ansible.builtin.file:
path: "{{ chrony_exporter_tls_server_config.cert_file | dirname }}"
state: directory
owner: root
group: root
mode: u+rwX,g+rwX,o=rX
- name: Copy cert and key
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "{{ item.mode | default('0644') }}"
loop:
- src: "/tmp/tls.cert"
dest: "{{ chrony_exporter_tls_server_config.cert_file }}"
- src: "/tmp/tls.key"
dest: "{{ chrony_exporter_tls_server_config.key_file }}"

View file

@ -1,12 +1,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
import pytest
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
def test_directories(host):

View file

@ -1,11 +1,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import testinfra.utils.ansible_runner
from testinfra_helpers import get_target_hosts
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
testinfra_hosts = get_target_hosts()
def test_directories(host):
@ -48,7 +46,6 @@ def test_user(host):
assert host.group("chrony-exp").exists
assert "chrony-exp" in host.user("chrony-exp").groups
assert host.user("chrony-exp").shell == "/usr/sbin/nologin"
assert host.user("chrony-exp").home == "/"
def test_service(host):

Some files were not shown because too many files have changed in this diff.