Merge branch 'main' into rust-dep-info

Signed-off-by: C0D3 M4513R <28912031+C0D3-M4513R@users.noreply.github.com>
Commit 8e0f6934c8 by C0D3 M4513R, 2024-09-12 22:48:00 +02:00 (committed via GitHub)
256 changed files with 14091 additions and 3009 deletions

View file

@ -18,7 +18,7 @@ tools:
# used to sign mac binaries at release
- name: quill
version:
want: v0.4.1
want: v0.4.2
method: github-release
with:
repo: anchore/quill
@ -26,7 +26,7 @@ tools:
# used for linting
- name: golangci-lint
version:
want: v1.59.1
want: v1.60.3
method: github-release
with:
repo: golangci/golangci-lint
@ -34,7 +34,7 @@ tools:
# used for showing the changelog at release
- name: glow
version:
want: v1.5.1
want: v2.0.0
method: github-release
with:
repo: charmbracelet/glow
@ -42,7 +42,7 @@ tools:
# used for signing the checksums file at release
- name: cosign
version:
want: v2.2.4
want: v2.4.0
method: github-release
with:
repo: sigstore/cosign
@ -58,7 +58,7 @@ tools:
# used to release all artifacts
- name: goreleaser
version:
want: v2.0.1
want: v2.2.0
method: github-release
with:
repo: goreleaser/goreleaser
@ -111,7 +111,23 @@ tools:
# used for triggering a release
- name: gh
version:
want: v2.52.0
want: v2.55.0
method: github-release
with:
repo: cli/cli
# used to upload test fixture cache
- name: oras
version:
want: v1.2.0
method: github-release
with:
repo: oras-project/oras
# used to upload test fixture cache
- name: yq
version:
want: v4.44.3
method: github-release
with:
repo: mikefarah/yq

View file

@ -6,6 +6,7 @@ permit:
- MPL.*
- ISC
- WTFPL
- Unlicense
ignore-packages:
# packageurl-go is released under the MIT license located in the root of the repo at /mit.LICENSE

View file

@ -1,6 +1,6 @@
contact_links:
- name: Join the Slack community 💬
# link to our community Slack registration page
url: https://anchore.com/slack
- name: Join our Discourse community 💬
# link to our community Discourse site
url: https://anchore.com/discourse
about: 'Come chat with us! Ask for help, join our software development efforts, or just give us feedback!'

View file

@ -13,16 +13,15 @@ inputs:
cache-key-prefix:
description: "Prefix all cache keys with this value"
required: true
default: "1ac8281053"
compute-fingerprints:
description: "Compute test fixture fingerprints"
default: "181053ac82"
download-test-fixture-cache:
description: "Download test fixture cache from OCI and github actions"
required: true
default: "true"
default: "false"
bootstrap-apt-packages:
description: "Space delimited list of tools to install via apt"
default: "libxml2-utils"
runs:
using: "composite"
steps:
@ -54,8 +53,14 @@ runs:
run: |
DEBIAN_FRONTEND=noninteractive sudo apt update && sudo -E apt install -y ${{ inputs.bootstrap-apt-packages }}
- name: Create all cache fingerprints
if: inputs.compute-fingerprints == 'true'
shell: bash
run: make fingerprints
- name: Restore ORAS cache from github actions
if: inputs.download-test-fixture-cache == 'true'
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
with:
path: ${{ github.workspace }}/.tmp/oras-cache
key: ${{ inputs.cache-key-prefix }}-oras-cache
- name: Download test fixture cache
if: inputs.download-test-fixture-cache == 'true'
shell: bash
run: make download-test-fixture-cache

.github/scripts/find_cache_paths.py (vendored executable file, 135 lines)
View file

@ -0,0 +1,135 @@
#!/usr/bin/env python3
from __future__ import annotations
import os
import glob
import sys
import json
import hashlib
IGNORED_PREFIXES = []
def find_fingerprints_and_check_dirs(base_dir):
all_fingerprints = set(glob.glob(os.path.join(base_dir, '**', 'test*', '**', '*.fingerprint'), recursive=True))
all_fingerprints = {os.path.relpath(fp) for fp in all_fingerprints
if not any(fp.startswith(prefix) for prefix in IGNORED_PREFIXES)}
if not all_fingerprints:
show("No .fingerprint files or cache directories found.")
exit(1)
missing_content = []
valid_paths = set()
fingerprint_contents = []
for fingerprint in all_fingerprints:
path = fingerprint.replace('.fingerprint', '')
if not os.path.exists(path):
missing_content.append(path)
continue
if not os.path.isdir(path):
valid_paths.add(path)
continue
if os.listdir(path):
valid_paths.add(path)
else:
missing_content.append(path)
with open(fingerprint, 'r') as f:
content = f.read().strip()
fingerprint_contents.append((fingerprint, content))
return sorted(valid_paths), missing_content, fingerprint_contents
def parse_fingerprint_contents(fingerprint_content):
input_map = {}
for line in fingerprint_content.splitlines():
digest, path = line.split()
input_map[path] = digest
return input_map
def calculate_sha256(fingerprint_contents):
sorted_fingerprint_contents = sorted(fingerprint_contents, key=lambda x: x[0])
concatenated_contents = ''.join(content for _, content in sorted_fingerprint_contents)
sha256_hash = hashlib.sha256(concatenated_contents.encode()).hexdigest()
return sha256_hash
def calculate_file_sha256(file_path):
sha256_hash = hashlib.sha256()
with open(file_path, 'rb') as f:
for byte_block in iter(lambda: f.read(4096), b""):
sha256_hash.update(byte_block)
return sha256_hash.hexdigest()
def show(*s: str):
print(*s, file=sys.stderr)
def main(file_path: str | None):
base_dir = '.'
valid_paths, missing_content, fingerprint_contents = find_fingerprints_and_check_dirs(base_dir)
if missing_content:
show("The following paths are missing or have no content, but have corresponding .fingerprint files:")
for path in sorted(missing_content):
show(f"- {path}")
show("Please ensure these paths exist and have content if they are directories.")
exit(1)
sha256_hash = calculate_sha256(fingerprint_contents)
paths_with_digests = []
for path in sorted(valid_paths):
fingerprint_file = f"{path}.fingerprint"
try:
if os.path.exists(fingerprint_file):
file_digest = calculate_file_sha256(fingerprint_file)
# Parse the fingerprint file to get the digest/path tuples
with open(fingerprint_file, 'r') as f:
fingerprint_content = f.read().strip()
input_map = parse_fingerprint_contents(fingerprint_content)
paths_with_digests.append({
"path": path,
"digest": file_digest,
"input": input_map
})
except Exception as e:
show(f"Error processing {fingerprint_file}: {e}")
raise e
output = {
"digest": sha256_hash,
"paths": paths_with_digests
}
content = json.dumps(output, indent=2, sort_keys=True)
if file_path:
with open(file_path, 'w') as f:
f.write(content)
print(content)
if __name__ == "__main__":
file_path = None
if len(sys.argv) > 1:
file_path = sys.argv[1]
main(file_path)
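
For orientation, the JSON written by this script carries one top-level digest (the sha256 over all fingerprint contents, sorted by filename) plus one entry per cached path with its own digest and per-input digests. A consumer in Go could model it roughly as below; this struct is a sketch inferred from the script, not code from this commit, and the .tmp/cache_paths.json path comes from the Taskfile changes later in this diff.

    // Sketch only: a Go shape for the JSON emitted by find_cache_paths.py.
    package main

    import (
        "encoding/json"
        "fmt"
        "os"
    )

    type cacheEntry struct {
        Path   string            `json:"path"`   // directory or file backed by a .fingerprint
        Digest string            `json:"digest"` // sha256 of the .fingerprint file itself
        Input  map[string]string `json:"input"`  // input path -> digest, parsed from the fingerprint
    }

    type cachePaths struct {
        Digest string       `json:"digest"` // sha256 over all fingerprint contents
        Paths  []cacheEntry `json:"paths"`
    }

    func main() {
        data, err := os.ReadFile(".tmp/cache_paths.json")
        if err != nil {
            panic(err)
        }
        var cp cachePaths
        if err := json.Unmarshal(data, &cp); err != nil {
            panic(err)
        }
        fmt.Println("overall cache digest:", cp.Digest)
    }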

View file

@ -0,0 +1,70 @@
#!/usr/bin/env python3
import os
import subprocess
import hashlib
BOLD = '\033[1m'
YELLOW = '\033[0;33m'
RESET = '\033[0m'
def print_message(message):
print(f"{YELLOW}{message}{RESET}")
def sha256sum(filepath):
h = hashlib.sha256()
with open(filepath, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b""):
h.update(chunk)
return h.hexdigest()
def is_git_tracked_or_untracked(directory):
"""Returns a sorted list of files in the directory that are tracked or not ignored by Git."""
result = subprocess.run(
["git", "ls-files", "--cached", "--others", "--exclude-standard"],
cwd=directory,
stdout=subprocess.PIPE,
text=True
)
return sorted(result.stdout.strip().splitlines())
def find_test_fixture_dirs_with_images(base_dir):
"""Find directories that contain 'test-fixtures' and at least one 'image-*' directory."""
for root, dirs, files in os.walk(base_dir):
if 'test-fixtures' in root:
image_dirs = [d for d in dirs if d.startswith('image-')]
if image_dirs:
yield os.path.realpath(root)
def generate_fingerprints():
print_message("creating fingerprint files for docker fixtures...")
for test_fixture_dir in find_test_fixture_dirs_with_images('.'):
cache_fingerprint_path = os.path.join(test_fixture_dir, 'cache.fingerprint')
with open(cache_fingerprint_path, 'w') as fingerprint_file:
for image_dir in find_image_dirs(test_fixture_dir):
for file in is_git_tracked_or_untracked(image_dir):
file_path = os.path.join(image_dir, file)
checksum = sha256sum(file_path)
path_from_fixture_dir = os.path.relpath(file_path, test_fixture_dir)
fingerprint_file.write(f"{checksum} {path_from_fixture_dir}\n")
def find_image_dirs(test_fixture_dir):
"""Find all 'image-*' directories inside a given test-fixture directory."""
result = []
for root, dirs, files in os.walk(test_fixture_dir):
for dir_name in dirs:
if dir_name.startswith('image-'):
result.append(os.path.join(root, dir_name))
return sorted(result)
if __name__ == "__main__":
generate_fingerprints()
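
Each cache.fingerprint produced here holds one line per tracked file in the form "<sha256> <path relative to the test-fixtures dir>", the same format that parse_fingerprint_contents() in find_cache_paths.py splits back apart. A minimal Go sketch of reading one back (illustrative only; the filename is hypothetical):

    // Sketch only: reading a cache.fingerprint file written by the script above.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "strings"
    )

    func main() {
        f, err := os.Open("test-fixtures/cache.fingerprint") // hypothetical path
        if err != nil {
            panic(err)
        }
        defer f.Close()
        sc := bufio.NewScanner(f)
        for sc.Scan() {
            fields := strings.Fields(sc.Text()) // "<digest> <relative path>"
            if len(fields) == 2 {
                fmt.Printf("path=%s digest=%s\n", fields[1], fields[0])
            }
        }
    }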

.github/scripts/labeler.py (vendored, normal file → executable file, 2 lines)
View file

@ -1,3 +1,5 @@
#!/usr/bin/env python3
from __future__ import annotations
import sys

.github/scripts/labeler_test.py (vendored, normal file → executable file, 2 lines)
View file

@ -1,3 +1,5 @@
#!/usr/bin/env python3
import unittest
from unittest.mock import patch
import subprocess

View file

@ -4,12 +4,19 @@ set -eu
bold=$(tput bold)
normal=$(tput sgr0)
if ! [ -x "$(command -v gh)" ]; then
echo "The GitHub CLI could not be found. To continue follow the instructions at https://github.com/cli/cli#installation"
GH_CLI=.tool/gh
if ! [ -x "$(command -v $GH_CLI)" ]; then
echo "The GitHub CLI could not be found. run: make bootstrap"
exit 1
fi
gh auth status
$GH_CLI auth status
# set the default repo in cases where multiple remotes are defined
$GH_CLI repo set-default anchore/syft
export GITHUB_TOKEN="${GITHUB_TOKEN-"$($GH_CLI auth token)"}"
# we need all of the git state to determine the next version. Since tagging is done by
# the release pipeline it is possible to not have all of the tags from previous releases.
@ -37,7 +44,7 @@ done
echo "${bold}Kicking off release for ${NEXT_VERSION}${normal}..."
echo
gh workflow run release.yaml -f version=${NEXT_VERSION}
$GH_CLI workflow run release.yaml -f version=${NEXT_VERSION}
echo
echo "${bold}Waiting for release to start...${normal}"
@ -45,6 +52,6 @@ sleep 10
set +e
echo "${bold}Head to the release workflow to monitor the release:${normal} $(gh run list --workflow=release.yaml --limit=1 --json url --jq '.[].url')"
id=$(gh run list --workflow=release.yaml --limit=1 --json databaseId --jq '.[].databaseId')
gh run watch $id --exit-status || (echo ; echo "${bold}Logs of failed step:${normal}" && GH_PAGER="" gh run view $id --log-failed)
echo "${bold}Head to the release workflow to monitor the release:${normal} $($GH_CLI run list --workflow=release.yaml --limit=1 --json url --jq '.[].url')"
id=$($GH_CLI run list --workflow=release.yaml --limit=1 --json databaseId --jq '.[].databaseId')
$GH_CLI run watch $id --exit-status || (echo ; echo "${bold}Logs of failed step:${normal}" && GH_PAGER="" $GH_CLI run view $id --log-failed)

View file

@ -39,7 +39,7 @@ jobs:
OUTPUT="${OUTPUT//$'\r'/'%0D'}" # URL encode all '\r' characters
echo "result=$OUTPUT" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: benchmark-test-results
path: test/results/**/*

View file

@ -39,13 +39,13 @@ jobs:
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
- name: Install Go
uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 #v5.0.1
uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 #v5.0.2
with:
go-version-file: go.mod
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@b611370bb5703a7efb587f9d136a52ea24c5c38c #v3.25.11
uses: github/codeql-action/init@4dd16135b69a43b6c8efb853346f8437d92d3c93 #v3.26.6
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@ -56,7 +56,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@b611370bb5703a7efb587f9d136a52ea24c5c38c #v3.25.11
uses: github/codeql-action/autobuild@4dd16135b69a43b6c8efb853346f8437d92d3c93 #v3.26.6
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@ -70,4 +70,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@b611370bb5703a7efb587f9d136a52ea24c5c38c #v3.25.11
uses: github/codeql-action/analyze@4dd16135b69a43b6c8efb853346f8437d92d3c93 #v3.26.6

View file

@ -1,4 +1,4 @@
name: "Release"
name: "Release: version file"
on:

View file

@ -113,13 +113,13 @@ jobs:
uses: ./.github/actions/bootstrap
- name: Login to Docker Hub
uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 #v3.2.0
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 #v3.3.0
with:
username: ${{ secrets.ANCHOREOSSWRITE_DH_USERNAME }}
password: ${{ secrets.ANCHOREOSSWRITE_DH_PAT }}
- name: Login to GitHub Container Registry
uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 #v3.2.0
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 #v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@ -148,7 +148,7 @@ jobs:
# for updating brew formula in anchore/homebrew-syft
GITHUB_BREW_TOKEN: ${{ secrets.ANCHOREOPS_GITHUB_OSS_WRITE_TOKEN }}
- uses: anchore/sbom-action@e8d2a6937ecead383dfe75190d104edd1f9c5751 #v0.16.0
- uses: anchore/sbom-action@61119d458adab75f756bc0b9e4bde25725f86a7a #v0.17.2
continue-on-error: true
with:
artifact-name: sbom.spdx.json

View file

@ -0,0 +1,39 @@
name: "Test fixture cache: publish"
on:
workflow_dispatch:
schedule:
# run nightly at 4AM UTC
- cron: "0 4 * * *"
permissions:
contents: read
jobs:
Publish:
name: "Publish test fixture image cache"
# we use this runner to get enough storage space for docker images and fixture cache
runs-on: ubuntu-22.04-4core-16gb
permissions:
packages: write
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
with:
# we want to rebuild the cache with no previous state
download-test-fixture-cache: false
- name: Run all tests
run: make test
env:
# we want to rebuild the cache with no previous state
DOWNLOAD_TEST_FIXTURE_CACHE: "false"
- name: Login to GitHub Container Registry (ORAS)
run: echo "${{ secrets.GITHUB_TOKEN }}" | .tool/oras login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Publish test fixture cache
run: make upload-test-fixture-cache

View file

@ -19,7 +19,6 @@ jobs:
uses: ./.github/actions/bootstrap
with:
bootstrap-apt-packages: ""
compute-fingerprints: "false"
go-dependencies: false
- name: "Update tool versions"
@ -50,7 +49,7 @@ jobs:
app_id: ${{ secrets.TOKEN_APP_ID }}
private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
- uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c #v6.1.0
- uses: peter-evans/create-pull-request@8867c4aba1b742c39f8d0ba35429c2dfa4b6cb20 #v7.0.1
with:
signoff: true
delete-branch: true

View file

@ -33,7 +33,7 @@ jobs:
app_id: ${{ secrets.TOKEN_APP_ID }}
private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
- uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c #v6.1.0
- uses: peter-evans/create-pull-request@8867c4aba1b742c39f8d0ba35429c2dfa4b6cb20 #v7.0.1
with:
signoff: true
delete-branch: true

View file

@ -19,7 +19,7 @@ jobs:
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
- uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 #v5.0.1
- uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 #v5.0.2
with:
go-version: ${{ env.GO_VERSION }}
stable: ${{ env.GO_STABLE_VERSION }}
@ -44,7 +44,7 @@ jobs:
app_id: ${{ secrets.TOKEN_APP_ID }}
private_key: ${{ secrets.TOKEN_APP_PRIVATE_KEY }}
- uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c #v6.1.0
- uses: peter-evans/create-pull-request@8867c4aba1b742c39f8d0ba35429c2dfa4b6cb20 #v7.0.1
with:
signoff: true
delete-branch: true

View file

@ -35,48 +35,8 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
- name: Restore file executable test-fixture cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: syft/file/cataloger/executable/test-fixtures/elf/bin
key: ${{ runner.os }}-unit-file-executable-elf-cache-${{ hashFiles( 'syft/file/cataloger/executable/test-fixtures/elf/cache.fingerprint' ) }}
- name: Restore file executable shared-info test-fixture cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: syft/file/cataloger/executable/test-fixtures/shared-info/bin
key: ${{ runner.os }}-unit-file-executable-shared-info-cache-${{ hashFiles( 'syft/file/cataloger/executable/test-fixtures/shared-info/cache.fingerprint' ) }}
- name: Restore Java test-fixture cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: syft/pkg/cataloger/java/test-fixtures/java-builds/packages
key: ${{ runner.os }}-unit-java-cache-${{ hashFiles( 'syft/pkg/cataloger/java/test-fixtures/java-builds/cache.fingerprint' ) }}
- name: Restore RPM test-fixture cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: syft/pkg/cataloger/redhat/test-fixtures/rpms
key: ${{ runner.os }}-unit-rpm-cache-${{ hashFiles( 'syft/pkg/cataloger/redhat/test-fixtures/rpms.fingerprint' ) }}
- name: Restore go binary test-fixture cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: syft/pkg/cataloger/golang/test-fixtures/archs/binaries
key: ${{ runner.os }}-unit-go-binaries-cache-${{ hashFiles( 'syft/pkg/cataloger/golang/test-fixtures/archs/binaries.fingerprint' ) }}
- name: Restore binary cataloger test-fixture cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: syft/pkg/cataloger/binary/test-fixtures/classifiers/bin
key: ${{ runner.os }}-unit-binary-cataloger-cache-${{ hashFiles( 'syft/pkg/cataloger/binary/test-fixtures/cache.fingerprint' ) }}
- name: Restore Kernel test-fixture cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: syft/pkg/cataloger/kernel/test-fixtures/cache
key: ${{ runner.os }}-unit-kernel-cache-${{ hashFiles( 'syft/pkg/cataloger/kernel/test-fixtures/cache.fingerprint' ) }}
download-test-fixture-cache: true
- name: Run unit tests
run: make unit
@ -91,16 +51,12 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
with:
download-test-fixture-cache: true
- name: Validate syft output against the CycloneDX schema
run: make validate-cyclonedx-schema
- name: Restore integration test cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: ${{ github.workspace }}/cmd/syft/internal/test/integration/test-fixtures/cache
key: ${{ runner.os }}-integration-test-cache-${{ hashFiles('/cmd/syft/internal/test/integration/test-fixtures/cache.fingerprint') }}
- name: Run integration tests
run: make integration
@ -143,6 +99,8 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
with:
download-test-fixture-cache: true
- name: Download snapshot build
id: snapshot-cache
@ -162,13 +120,6 @@ jobs:
- name: Run comparison tests (Linux)
run: make compare-linux
- name: Restore install.sh test image cache
id: install-test-image-cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: ${{ github.workspace }}/test/install/cache
key: ${{ runner.os }}-install-test-image-cache-${{ hashFiles('test/install/cache.fingerprint') }}
- name: Load test image cache
if: steps.install-test-image-cache.outputs.cache-hit == 'true'
run: make install-test-cache-load
@ -188,7 +139,7 @@ jobs:
runs-on: macos-latest
steps:
- name: Install Cosign
uses: sigstore/cosign-installer@v3.5.0
uses: sigstore/cosign-installer@v3.6.0
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
@ -196,8 +147,8 @@ jobs:
uses: ./.github/actions/bootstrap
with:
bootstrap-apt-packages: ""
compute-fingerprints: "false"
go-dependencies: false
download-test-fixture-cache: true
- name: Download snapshot build
id: snapshot-cache
@ -214,13 +165,6 @@ jobs:
if: steps.snapshot-cache.outputs.cache-hit != 'true'
run: echo "unable to download snapshots from previous job" && false
- name: Restore docker image cache for compare testing
id: mac-compare-testing-cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: image.tar
key: ${{ runner.os }}-${{ hashFiles('test/compare/mac.sh') }}
- name: Run comparison tests (Mac)
run: make compare-mac
@ -238,12 +182,8 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
- name: Restore CLI test-fixture cache
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
path: ${{ github.workspace }}/test/cli/test-fixtures/cache
key: ${{ runner.os }}-cli-test-cache-${{ hashFiles('test/cli/test-fixtures/cache.fingerprint') }}
download-test-fixture-cache: true
- name: Download snapshot build
id: snapshot-cache
@ -262,3 +202,22 @@ jobs:
- name: Run CLI Tests (Linux)
run: make cli
Cleanup-Cache:
name: "Cleanup snapshot cache"
if: github.event.pull_request.head.repo.full_name == github.repository
runs-on: ubuntu-20.04
permissions:
actions: write
needs:
- Acceptance-Linux
- Acceptance-Mac
- Cli-Linux
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
- name: Delete snapshot cache
run: gh cache delete "snapshot-build-${{ github.run_id }}"
env:
GH_TOKEN: ${{ github.token }}

View file

@ -12,10 +12,10 @@ linters:
enable:
- asciicheck
- bodyclose
- copyloopvar
- dogsled
- dupl
- errcheck
- exportloopref
- funlen
- gocognit
- goconst
@ -30,6 +30,7 @@ linters:
- ineffassign
- misspell
- nakedret
- nolintlint
- revive
- staticcheck
- stylecheck
@ -80,7 +81,6 @@ run:
# - lll # without a way to specify per-line exception cases, this is not usable
# - maligned # this is an excellent linter, but tricky to optimize and we are not sensitive to memory layout optimizations
# - nestif
# - nolintlint # as of go1.19 this conflicts with the behavior of gofmt, which is a deal-breaker (lint-fix will still fail when running lint)
# - prealloc # following this rule isn't consistently a good idea, as it sometimes forces unnecessary allocations that result in less idiomatic code
# - rowserrcheck # not in a repo with sql, so this is not useful
# - scopelint # deprecated
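
With nolintlint now enabled (and the old note about the go1.19/gofmt conflict dropped), the shape of nolint directives is linted as well, which is why comments elsewhere in this commit change from "// nolint:..." to "//nolint:...". A minimal illustration, assuming default nolintlint settings:

    // Sketch only: nolintlint wants machine-readable directives with no space after "//".
    package main

    import "os"

    func main() {
        os.Remove("does-not-exist") //nolint:errcheck // accepted: no space, specific linter named
        // os.Remove("does-not-exist") // nolint:errcheck   <- flagged: space after "//"
    }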

CODE_OF_CONDUCT.md (normal file, 128 lines)
View file

@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[opensource@anchore.com](mailto:opensource@anchore.com).
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

View file

@ -83,7 +83,7 @@ Syft's core library is implemented in the `syft` package and subpackages, where
- the `syft` package contains a single function that can take a `source.Source` object and catalog it, producing an `sbom.SBOM` object
- the `syft/format` package contains the ability to encode and decode SBOMs to and from different SBOM formats (such as SPDX and CycloneDX)
The `cmd` pacakge at the highest level execution flow wires up [`spf13/cobra`](https://github.com/spf13/cobra) commands for execution in the main application:
The `cmd` package at the highest level execution flow wires up [`spf13/cobra`](https://github.com/spf13/cobra) commands for execution in the main application:
```mermaid
sequenceDiagram
participant main as cmd/syft/main
@ -212,7 +212,7 @@ Finally, here is an example of where the package construction is done within the
- [The APK package constructor itself](https://github.com/anchore/syft/tree/v0.70.0/syft/pkg/cataloger/apkdb/package.go#L12-L27)
Interested in building a new cataloger? Checkout the [list of issues with the `new-cataloger` label](https://github.com/anchore/syft/issues?q=is%3Aopen+is%3Aissue+label%3Anew-cataloger+no%3Aassignee)!
If you have questions about implementing a cataloger feel free to file an issue or reach out to us [on slack](https://anchore.com/slack)!
If you have questions about implementing a cataloger feel free to file an issue or reach out to us [on discourse](https://anchore.com/discourse)!
#### Searching for files

View file

@ -25,8 +25,8 @@ ci-bootstrap-go:
# this is a bootstrapping catch-all, where if the target doesn't exist, we'll ensure the tools are installed and then try again
%:
make $(TASK)
$(TASK) $@
@make --silent $(TASK)
@$(TASK) $@
## Shim targets #################################

View file

@ -11,8 +11,8 @@
&nbsp;<a href="https://goreportcard.com/report/github.com/anchore/syft" target="_blank"><img alt="Go Report Card" src="https://goreportcard.com/badge/github.com/anchore/syft"></a>&nbsp;
&nbsp;<a href="https://github.com/anchore/syft/releases/latest" target="_blank"><img alt="GitHub release" src="https://img.shields.io/github/release/anchore/syft.svg"></a>&nbsp;
&nbsp;<a href="https://github.com/anchore/syft" target="_blank"><img alt="GitHub go.mod Go version" src="https://img.shields.io/github/go-mod/go-version/anchore/syft.svg"></a>&nbsp;
&nbsp;<a href"" target="_blank"><img alt="License: Apache-2.0" src="https://img.shields.io/badge/License-Apache%202.0-blue.svg"></a>&nbsp;
&nbsp;<a href="https://anchore.com/slack" target="_blank"><img alt="Slack" src="https://img.shields.io/badge/Slack-Join-blue?logo=slack"></a>&nbsp;
&nbsp;<a href="" target="_blank"><img alt="License: Apache-2.0" src="https://img.shields.io/badge/License-Apache%202.0-blue.svg"></a>&nbsp;
&nbsp;<a href="https://anchore.com/discourse" target="_blank"><img alt="Join our Discourse" src="https://img.shields.io/badge/Discourse-Join-blue?logo=discourse"/></a>&nbsp;
</p>
![syft-demo](https://user-images.githubusercontent.com/590471/90277200-2a253000-de33-11ea-893f-32c219eea11a.gif)
@ -168,6 +168,10 @@ Our [wiki](https://github.com/anchore/syft/wiki) contains further details on the
* [Adding an SBOM to an image as an attestation using Syft](https://github.com/anchore/syft/wiki/attestation#adding-an-sbom-to-an-image-as-an-attestation-using-syft)
* [Configuration](https://github.com/anchore/syft/wiki/configuration)
## Contributing
Check out our [contributing](/CONTRIBUTING.md) guide and [developer](/DEVELOPING.md) docs.
## Syft Team Meetings
The Syft Team hold regular community meetings online. All are welcome to join to bring topics for discussion.

View file

@ -4,9 +4,19 @@ vars:
OWNER: anchore
PROJECT: syft
CACHE_IMAGE: ghcr.io/{{ .OWNER }}/{{ .PROJECT }}/test-fixture-cache:latest
# static file dirs
TOOL_DIR: .tool
TMP_DIR: .tmp
ORAS_CACHE: "{{ .TMP_DIR }}/oras-cache"
CACHE_PATHS_FILE: "{{ .TMP_DIR }}/cache_paths.json"
LAST_CACHE_PULL_FILE: "{{ .TMP_DIR }}/last_cache_paths.json"
# TOOLS
ORAS: "{{ .TOOL_DIR }}/oras"
YQ: "{{ .TOOL_DIR }}/yq"
TASK: "{{ .TOOL_DIR }}/task"
# used for changelog generation
CHANGELOG: CHANGELOG.md
@ -33,6 +43,9 @@ vars:
COMPARE_DIR: ./test/compare
COMPARE_TEST_IMAGE: centos:8.2.2004
env:
GNUMAKEFLAGS: '--no-print-directory'
tasks:
## High-level tasks #################################
@ -65,6 +78,7 @@ tasks:
- task: benchmark
- task: test-utils
- task: cli
- task: check-docker-cache
## Bootstrap tasks #################################
@ -212,10 +226,6 @@ tasks:
# that the cache being restored with the correct binary will be rebuilt since the timestamps
# and local checksums will not line up.
deps: [tools, snapshot]
sources:
- "{{ .SNAPSHOT_BIN }}"
- ./test/cli/**
- ./**/*.go
cmds:
- cmd: "echo 'testing binary: {{ .SNAPSHOT_BIN }}'"
silent: true
@ -229,18 +239,14 @@ tasks:
test-utils:
desc: Run tests for pipeline utils
sources:
- .github/scripts/labeler*.py
cmds:
- cmd: python .github/scripts/labeler_test.py
- cmd: .github/scripts/labeler_test.py
## Benchmark test targets #################################
benchmark:
deps: [tmpdir]
sources:
- ./**/*.go
generates:
- "{{ .TMP_DIR }}/benchmark-main.txt"
cmds:
@ -253,8 +259,6 @@ tasks:
show-benchstat:
deps: [benchmark, tmpdir]
sources:
- "{{ .TMP_DIR }}/benchstat.txt"
cmds:
- cmd: "cat {{ .TMP_DIR }}/benchstat.txt"
silent: true
@ -263,56 +267,188 @@ tasks:
## Test-fixture-related targets #################################
fingerprints:
desc: Generate test fixture fingerprints
desc: Generate fingerprints for all non-docker test fixture
silent: true
# this will look for `test-fixtures/Makefile` and invoke the `fingerprint` target to calculate all cache input fingerprint files
generates:
- cmd/syft/internal/test/integration/test-fixtures/cache.fingerprint
- syft/file/cataloger/executable/test-fixtures/elf/cache.fingerprint
- syft/file/cataloger/executable/test-fixtures/shared-info/cache.fingerprint
- syft/pkg/cataloger/binary/test-fixtures/cache.fingerprint
- syft/pkg/cataloger/java/test-fixtures/java-builds/cache.fingerprint
- syft/pkg/cataloger/golang/test-fixtures/archs/binaries.fingerprint
- syft/pkg/cataloger/redhat/test-fixtures/rpms.fingerprint
- syft/pkg/cataloger/kernel/test-fixtures/cache.fingerprint
- '**/test-fixtures/**/*.fingerprint'
- test/install/cache.fingerprint
- test/cli/test-fixtures/cache.fingerprint
cmds:
# for EXECUTABLE unit test fixtures
- "cd syft/file/cataloger/executable/test-fixtures/elf && make cache.fingerprint"
- "cd syft/file/cataloger/executable/test-fixtures/shared-info && make cache.fingerprint"
# for IMAGE integration test fixtures
- "cd cmd/syft/internal/test/integration/test-fixtures && make cache.fingerprint"
# for BINARY unit test fixtures
- "cd syft/pkg/cataloger/binary/test-fixtures && make cache.fingerprint"
# for JAVA BUILD unit test fixtures
- "cd syft/pkg/cataloger/java/test-fixtures/java-builds && make cache.fingerprint"
# for GO BINARY unit test fixtures
- "cd syft/pkg/cataloger/golang/test-fixtures/archs && make binaries.fingerprint"
# for RPM unit test fixtures
- "cd syft/pkg/cataloger/redhat/test-fixtures && make rpms.fingerprint"
# for Kernel unit test fixtures
- "cd syft/pkg/cataloger/kernel/test-fixtures && make cache.fingerprint"
# for INSTALL test fixtures
- "cd test/install && make cache.fingerprint"
# for CLI test fixtures
- "cd test/cli/test-fixtures && make cache.fingerprint"
- |
BOLD='\033[1m'
YELLOW='\033[0;33m'
RESET='\033[0m'
fixtures:
desc: Generate test fixtures
echo -e "${YELLOW}creating fingerprint files for non-docker fixtures...${RESET}"
for dir in $(find . -type d -name 'test-fixtures'); do
if [ -f "$dir/Makefile" ]; then
# for debugging...
#echo -e "${YELLOW}• calculating fingerprints in $dir... ${RESET}"
(make -C "$dir" fingerprint)
fi
done
# for debugging...
# echo -e "generated all fixture fingerprints"
- .github/scripts/fingerprint_docker_fixtures.py
- |
# if DOWNLOAD_TEST_FIXTURE_CACHE is set to 'false', then we don't need to calculate the fingerprint for the cache
if [ "$DOWNLOAD_TEST_FIXTURE_CACHE" = "false" ]; then
exit 0
fi
.github/scripts/find_cache_paths.py {{ .CACHE_PATHS_FILE }} > /dev/null
refresh-fixtures:
desc: Clear and fetch all test fixture cache
aliases:
- fixtures
silent: true
deps:
- tools
cmds:
- "cd syft/file/cataloger/executable/test-fixtures/elf && make"
- "cd syft/file/cataloger/executable/test-fixtures/shared-info && make"
- "cd syft/pkg/cataloger/java/test-fixtures/java-builds && make"
- "cd syft/pkg/cataloger/redhat/test-fixtures && make"
- "cd syft/pkg/cataloger/binary/test-fixtures && make"
- |
BOLD='\033[1m'
PURPLE='\033[0;35m'
RESET='\033[0m'
# if DOWNLOAD_TEST_FIXTURE_CACHE is set to 'false', then skip the cache download and always build
if [ "$DOWNLOAD_TEST_FIXTURE_CACHE" = "false" ]; then
echo -e "${BOLD}${PURPLE}skipping cache download, rebuilding cache...${RESET}"
{{ .TASK }} build-fixtures
exit 0
fi
LATEST_FINGERPRINT=$(docker manifest inspect {{ .CACHE_IMAGE }} | {{ .YQ }} -r '.annotations.fingerprint')
echo "latest cache: $LATEST_FINGERPRINT"
if [ -f {{ .LAST_CACHE_PULL_FILE }} ]; then
LAST_PULL_FINGERPRINT=$(cat {{ .LAST_CACHE_PULL_FILE }} | {{ .YQ }} -r '.digest')
else
echo -e "${BOLD}${PURPLE}empty cache, downloading cache...${RESET}"
{{ .TASK }} download-test-fixture-cache
exit 0
fi
{{ .TASK }} fingerprints
WANT_FINGERPRINT=$(cat {{ .CACHE_PATHS_FILE }} | {{ .YQ }} -r '.digest')
echo "desired cache: $WANT_FINGERPRINT"
echo "last pulled cache: $LAST_PULL_FINGERPRINT"
# if we already have the latest cache, skip the refresh
if [ "$LAST_PULL_FINGERPRINT" = "$WANT_FINGERPRINT" ]; then
echo -e "${BOLD}${PURPLE}already have the latest cache (skipping cache download)${RESET}"
exit 0
fi
# at this point we only refresh the cache if we want the same cache that is currently available.
# we don't by default refresh the cache if the cache if it is simply different from what we have,
# because we may be working on a code change that doesn't require a cache refresh (but could trigger one,
# which would be annoying to deal with in a development workflow).
if [ "$LATEST_FINGERPRINT" = "$WANT_FINGERPRINT" ]; then
echo -e "${BOLD}${PURPLE}found newer cache! downloading cache...${RESET}"
{{ .TASK }} download-test-fixture-cache
else
echo -e "${BOLD}${PURPLE}found different cache, but isn't clear if it's newer (skipping cache download and manually building)${RESET}"
{{ .YQ }} eval '.paths[] | "\(.digest) \(.path)"' {{ .LAST_CACHE_PULL_FILE }} > .tmp/last_cache_lines
{{ .YQ }} eval '.paths[] | "\(.digest) \(.path)"' {{ .CACHE_PATHS_FILE }} > .tmp/cache_lines
diff .tmp/last_cache_lines .tmp/cache_lines || true
echo -e "${BOLD}${PURPLE}diff with more context...${RESET}"
diff -U10000 {{ .LAST_CACHE_PULL_FILE }} {{ .CACHE_PATHS_FILE }} || true
echo -e "${BOLD}${PURPLE}detected changes to input material, manually building fixtures...${RESET}"
{{ .TASK }} build-fixtures
fi
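
The shell above boils down to a small decision table: download the published cache when nothing has been pulled yet or when the published fingerprint matches what the current inputs need, skip when the last pull already matches, and rebuild locally otherwise (with DOWNLOAD_TEST_FIXTURE_CACHE=false forcing a rebuild before any of that). Restated as a sketch (illustrative, not code from this commit):

    // Sketch only: the cache-refresh decision made by the refresh-fixtures task above.
    package main

    import "fmt"

    // decide takes the fingerprint recorded at the last cache pull (empty if never pulled),
    // the fingerprint the current inputs require, and the fingerprint published on the
    // latest cache image.
    func decide(lastPull, want, latest string) string {
        switch {
        case lastPull == "":
            return "download" // empty cache: take whatever is published
        case lastPull == want:
            return "skip" // already have exactly what the inputs require
        case latest == want:
            return "download" // the published cache matches the current inputs
        default:
            return "build" // inputs changed locally: rebuild fixtures instead
        }
    }

    func main() {
        fmt.Println(decide("", "abc", "abc"))    // download
        fmt.Println(decide("abc", "abc", "old")) // skip
        fmt.Println(decide("old", "abc", "abc")) // download
        fmt.Println(decide("old", "abc", "xyz")) // build
    }
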
build-fixtures:
desc: Generate all non-docker test fixtures
silent: true
# this will look for `test-fixtures/Makefile` and invoke the `fixtures` target to generate any and all test fixtures
cmds:
- |
BOLD='\033[1m'
YELLOW='\033[0;33m'
RESET='\033[0m'
# Use a for loop with command substitution to avoid subshell issues
for dir in $(find . -type d -name 'test-fixtures'); do
if [ -f "$dir/Makefile" ]; then
echo -e "${YELLOW}${BOLD}generating fixtures in $dir${RESET}"
(make -C "$dir" fixtures)
fi
done
echo -e "${BOLD}generated all fixtures${RESET}"
download-test-fixture-cache:
desc: Download test fixture cache from ghcr.io
deps: [tools, clean-cache]
vars:
CACHE_DIGEST:
sh: docker manifest inspect {{ .CACHE_IMAGE }} | {{ .YQ }} -r '.annotations.fingerprint'
cmds:
- silent: true
cmd: |
# if oras cache is > 4 GB, delete it
if [ -d {{ .ORAS_CACHE }} ]; then
total_size=$(du -c {{ .ORAS_CACHE }} | grep total | awk '{print $1}')
if [ "$total_size" -gt 4194304 ]; then
echo 'deleting oras cache'
rm -rf {{ .ORAS_CACHE }}
fi
fi
- "ORAS_CACHE={{ .ORAS_CACHE }} {{ .ORAS }} pull {{ .CACHE_IMAGE }}"
- "cp {{ .CACHE_PATHS_FILE }} {{ .LAST_CACHE_PULL_FILE }}"
upload-test-fixture-cache:
desc: Upload the test fixture cache to ghcr.io
deps: [tools, fingerprints]
silent: true
cmd: |
set -eu
oras_command="{{ .ORAS }} push {{ .CACHE_IMAGE }}"
paths=$(cat {{ .CACHE_PATHS_FILE }} | {{ .YQ }} -r '.paths[].path')
for path in $paths; do
oras_command+=" $path"
done
oras_command+=" {{ .CACHE_PATHS_FILE }}"
oras_command+=" --annotation org.opencontainers.image.source=https://github.com/{{ .OWNER }}/{{ .PROJECT }}"
oras_command+=" --annotation fingerprint=$(cat {{ .CACHE_PATHS_FILE }} | {{ .YQ }} -r '.digest')"
echo "Executing: $oras_command"
eval $oras_command
show-test-image-cache:
silent: true
cmds:
- "echo '\nDocker daemon cache:'"
- "echo 'Docker daemon cache:'"
- "docker images --format '{{`{{.ID}}`}} {{`{{.Repository}}`}}:{{`{{.Tag}}`}}' | grep stereoscope-fixture- | sort"
- "echo '\nTar cache:'"
- 'find . -type f -wholename "**/test-fixtures/snapshot/*" | sort'
- 'find . -type f -wholename "**/test-fixtures/cache/stereoscope-fixture-*.tar" | sort'
check-docker-cache:
desc: Ensure docker caches aren't using too much disk space
silent: true
cmd: |
total_size=$(find . | grep cache | grep tar | xargs du -c | grep total | awk '{print $1}')
find . | grep cache | grep tar | xargs du
echo "total $total_size KB"
if [ "$total_size" -gt 1048576 ]; then
echo 'docker cache is larger than 1GB'
exit 1
fi
## install.sh testing targets #################################
@ -489,8 +625,31 @@ tasks:
- "rm -rf {{ .SNAPSHOT_DIR }}"
- "rm -rf {{ .TMP_DIR }}/goreleaser.yaml"
clean-cache:
desc: Remove all docker cache and local image tar cache
clean-docker-cache:
desc: Remove all docker cache tars and images from the daemon
cmds:
- 'find . -type f -wholename "**/test-fixtures/cache/stereoscope-fixture-*.tar" -delete'
- "docker images --format '{{`{{.ID}}`}} {{`{{.Repository}}`}}' | grep stereoscope-fixture- | awk '{print $$1}' | uniq | xargs -r docker rmi --force"
- find . -type d -wholename "**/test-fixtures/cache" | xargs rm -rf
- docker images --format '{{`{{.ID}}`}} {{`{{.Repository}}`}}' | grep stereoscope-fixture- | awk '{print $1}' | uniq | xargs -r docker rmi --force
clean-oras-cache:
desc: Remove all cache for oras commands
cmd: rm -rf {{ .ORAS_CACHE }}
clean-cache:
desc: Remove all image docker tar cache, images from the docker daemon, and ephemeral test fixtures
cmds:
- task: clean-docker-cache
- |
BOLD='\033[1m'
YELLOW='\033[0;33m'
RESET='\033[0m'
# Use a for loop with command substitution to avoid subshell issues
for dir in $(find . -type d -name 'test-fixtures'); do
if [ -f "$dir/Makefile" ]; then
echo -e "${YELLOW}${BOLD}deleting ephemeral test fixtures in $dir${RESET}"
(make -C "$dir" clean)
fi
done
echo -e "${BOLD}Deleted all ephemeral test fixtures${RESET}"
- rm -f {{ .LAST_CACHE_PULL_FILE }} {{ .CACHE_PATHS_FILE }}

View file

@ -93,14 +93,13 @@ func defaultAttestOutputOptions() options.Output {
string(spdxtagvalue.ID),
},
Outputs: []string{syftjson.ID.String()},
OutputFile: options.OutputFile{ // nolint:staticcheck
OutputFile: options.OutputFile{ //nolint:staticcheck
Enabled: false, // explicitly not allowed
},
Format: options.DefaultFormat(),
}
}
//nolint:funlen
func runAttest(ctx context.Context, id clio.Identification, opts *attestOptions, userInput string) error {
// TODO: what other validation here besides binary name?
if !commandExists(cosignBinName) {

View file

@ -28,7 +28,6 @@ type ConvertOptions struct {
options.UpdateCheck `yaml:",inline" mapstructure:",squash"`
}
//nolint:dupl
func Convert(app clio.Application) *cobra.Command {
id := app.ID()

View file

@ -80,7 +80,6 @@ func defaultScanOptions() *scanOptions {
}
}
//nolint:dupl
func Scan(app clio.Application) *cobra.Command {
id := app.ID()
@ -161,7 +160,7 @@ func validateArgs(cmd *cobra.Command, args []string, error string) error {
if err := cmd.Help(); err != nil {
return fmt.Errorf("unable to display help: %w", err)
}
return fmt.Errorf(error)
return fmt.Errorf("%v", error)
}
return cobra.MaximumNArgs(1)(cmd, args)
@ -396,13 +395,13 @@ func getExplanation(expErr task.ErrInvalidExpression) string {
if errors.Is(err, task.ErrNamesNotAllowed) {
if expErr.Operation == task.SubSelectOperation {
return "However, " + err.Error() + ".\nIt seems like you are intending to add a cataloger in addition to the default set." // nolint:goconst
return "However, " + err.Error() + ".\nIt seems like you are intending to add a cataloger in addition to the default set."
}
return "However, " + err.Error() + "." // nolint:goconst
return "However, " + err.Error() + "."
}
if errors.Is(err, task.ErrTagsNotAllowed) {
return "However, " + err.Error() + ".\nAdding groups of catalogers may result in surprising behavior (create inaccurate SBOMs)." // nolint:goconst
return "However, " + err.Error() + ".\nAdding groups of catalogers may result in surprising behavior (create inaccurate SBOMs)."
}
if errors.Is(err, task.ErrAllNotAllowed) {

View file

@ -11,6 +11,7 @@ import (
"github.com/anchore/fangs"
intFile "github.com/anchore/syft/internal/file"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/task"
"github.com/anchore/syft/syft"
"github.com/anchore/syft/syft/cataloging"
"github.com/anchore/syft/syft/cataloging/filecataloging"
@ -36,6 +37,7 @@ type Catalog struct {
Scope string `yaml:"scope" json:"scope" mapstructure:"scope"`
Parallelism int `yaml:"parallelism" json:"parallelism" mapstructure:"parallelism"` // the number of catalog workers to run in parallel
Relationships relationshipsConfig `yaml:"relationships" json:"relationships" mapstructure:"relationships"`
Enrich []string `yaml:"enrich" json:"enrich" mapstructure:"enrich"`
// ecosystem-specific cataloger configuration
Golang golangConfig `yaml:"golang" json:"golang" mapstructure:"golang"`
@ -55,7 +57,7 @@ type Catalog struct {
var _ interface {
clio.FlagAdder
clio.PostLoader
fangs.FieldDescriber
clio.FieldDescriber
} = (*Catalog)(nil)
func DefaultCatalog() Catalog {
@ -64,6 +66,7 @@ func DefaultCatalog() Catalog {
Package: defaultPackageConfig(),
LinuxKernel: defaultLinuxKernelConfig(),
Golang: defaultGolangConfig(),
Java: defaultJavaConfig(),
File: defaultFileConfig(),
Relationships: defaultRelationshipsConfig(),
Source: defaultSourceConfig(),
@ -129,9 +132,9 @@ func (cfg Catalog) ToPackagesConfig() pkgcataloging.Config {
return pkgcataloging.Config{
Binary: binary.DefaultClassifierCatalogerConfig(),
Golang: golang.DefaultCatalogerConfig().
WithSearchLocalModCacheLicenses(cfg.Golang.SearchLocalModCacheLicenses).
WithSearchLocalModCacheLicenses(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Go, task.Golang), cfg.Golang.SearchLocalModCacheLicenses)).
WithLocalModCacheDir(cfg.Golang.LocalModCacheDir).
WithSearchRemoteLicenses(cfg.Golang.SearchRemoteLicenses).
WithSearchRemoteLicenses(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Go, task.Golang), cfg.Golang.SearchRemoteLicenses)).
WithProxy(cfg.Golang.Proxy).
WithNoProxy(cfg.Golang.NoProxy).
WithMainModuleVersion(
@ -141,7 +144,7 @@ func (cfg Catalog) ToPackagesConfig() pkgcataloging.Config {
WithFromLDFlags(cfg.Golang.MainModuleVersion.FromLDFlags),
),
JavaScript: javascript.DefaultCatalogerConfig().
WithSearchRemoteLicenses(cfg.JavaScript.SearchRemoteLicenses).
WithSearchRemoteLicenses(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.JavaScript, task.Node, task.NPM), cfg.JavaScript.SearchRemoteLicenses)).
WithNpmBaseURL(cfg.JavaScript.NpmBaseURL),
LinuxKernel: kernel.LinuxKernelCatalogerConfig{
CatalogModules: cfg.LinuxKernel.CatalogModules,
@ -150,7 +153,9 @@ func (cfg Catalog) ToPackagesConfig() pkgcataloging.Config {
GuessUnpinnedRequirements: cfg.Python.GuessUnpinnedRequirements,
},
JavaArchive: java.DefaultArchiveCatalogerConfig().
WithUseNetwork(cfg.Java.UseNetwork).
WithUseMavenLocalRepository(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Java, task.Maven), cfg.Java.UseMavenLocalRepository)).
WithMavenLocalRepositoryDir(cfg.Java.MavenLocalRepositoryDir).
WithUseNetwork(*multiLevelOption(false, enrichmentEnabled(cfg.Enrich, task.Java, task.Maven), cfg.Java.UseNetwork)).
WithMavenBaseURL(cfg.Java.MavenURL).
WithArchiveTraversal(archiveSearch, cfg.Java.MaxParentRecursiveDepth),
}
@ -190,6 +195,9 @@ func (cfg *Catalog) AddFlags(flags clio.FlagSet) {
flags.StringArrayVarP(&cfg.SelectCatalogers, "select-catalogers", "",
"add, remove, and filter the catalogers to be used")
flags.StringArrayVarP(&cfg.Enrich, "enrich", "",
fmt.Sprintf("enable package data enrichment from local and online sources (options: %s)", strings.Join(publicisedEnrichmentOptions, ", ")))
flags.StringVarP(&cfg.Source.Name, "source-name", "",
"set the name of the target being analyzed")
@ -202,6 +210,10 @@ func (cfg *Catalog) AddFlags(flags clio.FlagSet) {
func (cfg *Catalog) DescribeFields(descriptions fangs.FieldDescriptionSet) {
descriptions.Add(&cfg.Parallelism, "number of cataloger workers to run in parallel")
descriptions.Add(&cfg.Enrich, fmt.Sprintf(`Enable data enrichment operations, which can utilize services such as Maven Central and NPM.
By default all enrichment is disabled, use: all to enable everything.
Available options are: %s`, strings.Join(publicisedEnrichmentOptions, ", ")))
}
func (cfg *Catalog) PostLoad() error {
@ -212,23 +224,12 @@ func (cfg *Catalog) PostLoad() error {
return fmt.Errorf("cannot use both 'catalogers' and 'select-catalogers'/'default-catalogers' flags")
}
flatten := func(l []string) []string {
var out []string
for _, v := range l {
for _, s := range strings.Split(v, ",") {
out = append(out, strings.TrimSpace(s))
}
}
sort.Strings(out)
return out
}
cfg.From = flatten(cfg.From)
cfg.Catalogers = flatten(cfg.Catalogers)
cfg.DefaultCatalogers = flatten(cfg.DefaultCatalogers)
cfg.SelectCatalogers = flatten(cfg.SelectCatalogers)
cfg.Enrich = flatten(cfg.Enrich)
// for backwards compatibility
cfg.DefaultCatalogers = append(cfg.DefaultCatalogers, cfg.Catalogers...)
@ -240,3 +241,68 @@ func (cfg *Catalog) PostLoad() error {
return nil
}
func flatten(commaSeparatedEntries []string) []string {
var out []string
for _, v := range commaSeparatedEntries {
for _, s := range strings.Split(v, ",") {
out = append(out, strings.TrimSpace(s))
}
}
sort.Strings(out)
return out
}
var publicisedEnrichmentOptions = []string{
"all",
task.Golang,
task.Java,
task.JavaScript,
}
func enrichmentEnabled(enrichDirectives []string, features ...string) *bool {
if len(enrichDirectives) == 0 {
return nil
}
enabled := func(features ...string) *bool {
for _, directive := range enrichDirectives {
enable := true
directive = strings.TrimPrefix(directive, "+") // +java and java are equivalent
if strings.HasPrefix(directive, "-") {
directive = directive[1:]
enable = false
}
for _, feature := range features {
if directive == feature {
return &enable
}
}
}
return nil
}
enableAll := enabled("all")
disableAll := enabled("none")
if disableAll != nil && *disableAll {
if enableAll != nil {
log.Warn("you have specified to both enable and disable all enrichment functionality, defaulting to disabled")
}
enableAll = ptr(false)
}
// check for explicit enable/disable of feature names
for _, feat := range features {
enableFeature := enabled(feat)
if enableFeature != nil {
return enableFeature
}
}
return enableAll
}
func ptr[T any](val T) *T {
return &val
}
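
ToPackagesConfig above routes each boolean through multiLevelOption, whose definition is not part of this diff; the call sites, together with the config fields changing from bool to *bool, suggest a precedence of built-in default, then any --enrich directive, then an explicit per-option setting. The following is a hedged sketch of that assumed helper, an assumption about its behavior rather than the actual implementation:

    // Sketch only: assumed behavior of multiLevelOption as used in ToPackagesConfig above.
    // The last non-nil override wins; otherwise the default applies.
    package main

    import "fmt"

    func multiLevelOption[T any](def T, overrides ...*T) *T {
        result := def
        for _, o := range overrides {
            if o != nil {
                result = *o
            }
        }
        return &result
    }

    func ptr[T any](v T) *T { return &v }

    func main() {
        // default false, "--enrich golang" enables it, user did not set the option explicitly
        fmt.Println(*multiLevelOption(false, ptr(true), nil)) // true
        // default false, no enrich directive, user explicitly disabled the option
        fmt.Println(*multiLevelOption(false, nil, ptr(false))) // false
    }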

View file

@ -70,3 +70,69 @@ func TestCatalog_PostLoad(t *testing.T) {
})
}
}
func Test_enrichmentEnabled(t *testing.T) {
tests := []struct {
directives string
test string
expected *bool
}{
{
directives: "",
test: "java",
expected: nil,
},
{
directives: "none",
test: "java",
expected: ptr(false),
},
{
directives: "none,+java",
test: "java",
expected: ptr(true),
},
{
directives: "all,none",
test: "java",
expected: ptr(false),
},
{
directives: "all",
test: "java",
expected: ptr(true),
},
{
directives: "golang,js",
test: "java",
expected: nil,
},
{
directives: "golang,-js,java",
test: "java",
expected: ptr(true),
},
{
directives: "golang,js,-java",
test: "java",
expected: ptr(false),
},
{
directives: "all",
test: "java",
expected: ptr(true),
},
{
directives: "all,-java",
test: "java",
expected: ptr(false),
},
}
for _, test := range tests {
t.Run(test.directives, func(t *testing.T) {
got := enrichmentEnabled(flatten([]string{test.directives}), test.test)
assert.Equal(t, test.expected, got)
})
}
}

View file

@ -50,7 +50,7 @@ if false, uses the syft-json output for templating (which follows the syft JSON
Note: long term support for this option is not guaranteed (it may change or break at any time)`)
prettyDescription := `include space indention and newlines
prettyDescription := `include space indentation and newlines
note: inherits default value from 'format.pretty' or 'false' if parent is unset`
descriptions.Add(&o.SyftJSON.Pretty, prettyDescription)
descriptions.Add(&o.SPDXJSON.Pretty, prettyDescription)

View file

@ -8,9 +8,9 @@ import (
)
type golangConfig struct {
SearchLocalModCacheLicenses bool `json:"search-local-mod-cache-licenses" yaml:"search-local-mod-cache-licenses" mapstructure:"search-local-mod-cache-licenses"`
SearchLocalModCacheLicenses *bool `json:"search-local-mod-cache-licenses" yaml:"search-local-mod-cache-licenses" mapstructure:"search-local-mod-cache-licenses"`
LocalModCacheDir string `json:"local-mod-cache-dir" yaml:"local-mod-cache-dir" mapstructure:"local-mod-cache-dir"`
SearchRemoteLicenses bool `json:"search-remote-licenses" yaml:"search-remote-licenses" mapstructure:"search-remote-licenses"`
SearchRemoteLicenses *bool `json:"search-remote-licenses" yaml:"search-remote-licenses" mapstructure:"search-remote-licenses"`
Proxy string `json:"proxy" yaml:"proxy" mapstructure:"proxy"`
NoProxy string `json:"no-proxy" yaml:"no-proxy" mapstructure:"no-proxy"`
MainModuleVersion golangMainModuleVersionConfig `json:"main-module-version" yaml:"main-module-version" mapstructure:"main-module-version"`
@ -47,9 +47,9 @@ type golangMainModuleVersionConfig struct {
func defaultGolangConfig() golangConfig {
def := golang.DefaultCatalogerConfig()
return golangConfig{
SearchLocalModCacheLicenses: def.SearchLocalModCacheLicenses,
SearchLocalModCacheLicenses: nil, // this defaults to false, which is the API default
LocalModCacheDir: def.LocalModCacheDir,
SearchRemoteLicenses: def.SearchRemoteLicenses,
SearchRemoteLicenses: nil, // this defaults to false, which is the API default
Proxy: strings.Join(def.Proxies, ","),
NoProxy: strings.Join(def.NoProxy, ","),
MainModuleVersion: golangMainModuleVersionConfig{

View file

@ -1,24 +1,46 @@
package options
import "github.com/anchore/clio"
import (
"github.com/anchore/clio"
"github.com/anchore/syft/syft/pkg/cataloger/java"
)
type javaConfig struct {
UseNetwork bool `yaml:"use-network" json:"use-network" mapstructure:"use-network"`
UseNetwork *bool `yaml:"use-network" json:"use-network" mapstructure:"use-network"`
UseMavenLocalRepository *bool `yaml:"use-maven-local-repository" json:"use-maven-local-repository" mapstructure:"use-maven-local-repository"`
MavenLocalRepositoryDir string `yaml:"maven-local-repository-dir" json:"maven-local-repository-dir" mapstructure:"maven-local-repository-dir"`
MavenURL string `yaml:"maven-url" json:"maven-url" mapstructure:"maven-url"`
MaxParentRecursiveDepth int `yaml:"max-parent-recursive-depth" json:"max-parent-recursive-depth" mapstructure:"max-parent-recursive-depth"`
}
func defaultJavaConfig() javaConfig {
def := java.DefaultArchiveCatalogerConfig()
return javaConfig{
UseNetwork: nil, // this defaults to false, which is the API default
MaxParentRecursiveDepth: def.MaxParentRecursiveDepth,
UseMavenLocalRepository: nil, // this defaults to false, which is the API default
MavenLocalRepositoryDir: def.MavenLocalRepositoryDir,
MavenURL: def.MavenBaseURL,
}
}
var _ interface {
clio.FieldDescriber
} = (*javaConfig)(nil)
func (o *javaConfig) DescribeFields(descriptions clio.FieldDescriptionSet) {
descriptions.Add(&o.UseNetwork, `enables Syft to use the network to fill in more detailed information about artifacts
currently this enables searching maven-url for license data
when running across pom.xml files that could have more information, syft will
explicitly search maven for license information by querying the online pom when this is true
this option is helpful for when the parent pom has more data,
that is not accessible from within the final built artifact`)
descriptions.Add(&o.UseNetwork, `enables Syft to use the network to fetch version and license information for packages when
a parent or imported pom file is not found in the local maven repository.
the pom files are downloaded from the remote Maven repository at 'maven-url'`)
descriptions.Add(&o.MavenURL, `maven repository to use, defaults to Maven central`)
descriptions.Add(&o.MaxParentRecursiveDepth, `depth to recursively resolve parent POMs`)
descriptions.Add(&o.MaxParentRecursiveDepth, `depth to recursively resolve parent POMs, no limit if <= 0`)
descriptions.Add(&o.UseMavenLocalRepository, `use the local Maven repository to retrieve pom files. When Maven is installed and was previously used
for building the software that is being scanned, then most pom files will be available in this
repository on the local file system. this greatly speeds up scans. when all pom files are available
in the local repository, then 'use-network' is not needed.
TIP: If you want to download all required pom files to the local repository without running a full
build, run 'mvn help:effective-pom' before performing the scan with syft.`)
descriptions.Add(&o.MavenLocalRepositoryDir, `override the default location of the local Maven repository.
the default is the subdirectory '.m2/repository' in your home directory`)
}
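Switching these fields from bool to *bool matters because a plain bool cannot distinguish an absent key from an explicit false once the config file is decoded. A small sketch of that difference using gopkg.in/yaml.v3 (already a dependency in go.mod); the struct and field names here are illustrative only and are not the real options types:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// javaOpts is a toy stand-in for the options struct above.
type javaOpts struct {
	UseNetwork *bool `yaml:"use-network"`
}

func main() {
	var absent, explicit javaOpts

	// no key at all: the pointer stays nil, so an API default can still apply
	if err := yaml.Unmarshal([]byte(`{}`), &absent); err != nil {
		panic(err)
	}
	// key explicitly set to false: the pointer is non-nil and points at false
	if err := yaml.Unmarshal([]byte("use-network: false"), &explicit); err != nil {
		panic(err)
	}

	fmt.Println(absent.UseNetwork == nil)                            // true
	fmt.Println(explicit.UseNetwork != nil && !*explicit.UseNetwork) // true
}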

View file

@ -3,7 +3,7 @@ package options
import "github.com/anchore/clio"
type javaScriptConfig struct {
SearchRemoteLicenses bool `json:"search-remote-licenses" yaml:"search-remote-licenses" mapstructure:"search-remote-licenses"`
SearchRemoteLicenses *bool `json:"search-remote-licenses" yaml:"search-remote-licenses" mapstructure:"search-remote-licenses"`
NpmBaseURL string `json:"npm-base-url" yaml:"npm-base-url" mapstructure:"npm-base-url"`
}

View file

@ -0,0 +1 @@
results

View file

@ -7,7 +7,7 @@ import (
)
func Test_AllLayersIncludesSquashed(t *testing.T) {
// This is a verification test for issue #894 (https://github.com/anchore/syft/issues/894)
// This is a verification test for issue grype/#894 (https://github.com/anchore/grype/issues/894)
allLayers, _ := catalogFixtureImage(t, "image-suse-all-layers", source.AllLayersScope)
squashed, _ := catalogFixtureImage(t, "image-suse-all-layers", source.SquashedScope)

View file

@ -35,7 +35,7 @@ var imageOnlyTestCases = []testCase{
pkgType: pkg.PythonPkg,
pkgLanguage: pkg.Python,
pkgInfo: map[string]string{
"Pygments": "2.6.1",
"pygments": "2.6.1",
"requests": "2.22.0",
"somerequests": "3.22.0",
"someotherpkg": "3.19.0",
@ -172,7 +172,7 @@ var dirOnlyTestCases = []testCase{
"passlib": "1.7.2",
"mypy": "v0.770",
// common to image and directory
"Pygments": "2.6.1",
"pygments": "2.6.1",
"requests": "2.22.0",
"somerequests": "3.22.0",
"someotherpkg": "3.19.0",
@ -388,6 +388,14 @@ var dirOnlyTestCases = []testCase{
"swift-numerics": "1.0.2",
},
},
{
name: "find swipl pack package manager packages",
pkgType: pkg.SwiplPackPkg,
pkgLanguage: pkg.Swipl,
pkgInfo: map[string]string{
"hdt": "0.5.2",
},
},
{
name: "find github action packages (from usage in workflow files and composite actions)",
pkgType: pkg.GithubActionPkg,
@ -402,6 +410,14 @@ var dirOnlyTestCases = []testCase{
"octo-org/this-repo/.github/workflows/workflow-1.yml": "172239021f7ba04fe7327647b213799853a9eb89",
},
},
{
name: "find opam package",
pkgType: pkg.OpamPkg,
pkgLanguage: pkg.OCaml,
pkgInfo: map[string]string{
"ocaml-base-compiler": "4.14.0",
},
},
}
var commonTestCases = []testCase{

View file

@ -51,6 +51,8 @@ func TestPkgCoverageImage(t *testing.T) {
definedLanguages.Remove(pkg.Rust.String())
definedLanguages.Remove(pkg.Dart.String())
definedLanguages.Remove(pkg.Swift.String())
definedLanguages.Remove(pkg.Swipl.String())
definedLanguages.Remove(pkg.OCaml.String())
definedLanguages.Remove(pkg.CPP.String())
definedLanguages.Remove(pkg.Haskell.String())
definedLanguages.Remove(pkg.Elixir.String())
@ -76,6 +78,8 @@ func TestPkgCoverageImage(t *testing.T) {
definedPkgs.Remove(string(pkg.LinuxKernelPkg))
definedPkgs.Remove(string(pkg.LinuxKernelModulePkg))
definedPkgs.Remove(string(pkg.SwiftPkg))
definedPkgs.Remove(string(pkg.SwiplPackPkg))
definedPkgs.Remove(string(pkg.OpamPkg))
definedPkgs.Remove(string(pkg.GithubActionPkg))
definedPkgs.Remove(string(pkg.GithubActionWorkflowPkg))

View file

@ -2,7 +2,9 @@ package integration
import (
"bytes"
"regexp"
"os"
"path/filepath"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
@ -12,8 +14,6 @@ import (
"github.com/anchore/syft/cmd/syft/internal/options"
"github.com/anchore/syft/syft/format"
"github.com/anchore/syft/syft/format/cyclonedxjson"
"github.com/anchore/syft/syft/format/cyclonedxxml"
"github.com/anchore/syft/syft/format/syftjson"
"github.com/anchore/syft/syft/source"
)
@ -43,26 +43,27 @@ func TestEncodeDecodeEncodeCycleComparison(t *testing.T) {
},
json: true,
},
{
name: cyclonedxjson.ID.String(),
redactor: func(in []byte) []byte {
// unstable values
in = regexp.MustCompile(`"(timestamp|serialNumber|bom-ref|ref)":\s*"(\n|[^"])+"`).ReplaceAll(in, []byte(`"$1": "redacted"`))
in = regexp.MustCompile(`"(dependsOn)":\s*\[(?:\s|[^]])+]`).ReplaceAll(in, []byte(`"$1": []`))
return in
},
json: true,
},
{
name: cyclonedxxml.ID.String(),
redactor: func(in []byte) []byte {
// unstable values
in = regexp.MustCompile(`(serialNumber|bom-ref|ref)="[^"]+"`).ReplaceAll(in, []byte{})
in = regexp.MustCompile(`<timestamp>[^<]+</timestamp>`).ReplaceAll(in, []byte{})
return in
},
},
// TODO: ignoring the `ref` field does produce stable results to compare, but the SBOM is then fundamentally gutted and not worth comparing (find a better redaction or comparison method)
//{
// name: cyclonedxjson.ID.String(),
// redactor: func(in []byte) []byte {
// // unstable values
// in = regexp.MustCompile(`"(timestamp|serialNumber|bom-ref|ref)":\s*"(\n|[^"])+"`).ReplaceAll(in, []byte(`"$1": "redacted"`))
// in = regexp.MustCompile(`"(dependsOn)":\s*\[(?:\s|[^]])+]`).ReplaceAll(in, []byte(`"$1": []`))
// return in
// },
// json: true,
//},
//{
// name: cyclonedxxml.ID.String(),
// redactor: func(in []byte) []byte {
// // unstable values
// in = regexp.MustCompile(`(serialNumber|bom-ref|ref)="[^"]+"`).ReplaceAll(in, []byte{})
// in = regexp.MustCompile(`<timestamp>[^<]+</timestamp>`).ReplaceAll(in, []byte{})
//
// return in
// },
//},
}
opts := options.DefaultOutput()
@ -112,6 +113,21 @@ func TestEncodeDecodeEncodeCycleComparison(t *testing.T) {
diffs := dmp.DiffMain(string(by1), string(by2), true)
t.Errorf("diff: %s", dmp.DiffPrettyText(diffs))
}
// write raw IMAGE@NAME-start and IMAGE@NAME-finish to files within the results dir
// ... this is helpful for debugging
require.NoError(t, os.MkdirAll("results", 0700))
suffix := "sbom"
switch {
case strings.Contains(test.name, "json"):
suffix = "json"
case strings.Contains(test.name, "xml"):
suffix = "xml"
}
require.NoError(t, os.WriteFile(filepath.Join("results", image+"@"+test.name+"-start."+suffix), by1, 0600))
require.NoError(t, os.WriteFile(filepath.Join("results", image+"@"+test.name+"-finish."+suffix), by2, 0600))
}
})
}

View file

@ -35,8 +35,8 @@ func TestGolangCompilerDetection(t *testing.T) {
for _, pkg := range packages {
foundCompilerVersions[pkg.Version] = struct{}{}
foundPURL[pkg.PURL] = struct{}{}
for _, cpe := range pkg.CPEs {
foundCPE[cpe] = struct{}{}
for _, c := range pkg.CPEs {
foundCPE[c] = struct{}{}
}
}

File diff suppressed because it is too large

View file

@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/anchore/syft/internal/relationship"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/source"
)
@ -44,12 +45,13 @@ func TestBinaryElfRelationships(t *testing.T) {
}
}
relationshipIndex := relationship.NewIndex(sbom.Relationships...)
for name, expectedDepNames := range expectedGraph {
pkgId := nameToId[name]
p := sbom.Artifacts.Packages.Package(pkgId)
require.NotNil(t, p, "expected package %q to be present in the SBOM", name)
rels := sbom.RelationshipsForPackage(*p, artifact.DependencyOfRelationship)
rels := relationshipIndex.References(*p, artifact.DependencyOfRelationship)
require.NotEmpty(t, rels, "expected package %q to have relationships", name)
toIds := map[artifact.ID]struct{}{}

View file

@ -1,5 +1,3 @@
//go:build !arm64
package integration
import (
@ -7,7 +5,6 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
@ -22,41 +19,39 @@ func TestPackageDeduplication(t *testing.T) {
}{
{
scope: source.AllLayersScope,
packageCount: 172, // without deduplication this would be 618
packageCount: 178, // without deduplication this would be ~600
instanceCount: map[string]int{
"basesystem": 1,
"wget": 1,
"curl": 2, // upgraded in the image
"vsftpd": 1,
"httpd": 1, // rpm, - we exclude binary
"basesystem": 1,
"wget": 1,
"curl-minimal": 2, // upgraded in the image
"vsftpd": 1,
"httpd": 1, // rpm, - we exclude binary
},
locationCount: map[string]int{
"basesystem-10.0-7.el7.centos": 4,
"curl-7.29.0-59.el7": 1, // from base image
"curl-7.29.0-59.el7_9.1": 3, // upgrade
"wget-1.14-18.el7_6.1": 3,
"vsftpd-3.0.2-29.el7_9": 2,
"httpd-2.4.6-97.el7.centos.5": 1,
// "httpd-2.4.6": 1, // binary
"basesystem-11-13.el9": 5, // in all layers
"curl-minimal-7.76.1-26.el9_3.2.0.1": 2, // base + wget layer
"curl-minimal-7.76.1-29.el9_4.1": 3, // curl upgrade layer + all above layers
"wget-1.21.1-8.el9_4": 4, // wget + all above layers
"vsftpd-3.0.5-5.el9": 2, // vsftpd + all above layers
"httpd-2.4.57-11.el9_4.1": 1, // last layer
},
},
{
scope: source.SquashedScope,
packageCount: 170,
packageCount: 172,
instanceCount: map[string]int{
"basesystem": 1,
"wget": 1,
"curl": 1, // upgraded, but the most recent
"vsftpd": 1,
"httpd": 1, // rpm, binary is now excluded by overlap
"basesystem": 1,
"wget": 1,
"curl-minimal": 1, // upgraded, but the most recent
"vsftpd": 1,
"httpd": 1, // rpm, binary is now excluded by overlap
},
locationCount: map[string]int{
"basesystem-10.0-7.el7.centos": 1,
"curl-7.29.0-59.el7_9.1": 1, // upgrade
"wget-1.14-18.el7_6.1": 1,
"vsftpd-3.0.2-29.el7_9": 1,
"httpd-2.4.6-97.el7.centos.5": 1,
// "httpd-2.4.6": 1, // binary (excluded)
"basesystem-11-13.el9": 1,
"curl-minimal-7.76.1-29.el9_4.1": 1, // upgrade
"wget-1.21.1-8.el9_4": 1,
"vsftpd-3.0.5-5.el9": 1,
"httpd-2.4.57-11.el9_4.1": 1,
},
},
}
@ -75,20 +70,21 @@ func TestPackageDeduplication(t *testing.T) {
pkgs := sbom.Artifacts.Packages.PackagesByName(name)
// with multiple packages with the same name, something is wrong (or this is the wrong fixture)
require.Len(t, pkgs, expectedInstanceCount)
if assert.Len(t, pkgs, expectedInstanceCount, "unexpected package count for %s", name) {
for _, p := range pkgs {
nameVersion := fmt.Sprintf("%s-%s", name, p.Version)
expectedLocationCount, ok := tt.locationCount[nameVersion]
if !ok {
t.Errorf("missing name-version: %s", nameVersion)
continue
}
for _, p := range pkgs {
nameVersion := fmt.Sprintf("%s-%s", name, p.Version)
expectedLocationCount, ok := tt.locationCount[nameVersion]
if !ok {
t.Fatalf("missing name-version: %s", nameVersion)
// we should see merged locations (assumption, there was 1 location for each package)
assert.Len(t, p.Locations.ToSlice(), expectedLocationCount, "unexpected location count for %s", nameVersion)
// all paths should match
assert.Len(t, p.Locations.CoordinateSet().Paths(), 1, "unexpected location count for %s", nameVersion)
}
// we should see merged locations (assumption, there was 1 location for each package)
assert.Len(t, p.Locations.ToSlice(), expectedLocationCount)
// all paths should match
assert.Len(t, p.Locations.CoordinateSet().Paths(), 1)
}
}

View file

@ -0,0 +1,25 @@
package integration
import (
"reflect"
"testing"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)
func TestSbomMetadataComponent(t *testing.T) {
sbom, _ := catalogFixtureImage(t, "image-sbom-metadata-component", source.SquashedScope, "+sbom-cataloger")
expectedPkgs := []string{"first-subcomponent", "main-component"}
foundPkgs := []string{}
for sbomPkg := range sbom.Artifacts.Packages.Enumerate(pkg.JavaPkg) {
foundPkgs = append(foundPkgs, sbomPkg.Name)
}
// check if both the package in `.metadata.component` and the one in `.components` were found
if !reflect.DeepEqual(expectedPkgs, foundPkgs) {
t.Errorf("expected packages %v, got %v", expectedPkgs, foundPkgs)
}
}
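For reference, the two names this test expects come from different places in the CycloneDX document: main-component lives under .metadata.component and first-subcomponent under .components (see the test.cdx.json fixture added later in this change). A minimal sketch of reading both locations with the cyclonedx-go module already in go.mod; this is only an illustration, not the sbom-cataloger's actual code path:

package main

import (
	"fmt"
	"os"

	cdx "github.com/CycloneDX/cyclonedx-go"
)

func main() {
	f, err := os.Open("test.cdx.json")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	bom := new(cdx.BOM)
	if err := cdx.NewBOMDecoder(f, cdx.BOMFileFormatJSON).Decode(bom); err != nil {
		panic(err)
	}

	// the component describing the SBOM's subject lives in .metadata.component
	if bom.Metadata != nil && bom.Metadata.Component != nil {
		fmt.Println("metadata.component:", bom.Metadata.Component.Name) // main-component
	}
	// everything else is listed under .components
	if bom.Components != nil {
		for _, c := range *bom.Components {
			fmt.Println("components[]:", c.Name) // first-subcomponent
		}
	}
}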

View file

@ -1,6 +1,21 @@
# change these if you want CI to not use previous stored cache
INTEGRATION_CACHE_BUSTER := "894d8ca"
FINGERPRINT_FILE := cache.fingerprint
.PHONY: cache.fingerprint
cache.fingerprint:
find image-* -type f -exec md5sum {} + | awk '{print $1}' | sort | tee /dev/stderr | md5sum | tee cache.fingerprint && echo "$(INTEGRATION_CACHE_BUSTER)" >> cache.fingerprint
.DEFAULT_GOAL := fixtures
# requirement 1: 'fixtures' goal to generate any and all test fixtures
fixtures:
@echo "nothing to do"
# requirement 2: 'fingerprint' goal to determine if the fixture input that indicates any existing cache should be busted
fingerprint: $(FINGERPRINT_FILE)
# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
.PHONY: $(FINGERPRINT_FILE)
$(FINGERPRINT_FILE):
@find image-* -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
@#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
# requirement 4: 'clean' goal to remove all generated test fixtures
.PHONY: clean
clean:
rm -f $(FINGERPRINT_FILE)
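The fingerprint recipe above shells out to find, sha256sum, and sort. As a rough sketch of what ends up in cache.fingerprint, here is the equivalent walk in Go; the function name and the example fixture directory are only illustrations, not part of the Makefile:

package main

import (
	"crypto/sha256"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"sort"
)

// fingerprint hashes every regular file under the given fixture directories and
// returns "<sha256>  <path>" lines sorted by path, roughly mirroring
// find image-* -type f -exec sha256sum {} \; | sort -k2
func fingerprint(dirs ...string) ([]string, error) {
	var lines []string
	for _, dir := range dirs {
		err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
			if err != nil || d.IsDir() {
				return err
			}
			data, err := os.ReadFile(path)
			if err != nil {
				return err
			}
			lines = append(lines, fmt.Sprintf("%x  %s", sha256.Sum256(data), path))
			return nil
		})
		if err != nil {
			return nil, err
		}
	}
	// 64 hex characters plus two spaces puts the path at byte offset 66
	sort.Slice(lines, func(i, j int) bool { return lines[i][66:] < lines[j][66:] })
	return lines, nil
}

func main() {
	lines, err := fingerprint("image-sbom-metadata-component") // any image-* fixture directory works
	if err != nil {
		panic(err)
	}
	for _, l := range lines {
		fmt.Println(l)
	}
}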

View file

@ -1 +1,6 @@
FROM golang:1.18.10-alpine
FROM --platform=linux/amd64 golang:1.18.10-alpine
FROM scratch
# we don't need the entire golang toolchain, just a single binary with the stdlib baked in
COPY --from=0 /usr/local/go/bin/gofmt bin/gofmt

View file

@ -1,4 +1,4 @@
FROM jenkins/jenkins:2.346.3-slim-jdk17@sha256:028fbbd9112c60ed086f5197fcba71992317864d27644e5949cf9c52ff4b65f0
FROM jenkins/jenkins:2.346.3-slim-jdk17@sha256:028fbbd9112c60ed086f5197fcba71992317864d27644e5949cf9c52ff4b65f0 AS base
USER root
@ -12,7 +12,7 @@ RUN apt-get update 2>&1 > /dev/null && apt-get install -y less zip 2>&1 > /dev/n
RUN unzip ../jenkins.war 2>&1 > /dev/null
RUN rm -f ./META-INF/MANIFEST.MF
RUN rm -rf ./META-INF/MANIFEST.MF ./WEB-INF ./jsbundles ./scripts ./css
WORKDIR /usr/share/jenkins
@ -21,3 +21,7 @@ RUN rm -rf jenkins.war
RUN cd ./tmp && zip -r ../jenkins.war . && cd ..
RUN rm -rf ./tmp
FROM scratch
COPY --from=base /usr/share/jenkins/jenkins.war /jenkins.war

View file

@ -1,7 +1,15 @@
FROM alpine:3.18.3@sha256:7144f7bab3d4c2648d7e59409f15ec52a18006a128c733fcff20d3a4a54ba44a
FROM alpine:3.18.3@sha256:7144f7bab3d4c2648d7e59409f15ec52a18006a128c733fcff20d3a4a54ba44a AS base
RUN wget https://repo1.maven.org/maven2/org/jvnet/hudson/main/hudson-war/2.2.1/hudson-war-2.2.1.war
RUN mv hudson-war-2.2.1.war hudson.war
# let's make this image a little smaller so it doesn't take up so much disk space
# we'll only keep the jar metadata files (pom data + manifest) and throw away the rest
RUN apk add --no-cache python3 py3-pip
COPY extract.py /extract.py
RUN python extract.py
FROM scratch
COPY --from=base /slim /

View file

@ -0,0 +1,69 @@
import os
import zipfile
import io
ARCHIVE_EXTENSIONS = ('.jar', '.war', '.ear', '.hpi', '.sar', '.nar', '.par')
METADATA_FILES = ('pom.xml', 'pom.properties', 'MANIFEST.MF')
def slim_archive(archive, output_dir, base_path="", archive_name=""):
"""
extracts metadata files from the archive and creates a slim JAR file
containing only these files. handles nested JARs by preserving them.
"""
slim_buffer = io.BytesIO()
with zipfile.ZipFile(archive, 'r') as zip_file:
with zipfile.ZipFile(slim_buffer, 'w', zipfile.ZIP_DEFLATED) as slim_zip:
for file_name in zip_file.namelist():
# check for metadata files or nested JARs
if file_name.endswith(METADATA_FILES):
# add metadata files directly to the slimmed archive
file_data = zip_file.read(file_name)
slim_zip.writestr(file_name, file_data)
elif file_name.endswith(ARCHIVE_EXTENSIONS):
# if it's a nested archive, recursively slim it
nested_archive = io.BytesIO(zip_file.read(file_name))
nested_slim_buffer = io.BytesIO()
slim_archive(
nested_archive,
nested_slim_buffer,
base_path=os.path.join(base_path, os.path.dirname(file_name)),
archive_name=os.path.basename(file_name)
)
# add the slimmed nested archive back to the parent archive
nested_slim_buffer.seek(0)
slim_zip.writestr(file_name, nested_slim_buffer.read())
# write out the slimmed JAR to the output directory if output_dir is a directory
if isinstance(output_dir, str):
output_path = os.path.join(output_dir, base_path, archive_name)
os.makedirs(os.path.dirname(output_path), exist_ok=True)
with open(output_path, 'wb') as f:
slim_buffer.seek(0)
f.write(slim_buffer.read())
else:
# if output_dir is a BytesIO buffer (for nested archives), just write to it
output_dir.seek(0)
output_dir.write(slim_buffer.getvalue())
def walk_directory_and_slim_jars(base_dir, output_dir):
"""
recursively walks through a directory tree looking for .jar, .war, .ear,
.hpi files and slims them down by keeping only metadata files.
"""
for dirpath, _, filenames in os.walk(base_dir):
for filename in filenames:
if filename.endswith(ARCHIVE_EXTENSIONS):
archive_path = os.path.join(dirpath, filename)
print(f"Processing {archive_path}")
slim_archive(archive_path, output_dir, os.path.relpath(dirpath, base_dir), filename)
# a helper script for slimming down JAR files by keeping only metadata files but still keeping the jar packaging,
# including nested JARs! Useful for testing purposes.
if __name__ == "__main__":
BASE_DIR = "."
OUTPUT_DIR = "./slim"
os.makedirs(OUTPUT_DIR, exist_ok=True)
walk_directory_and_slim_jars(BASE_DIR, OUTPUT_DIR)

View file

@ -1,4 +1,4 @@
FROM alpine@sha256:d9a7354e3845ea8466bb00b22224d9116b183e594527fb5b6c3d30bc01a20378
FROM alpine@sha256:d9a7354e3845ea8466bb00b22224d9116b183e594527fb5b6c3d30bc01a20378 AS base
# we keep these unpinned so that if alpine
# changes our integration tests can adapt
@ -6,3 +6,8 @@ RUN apk add --no-cache \
tzdata \
vim \
alpine-sdk
# we don't need the installed bins for this test, only the APK installed metadata
FROM scratch
COPY --from=base /lib/apk/db/installed /lib/apk/db/installed

View file

@ -1 +1,8 @@
FROM mcr.microsoft.com/cbl-mariner/distroless/base:2.0.202205275@sha256:f550c5428df17b145851ad75983aca6d613ad4b51ca7983b2a83e67d0ac91a5d
FROM mcr.microsoft.com/cbl-mariner/distroless/base:2.0.202205275@sha256:f550c5428df17b145851ad75983aca6d613ad4b51ca7983b2a83e67d0ac91a5d AS base
# let's shoot for smaller test fixtures
FROM scratch
COPY --from=base /var/lib/rpmmanifest/container-manifest-2 /var/lib/rpmmanifest/container-manifest-2
COPY --from=base /usr/bin/gencat /usr/bin/gencat
COPY --from=base /usr/bin/openssl /usr/bin/openssl

View file

@ -1,3 +1,8 @@
FROM ubuntu:20.04@sha256:33a5cc25d22c45900796a1aca487ad7a7cb09f09ea00b779e3b2026b4fc2faba
FROM ubuntu:20.04@sha256:33a5cc25d22c45900796a1aca487ad7a7cb09f09ea00b779e3b2026b4fc2faba AS base
# this covers rpm-python
RUN apt-get update && apt-get install -y python-pil=6.2.1-3
# let's save some space...
FROM scratch
COPY --from=base /var/lib/dpkg/status /var/lib/dpkg/status

View file

@ -1 +1,5 @@
FROM photon:5.0-20230729@sha256:4cf2a1ce0a3f4625f13a0becb6b9bccfdb014c565be6e9a2ec4c4aad1ff8a5d9
FROM photon:5.0-20230729@sha256:4cf2a1ce0a3f4625f13a0becb6b9bccfdb014c565be6e9a2ec4c4aad1ff8a5d9 AS base
FROM scratch
COPY --from=base /usr/lib/sysimage/rpm /usr/lib/sysimage/rpm

View file

@ -0,0 +1,93 @@
opam-version: "2.0"
synopsis: "Official release 4.14.0"
maintainer: [
"David Allsopp <david@tarides.com>"
"Florian Angeletti <florian.angeletti@inria.fr>"
]
authors: "Xavier Leroy and many contributors"
license: "LGPL-2.1-or-later WITH OCaml-LGPL-linking-exception"
homepage: "https://ocaml.org"
bug-reports: "https://github.com/ocaml/opam-repository/issues"
depends: [
"ocaml" {= "4.14.0" & post}
"base-unix" {post}
"base-bigarray" {post}
"base-threads" {post}
"host-arch-arm32" {arch = "arm32" & post}
"host-arch-arm64" {arch = "arm64" & post}
"host-arch-ppc64" {arch = "ppc64" & post}
"host-arch-riscv64" {arch = "riscv64" & post}
"host-arch-s390x" {arch = "s390x" & post}
"host-arch-x86_32" {os != "win32" & arch = "x86_32" & post}
"host-arch-x86_64" {os != "win32" & arch = "x86_64" & post}
"host-arch-unknown"
{os != "win32" & arch != "arm32" & arch != "arm64" & arch != "ppc64" &
arch != "riscv64" &
arch != "s390x" &
arch != "x86_32" &
arch != "x86_64" &
post}
(("arch-x86_64" {os = "win32" & arch = "x86_64"} &
(("system-mingw" & "mingw-w64-shims" {os-distribution = "cygwin" & post}) |
"system-msvc")) |
("arch-x86_32" {os = "win32"} &
(("system-mingw" & "mingw-w64-shims" {os-distribution = "cygwin" & post}) |
"system-msvc")) |
"host-system-other" {os != "win32" & post})
"ocaml-options-vanilla" {post}
"flexdll" {>= "0.36" & os = "win32"}
]
conflict-class: "ocaml-core-compiler"
flags: compiler
setenv: CAML_LD_LIBRARY_PATH = "%{lib}%/stublibs"
build: [
[
"./configure"
"--host=x86_64-pc-windows"
{system-msvc:installed & arch-x86_64:installed}
"--host=x86_64-w64-mingw32"
{os-distribution = "cygwin" & system-mingw:installed &
arch-x86_64:installed}
"--host=i686-pc-windows" {system-msvc:installed & arch-x86_32:installed}
"--host=i686-w64-mingw32"
{os-distribution = "cygwin" & system-mingw:installed &
arch-x86_32:installed}
"--prefix=%{prefix}%"
"--docdir=%{doc}%/ocaml"
"--with-flexdll=%{flexdll:share}%" {os = "win32" & flexdll:installed}
"-C"
"CC=cc" {os = "openbsd" | os = "macos"}
"ASPP=cc -c" {os = "openbsd" | os = "macos"}
]
[make "-j%{jobs}%"]
]
install: [make "install"]
build-env: MSYS2_ARG_CONV_EXCL = "*"
post-messages: [
"""\
A failure in the middle of the build may be caused by build parallelism
(enabled by default).
Please file a bug report at https://github.com/ocaml/opam-repository/issues"""
{failure & jobs > "1"}
"""\
You can try installing again including --jobs=1
to force a sequential build instead."""
{failure & jobs > "1" & opam-version >= "2.0.5"}
]
dev-repo: "git+https://github.com/ocaml/ocaml#4.14"
url {
src: "https://github.com/ocaml/ocaml/archive/4.14.0.tar.gz"
checksum:
"sha256=39f44260382f28d1054c5f9d8bf4753cb7ad64027da792f7938344544da155e8"
}
extra-source "ocaml-base-compiler.install" {
src:
"https://raw.githubusercontent.com/ocaml/opam-source-archives/main/patches/ocaml-base-compiler/ocaml-base-compiler.install"
checksum: [
"sha256=79f2a1a5044a91350a0eb6ce12e261a72a2855c094c425cddf3860e58c486678"
"md5=3e969b841df1f51ca448e6e6295cb451"
]
}
x-env-path-rewrite: [
[CAML_LD_LIBRARY_PATH (";" {os = "win32"} ":" {os != "win32"}) "target"]
]

View file

@ -0,0 +1,10 @@
name(hdt).
version('0.5.2').
% TODO: swipl_version([90121]).
title('Access RDF HDT files').
keywords(['RDF']).
author( 'Jan Wielemaker', 'J.Wielemaker@vu.nl' ).
packager( 'Jan Wielemaker', 'J.Wielemaker@vu.nl' ).
maintainer( 'Jan Wielemaker', 'J.Wielemaker@vu.nl' ).
home( 'https://github.com/JanWielemaker/hdt' ).
download( 'https://github.com/JanWielemaker/hdt/archive/V*.zip' ).

View file

@ -0,0 +1,2 @@
FROM scratch
COPY test.cdx.json /

View file

@ -0,0 +1,32 @@
{
"bomFormat" : "CycloneDX",
"specVersion" : "1.5",
"serialNumber" : "urn:uuid:dc807d4b-0415-35ab-ba61-49b5d39bc2d9",
"version" : 1,
"metadata" : {
"component" : {
"name" : "main-component",
"version" : "1.2.3",
"purl" : "pkg:maven/org.example/main-component@1.2.3",
"type" : "library",
"bom-ref" : "pkg:maven/org.example/main-component@1.2.3"
}
},
"components" : [
{
"name" : "first-subcomponent",
"version" : "2.3.4",
"purl" : "pkg:maven/org.example/first-subcomponent@2.3.4",
"type" : "library",
"bom-ref" : "pkg:maven/org.example/first-subcomponent@2.3.4"
}
],
"dependencies" : [
{
"ref" : "pkg:maven/org.example/main-component-assembly@1.2.3",
"dependsOn" : [
"pkg:maven/org.example/first-subcomponent@2.3.4"
]
}
]
}

View file

@ -1 +1,6 @@
FROM fedora:35@sha256:36af84ba69e21c9ef86a0424a090674c433b2b80c2462e57503886f1d823abe8
FROM fedora:35@sha256:36af84ba69e21c9ef86a0424a090674c433b2b80c2462e57503886f1d823abe8 AS base
# let's save some space
FROM scratch
COPY --from=base /var/lib/rpm /var/lib/rpm

View file

@ -1,2 +1,11 @@
FROM registry.suse.com/suse/sle15:15.3.17.20.20@sha256:fd657ecbab5ca564d6933e887f6ae8542a9398e6a4b399f352ce10c3a24afc64
FROM registry.suse.com/suse/sle15:15.3.17.20.20@sha256:fd657ecbab5ca564d6933e887f6ae8542a9398e6a4b399f352ce10c3a24afc64 AS base
RUN zypper in -y wget
# let's save some space... we really just need an image that has an RPM DB that is linked across layers
FROM --platform=linux/amd64 busybox:1.36.1
# setup a link /var/lib/rpm -> ../../usr/lib/sysimage/rpm
RUN mkdir -p /var/lib && ln -s ../../usr/lib/sysimage/rpm /var/lib/rpm
# copy the RPM DB from the SUSE image
COPY --from=base /usr/lib/sysimage/rpm/Packages.db /usr/lib/sysimage/rpm/Packages.db

View file

@ -1 +1,18 @@
FROM anchore/test_images:java-1abc58f@sha256:3add9f90e9ed35739cc99b7830767e09eec921052e2412adf4491648c741f066
FROM docker.io/anchore/test_images:java-88948cc@sha256:dea0e6c24636937f53bdc997d9960c2a18966d1e38bcd8ebd0c395d4e169b806 AS base
# not covered in testing...
RUN rm /packages/gradle-7.1.1-bin.zip
RUN apk add --no-cache python3 py3-pip
COPY extract.py /extract.py
WORKDIR /
# let's make this image a little smaller so it doesn't take up so much disk space
# we'll only keep the jar metadata files (pom data + manifest) and throw away the rest
RUN python extract.py
FROM scratch
COPY --from=base /slim/packages /packages

View file

@ -0,0 +1,69 @@
import os
import zipfile
import io
ARCHIVE_EXTENSIONS = ('.jar', '.war', '.ear', '.hpi', '.sar', '.nar', '.par')
METADATA_FILES = ('pom.xml', 'pom.properties', 'MANIFEST.MF')
def slim_archive(archive, output_dir, base_path="", archive_name=""):
"""
extracts metadata files from the archive and creates a slim JAR file
containing only these files. handles nested JARs by preserving them.
"""
slim_buffer = io.BytesIO()
with zipfile.ZipFile(archive, 'r') as zip_file:
with zipfile.ZipFile(slim_buffer, 'w', zipfile.ZIP_DEFLATED) as slim_zip:
for file_name in zip_file.namelist():
# check for metadata files or nested JARs
if file_name.endswith(METADATA_FILES):
# add metadata files directly to the slimmed archive
file_data = zip_file.read(file_name)
slim_zip.writestr(file_name, file_data)
elif file_name.endswith(ARCHIVE_EXTENSIONS):
# if it's a nested archive, recursively slim it
nested_archive = io.BytesIO(zip_file.read(file_name))
nested_slim_buffer = io.BytesIO()
slim_archive(
nested_archive,
nested_slim_buffer,
base_path=os.path.join(base_path, os.path.dirname(file_name)),
archive_name=os.path.basename(file_name)
)
# add the slimmed nested archive back to the parent archive
nested_slim_buffer.seek(0)
slim_zip.writestr(file_name, nested_slim_buffer.read())
# write out the slimmed JAR to the output directory if output_dir is a directory
if isinstance(output_dir, str):
output_path = os.path.join(output_dir, base_path, archive_name)
os.makedirs(os.path.dirname(output_path), exist_ok=True)
with open(output_path, 'wb') as f:
slim_buffer.seek(0)
f.write(slim_buffer.read())
else:
# if output_dir is a BytesIO buffer (for nested archives), just write to it
output_dir.seek(0)
output_dir.write(slim_buffer.getvalue())
def walk_directory_and_slim_jars(base_dir, output_dir):
"""
recursively walks through a directory tree looking for .jar, .war, .ear,
.hpi files and slims them down by keeping only metadata files.
"""
for dirpath, _, filenames in os.walk(base_dir):
for filename in filenames:
if filename.endswith(ARCHIVE_EXTENSIONS):
archive_path = os.path.join(dirpath, filename)
print(f"Processing {archive_path}")
slim_archive(archive_path, output_dir, os.path.relpath(dirpath, base_dir), filename)
# a helper script for slimming down JAR files by keeping only metadata files but still keeping the jar packaging,
# including nested JARs! Useful for testing purposes.
if __name__ == "__main__":
BASE_DIR = "."
OUTPUT_DIR = "./slim"
os.makedirs(OUTPUT_DIR, exist_ok=True)
walk_directory_and_slim_jars(BASE_DIR, OUTPUT_DIR)

View file

@ -1,6 +1,27 @@
FROM centos:7.9.2009@sha256:be65f488b7764ad3638f236b7b515b3678369a5124c47b8d32916d6487418ea4
FROM --platform=linux/amd64 rockylinux:9.3.20231119@sha256:d644d203142cd5b54ad2a83a203e1dee68af2229f8fe32f52a30c6e1d3c3a9e0 AS base
# modifying the RPM DB multiple times will result in duplicate packages when using all-layers (if there was no de-dup logic)
# curl is tricky, it already exists in the image and is being upgraded
RUN yum install -y wget-1.14-18.el7_6.1 curl-7.29.0-59.el7_9.1
RUN yum install -y vsftpd-3.0.2-29.el7_9
RUN yum install -y httpd-2.4.6-97.el7.centos.5
# but... we want to make the test image as small as possible, so we are making the changes in stages and then
# copying the RPM DB from each stage to a final stage in separate layers. This will result in a much smaller image.
FROM base AS stage1
RUN dnf install -y wget
FROM stage1 AS stage2
RUN dnf update -y curl-minimal
FROM stage2 AS stage3
RUN dnf install -y vsftpd
FROM stage3 AS stage4
RUN dnf install -y httpd
FROM scratch
COPY --from=base /var/lib/rpm /var/lib/rpm
COPY --from=stage1 /var/lib/rpm /var/lib/rpm
COPY --from=stage2 /var/lib/rpm /var/lib/rpm
COPY --from=stage3 /var/lib/rpm /var/lib/rpm
COPY --from=stage4 /var/lib/rpm /var/lib/rpm

98
go.mod
View file

@ -5,41 +5,41 @@ go 1.22.0
require (
github.com/CycloneDX/cyclonedx-go v0.9.0
github.com/Masterminds/semver v1.5.0
github.com/Masterminds/sprig/v3 v3.2.3
github.com/Masterminds/sprig/v3 v3.3.0
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d
github.com/acobaugh/osrelease v0.1.0
github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9
github.com/anchore/clio v0.0.0-20240522144804-d81e109008aa
github.com/anchore/fangs v0.0.0-20240508143433-f016b099950f
github.com/anchore/fangs v0.0.0-20240903175602-e716ef12c23d
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537
github.com/anchore/go-logger v0.0.0-20230725134548-c21dafa1ec5a
github.com/anchore/go-macholibre v0.0.0-20220308212642-53e6d0aaf6fb
github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b
github.com/anchore/packageurl-go v0.1.1-0.20240507183024-848e011fc24f
github.com/anchore/stereoscope v0.0.3-0.20240705165118-e46739e21796
github.com/anchore/stereoscope v0.0.3
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be
// we are hinting brotli to latest due to warning when installing archiver v3:
// go: warning: github.com/andybalholm/brotli@v1.0.1: retracted by module author: occasional panics and data corruption
github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46
github.com/bmatcuk/doublestar/v4 v4.6.1
github.com/charmbracelet/bubbles v0.18.0
github.com/charmbracelet/bubbletea v0.26.6
github.com/charmbracelet/lipgloss v0.11.0
github.com/dave/jennifer v1.7.0
github.com/charmbracelet/bubbles v0.20.0
github.com/charmbracelet/bubbletea v1.1.0
github.com/charmbracelet/lipgloss v0.13.0
github.com/dave/jennifer v1.7.1
github.com/deitch/magic v0.0.0-20230404182410-1ff89d7342da
github.com/distribution/reference v0.6.0
github.com/docker/docker v27.0.3+incompatible
github.com/docker/docker v27.2.1+incompatible
github.com/dustin/go-humanize v1.0.1
github.com/elliotchance/phpserialize v1.4.0
github.com/facebookincubator/nvdtools v0.1.5
github.com/github/go-spdx/v2 v2.3.1
github.com/gkampitakis/go-snaps v0.5.4
github.com/gkampitakis/go-snaps v0.5.7
github.com/go-git/go-billy/v5 v5.5.0
github.com/go-git/go-git/v5 v5.12.0
github.com/go-test/deep v1.1.1
github.com/google/go-cmp v0.6.0
github.com/google/go-containerregistry v0.20.0
github.com/google/go-containerregistry v0.20.2
github.com/google/licensecheck v0.3.1
github.com/google/uuid v1.6.0
github.com/gookit/color v1.5.4
@ -55,7 +55,7 @@ require (
github.com/mitchellh/go-homedir v1.1.0
github.com/mitchellh/hashstructure/v2 v2.0.2
github.com/mitchellh/mapstructure v1.5.0
github.com/moby/sys/mountinfo v0.7.1
github.com/moby/sys/mountinfo v0.7.2
github.com/olekukonko/tablewriter v0.0.5
github.com/opencontainers/go-digest v1.0.0
github.com/pelletier/go-toml v1.9.5 // indirect
@ -78,28 +78,30 @@ require (
github.com/xeipuuv/gojsonschema v1.2.0
github.com/zyedidia/generic v1.2.2-0.20230320175451-4410d2372cb1
go.uber.org/goleak v1.3.0
golang.org/x/mod v0.19.0
golang.org/x/net v0.27.0
golang.org/x/mod v0.21.0
golang.org/x/net v0.29.0
gopkg.in/yaml.v3 v3.0.1
modernc.org/sqlite v1.30.1
modernc.org/sqlite v1.33.0
)
require google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 // indirect
require google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 // indirect
require (
github.com/BurntSushi/toml v1.4.0
github.com/OneOfOne/xxhash v1.2.8
github.com/adrg/xdg v0.5.0
github.com/magiconair/properties v1.8.7
github.com/pelletier/go-toml/v2 v2.1.0
golang.org/x/exp v0.0.0-20231108232855-2478ac86f678
)
require (
dario.cat/mergo v1.0.0 // indirect
dario.cat/mergo v1.0.1 // indirect
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect
github.com/AdamKorcz/go-118-fuzz-build v0.0.0-20230306123547-8075edf89bb0 // indirect
github.com/DataDog/zstd v1.5.5 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.2.0 // indirect
github.com/Masterminds/semver/v3 v3.3.0 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/Microsoft/hcsshim v0.11.4 // indirect
github.com/ProtonMail/go-crypto v1.0.0 // indirect
@ -110,10 +112,8 @@ require (
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/becheran/wildmatch-go v1.0.0 // indirect
github.com/charmbracelet/harmonica v0.2.0 // indirect
github.com/charmbracelet/x/ansi v0.1.2 // indirect
github.com/charmbracelet/x/input v0.1.0 // indirect
github.com/charmbracelet/x/term v0.1.1 // indirect
github.com/charmbracelet/x/windows v0.1.0 // indirect
github.com/charmbracelet/x/ansi v0.2.3 // indirect
github.com/charmbracelet/x/term v0.2.0 // indirect
github.com/cloudflare/circl v1.3.8 // indirect
github.com/containerd/cgroups v1.1.0 // indirect
github.com/containerd/containerd v1.7.11 // indirect
@ -125,7 +125,7 @@ require (
github.com/containerd/typeurl/v2 v2.1.1 // indirect
github.com/cyphar/filepath-securejoin v0.2.4 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/docker/cli v27.0.3+incompatible // indirect
github.com/docker/cli v27.1.1+incompatible // indirect
github.com/docker/distribution v2.8.3+incompatible // indirect
github.com/docker/docker-credential-helpers v0.7.0 // indirect
github.com/docker/go-connections v0.4.0 // indirect
@ -136,13 +136,13 @@ require (
github.com/emirpasic/gods v1.18.1 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/felixge/fgprof v0.9.3 // indirect
github.com/felixge/httpsnoop v1.0.3 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.4 // indirect
github.com/gkampitakis/ciinfo v0.3.0 // indirect
github.com/gkampitakis/go-diff v1.3.2 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-logr/logr v1.2.4 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-restruct/restruct v1.2.0-alpha // indirect
github.com/gogo/protobuf v1.3.2 // indirect
@ -153,9 +153,8 @@ require (
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/huandu/xstrings v1.3.3 // indirect
github.com/huandu/xstrings v1.5.0 // indirect
github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 // indirect
github.com/imdario/mergo v0.3.15 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
@ -163,13 +162,13 @@ require (
github.com/klauspost/pgzip v1.2.5 // indirect
github.com/kr/pretty v0.3.1 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381 // indirect
github.com/logrusorgru/aurora v2.0.3+incompatible // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/maruel/natural v1.1.1 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
@ -179,16 +178,16 @@ require (
github.com/moby/sys/signal v0.7.0 // indirect
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/reflow v0.3.0 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/nwaples/rardecode v1.1.0 // indirect
github.com/opencontainers/image-spec v1.1.0 // indirect
github.com/opencontainers/runc v1.1.12 // indirect
github.com/opencontainers/runc v1.1.14 // indirect
github.com/opencontainers/runtime-spec v1.1.0-rc.1 // indirect
github.com/opencontainers/selinux v1.11.0 // indirect
github.com/pborman/indent v1.2.1 // indirect
github.com/pierrec/lz4/v4 v4.1.15 // indirect
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/pierrec/lz4/v4 v4.1.19 // indirect
github.com/pjbgf/sha1cd v0.3.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pkg/profile v1.7.0 // indirect
@ -198,18 +197,18 @@ require (
github.com/rogpeppe/go-internal v1.12.0 // indirect
github.com/sagikazarmark/locafero v0.4.0 // indirect
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/sahilm/fuzzy v0.1.1-0.20230530133925-c48e322e2a8f // indirect
github.com/sahilm/fuzzy v0.1.1 // indirect
github.com/secDre4mer/pkcs7 v0.0.0-20240322103146-665324a4461d // indirect
github.com/shopspring/decimal v1.2.0 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.2.2 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/cast v1.6.0 // indirect
github.com/spf13/cast v1.7.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/spf13/viper v1.18.2 // indirect
github.com/spf13/viper v1.19.0 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/sylabs/sif/v2 v2.17.1 // indirect
github.com/sylabs/squashfs v0.6.1 // indirect
github.com/sylabs/squashfs v1.0.0 // indirect
github.com/therootcompany/xz v1.0.1 // indirect
github.com/tidwall/gjson v1.17.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
@ -223,27 +222,26 @@ require (
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.45.0 // indirect
go.opentelemetry.io/otel v1.19.0 // indirect
go.opentelemetry.io/otel/metric v1.19.0 // indirect
go.opentelemetry.io/otel/trace v1.19.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
go.uber.org/atomic v1.9.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/crypto v0.25.0 // indirect
golang.org/x/exp v0.0.0-20231108232855-2478ac86f678 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/sys v0.22.0 // indirect
golang.org/x/term v0.22.0 // indirect
golang.org/x/text v0.16.0 // indirect
golang.org/x/crypto v0.27.0 // indirect
golang.org/x/sync v0.8.0 // indirect
golang.org/x/sys v0.25.0 // indirect
golang.org/x/term v0.24.0 // indirect
golang.org/x/text v0.18.0 // indirect
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f // indirect
google.golang.org/grpc v1.59.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c // indirect
google.golang.org/grpc v1.62.1 // indirect
google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 // indirect
modernc.org/libc v1.52.1 // indirect
modernc.org/libc v1.55.3 // indirect
modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.8.0 // indirect
modernc.org/strutil v1.2.0 // indirect

217
go.sum
View file

@ -45,8 +45,8 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
@ -69,16 +69,18 @@ github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJ
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0=
github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe3tPhs=
github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0=
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7fz8=
github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8=
github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q=
github.com/ProtonMail/go-crypto v1.0.0 h1:LRuvITjQWX+WIfr930YHG2HNfjR1uOfyf5vE0kC2U78=
github.com/ProtonMail/go-crypto v1.0.0/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
@ -97,8 +99,8 @@ github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9 h1:p0ZIe0htYOX284Y4
github.com/anchore/bubbly v0.0.0-20231115134915-def0aba654a9/go.mod h1:3ZsFB9tzW3vl4gEiUeuSOMDnwroWxIxJelOOHUp8dSw=
github.com/anchore/clio v0.0.0-20240522144804-d81e109008aa h1:pwlAn4O9SBUnlgfa69YcqIynbUyobLVFYu8HxSoCffA=
github.com/anchore/clio v0.0.0-20240522144804-d81e109008aa/go.mod h1:nD3H5uIvjxlfmakOBgtyFQbk5Zjp3l538kxfpHPslzI=
github.com/anchore/fangs v0.0.0-20240508143433-f016b099950f h1:NOhzafCyNYFi88qxkBFjMzQo4dRa1vDhBzx+0Uovx8Q=
github.com/anchore/fangs v0.0.0-20240508143433-f016b099950f/go.mod h1:sVpRS2yNCw6tLVpvA1QSDVWTJVpCuAm8JNZgn4Sjz/k=
github.com/anchore/fangs v0.0.0-20240903175602-e716ef12c23d h1:ZD4wdCBgJJzJybjTUIEiiupLF7B9H3WLuBTjspBO2Mc=
github.com/anchore/fangs v0.0.0-20240903175602-e716ef12c23d/go.mod h1:Xh4ObY3fmoMzOEVXwDtS1uK44JC7+nRD0n29/1KYFYg=
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537 h1:GjNGuwK5jWjJMyVppBjYS54eOiiSNv4Ba869k4wh72Q=
github.com/anchore/go-collections v0.0.0-20240216171411-9321230ce537/go.mod h1:1aiktV46ATCkuVg0O573ZrH56BUawTECPETbZyBcqT8=
github.com/anchore/go-logger v0.0.0-20230725134548-c21dafa1ec5a h1:nJ2G8zWKASyVClGVgG7sfM5mwoZlZ2zYpIzN2OhjWkw=
@ -113,8 +115,8 @@ github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b h1:e1bmaoJfZV
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E=
github.com/anchore/packageurl-go v0.1.1-0.20240507183024-848e011fc24f h1:B/E9ixKNCasntpoch61NDaQyGPDXLEJlL+B9B/PbdbA=
github.com/anchore/packageurl-go v0.1.1-0.20240507183024-848e011fc24f/go.mod h1:Blo6OgJNiYF41ufcgHKkbCKF2MDOMlrqhXv/ij6ocR4=
github.com/anchore/stereoscope v0.0.3-0.20240705165118-e46739e21796 h1:mb2i23zBKaSloS4RHMwBCKTCB3X5HnObcRvjxrvvMr8=
github.com/anchore/stereoscope v0.0.3-0.20240705165118-e46739e21796/go.mod h1:6r8piaIGsYZuVkFw+fAXYtEGFI58dZuni/jNdvK+IxQ=
github.com/anchore/stereoscope v0.0.3 h1:JRPHySy8S6P+Ff3IDiQ29ap1i8/laUQxDk9K1eFh/2U=
github.com/anchore/stereoscope v0.0.3/go.mod h1:5DJheGPjVRsSqegTB24Zi6SCHnYQnA519yeIG+RG+I4=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
@ -155,22 +157,18 @@ github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/charmbracelet/bubbles v0.18.0 h1:PYv1A036luoBGroX6VWjQIE9Syf2Wby2oOl/39KLfy0=
github.com/charmbracelet/bubbles v0.18.0/go.mod h1:08qhZhtIwzgrtBjAcJnij1t1H0ZRjwHyGsy6AL11PSw=
github.com/charmbracelet/bubbletea v0.26.6 h1:zTCWSuST+3yZYZnVSvbXwKOPRSNZceVeqpzOLN2zq1s=
github.com/charmbracelet/bubbletea v0.26.6/go.mod h1:dz8CWPlfCCGLFbBlTY4N7bjLiyOGDJEnd2Muu7pOWhk=
github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE=
github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU=
github.com/charmbracelet/bubbletea v1.1.0 h1:FjAl9eAL3HBCHenhz/ZPjkKdScmaS5SK69JAK2YJK9c=
github.com/charmbracelet/bubbletea v1.1.0/go.mod h1:9Ogk0HrdbHolIKHdjfFpyXJmiCzGwy+FesYkZr7hYU4=
github.com/charmbracelet/harmonica v0.2.0 h1:8NxJWRWg/bzKqqEaaeFNipOu77YR5t8aSwG4pgaUBiQ=
github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao=
github.com/charmbracelet/lipgloss v0.11.0 h1:UoAcbQ6Qml8hDwSWs0Y1cB5TEQuZkDPH/ZqwWWYTG4g=
github.com/charmbracelet/lipgloss v0.11.0/go.mod h1:1UdRTH9gYgpcdNN5oBtjbu/IzNKtzVtb7sqN1t9LNn8=
github.com/charmbracelet/x/ansi v0.1.2 h1:6+LR39uG8DE6zAmbu023YlqjJHkYXDF1z36ZwzO4xZY=
github.com/charmbracelet/x/ansi v0.1.2/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw=
github.com/charmbracelet/x/input v0.1.0 h1:TEsGSfZYQyOtp+STIjyBq6tpRaorH0qpwZUj8DavAhQ=
github.com/charmbracelet/x/input v0.1.0/go.mod h1:ZZwaBxPF7IG8gWWzPUVqHEtWhc1+HXJPNuerJGRGZ28=
github.com/charmbracelet/x/term v0.1.1 h1:3cosVAiPOig+EV4X9U+3LDgtwwAoEzJjNdwbXDjF6yI=
github.com/charmbracelet/x/term v0.1.1/go.mod h1:wB1fHt5ECsu3mXYusyzcngVWWlu1KKUmmLhfgr/Flxw=
github.com/charmbracelet/x/windows v0.1.0 h1:gTaxdvzDM5oMa/I2ZNF7wN78X/atWemG9Wph7Ika2k4=
github.com/charmbracelet/x/windows v0.1.0/go.mod h1:GLEO/l+lizvFDBPLIOk+49gdX49L9YWMB5t+DZd0jkQ=
github.com/charmbracelet/lipgloss v0.13.0 h1:4X3PPeoWEDCMvzDvGmTajSyYPcZM4+y8sCA/SsA3cjw=
github.com/charmbracelet/lipgloss v0.13.0/go.mod h1:nw4zy0SBX/F/eAO1cWdcvy6qnkDUxr8Lw7dvFrAIbbY=
github.com/charmbracelet/x/ansi v0.2.3 h1:VfFN0NUpcjBRd4DnKfRaIRo53KRgey/nhOoEqosGDEY=
github.com/charmbracelet/x/ansi v0.2.3/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw=
github.com/charmbracelet/x/term v0.2.0 h1:cNB9Ot9q8I711MyZ7myUR5HFWL/lc3OpU8jZ4hwm0x0=
github.com/charmbracelet/x/term v0.2.0/go.mod h1:GVxgxAbjUrmpvIINHIQnJJKpMlHiZ4cktEQCN6GWyF0=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
@ -215,8 +213,8 @@ github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46t
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
github.com/dave/jennifer v1.7.0 h1:uRbSBH9UTS64yXbh4FrMHfgfY762RD+C7bUPKODpSJE=
github.com/dave/jennifer v1.7.0/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc=
github.com/dave/jennifer v1.7.1 h1:B4jJJDHelWcDhlRQxWeo0Npa/pYKBLrirAQoTN45txo=
github.com/dave/jennifer v1.7.1/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc=
github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -227,12 +225,12 @@ github.com/deitch/magic v0.0.0-20230404182410-1ff89d7342da/go.mod h1:B3tI9iGHi4i
github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1/go.mod h1:+hnT3ywWDTAFrW5aE+u2Sa/wT555ZqwoCS+pk3p6ry4=
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
github.com/docker/cli v27.0.3+incompatible h1:usGs0/BoBW8MWxGeEtqPMkzOY56jZ6kYlSN5BLDioCQ=
github.com/docker/cli v27.0.3+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v27.1.1+incompatible h1:goaZxOqs4QKxznZjjBWKONQci/MywhtRv2oNn0GkeZE=
github.com/docker/cli v27.1.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v27.0.3+incompatible h1:aBGI9TeQ4MPlhquTQKq9XbK79rKFVwXNUAYz9aXyEBE=
github.com/docker/docker v27.0.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v27.2.1+incompatible h1:fQdiLfW7VLscyoeYEBz7/J8soYFDZV1u6VW6gJEjNMI=
github.com/docker/docker v27.2.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A=
github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
@ -277,15 +275,15 @@ github.com/fatih/set v0.2.1 h1:nn2CaJyknWE/6txyUDGwysr3G5QC6xWB/PtVjPBbeaA=
github.com/fatih/set v0.2.1/go.mod h1:+RKtMCH+favT2+3YecHGxcc0b4KyVWA1QWWJUs4E0CI=
github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g=
github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw=
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/gabriel-vasile/mimetype v1.4.0 h1:Cn9dkdYsMIu56tGho+fqzh7XmvY2YyGU0FnbhiOsEro=
github.com/gabriel-vasile/mimetype v1.4.0/go.mod h1:fA8fi6KUiG7MgQQ+mEWotXoEOvmxRtOJlERCzSmRvr8=
github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I=
github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/github/go-spdx/v2 v2.3.1 h1:ffGuHTbHuHzWPt53n8f9o8clGutuLPObo3zB4JAjxU8=
github.com/github/go-spdx/v2 v2.3.1/go.mod h1:2ZxKsOhvBp+OYBDlsGnUMcchLeo2mrpEBn2L1C+U3IQ=
@ -293,8 +291,8 @@ github.com/gkampitakis/ciinfo v0.3.0 h1:gWZlOC2+RYYttL0hBqcoQhM7h1qNkVqvRCV1fOvp
github.com/gkampitakis/ciinfo v0.3.0/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo=
github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M=
github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk=
github.com/gkampitakis/go-snaps v0.5.4 h1:GX+dkKmVsRenz7SoTbdIEL4KQARZctkMiZ8ZKprRwT8=
github.com/gkampitakis/go-snaps v0.5.4/go.mod h1:ZABkO14uCuVxBHAXAfKG+bqNz+aa1bGPAg8jkI0Nk8Y=
github.com/gkampitakis/go-snaps v0.5.7 h1:uVGjHR4t4pPHU944udMx7VKHpwepZXmvDMF+yDmI0rg=
github.com/gkampitakis/go-snaps v0.5.7/go.mod h1:ZABkO14uCuVxBHAXAfKG+bqNz+aa1bGPAg8jkI0Nk8Y=
github.com/glebarez/go-sqlite v1.20.3 h1:89BkqGOXR9oRmG58ZrzgoY/Fhy5x0M+/WV48U5zVrZ4=
github.com/glebarez/go-sqlite v1.20.3/go.mod h1:u3N6D/wftiAzIOJtZl6BmedqxmmkDfH3q+ihjqxC9u0=
github.com/gliderlabs/ssh v0.3.7 h1:iV3Bqi942d9huXnzEF2Mt+CY9gLu8DNM4Obd+8bODRE=
@ -315,8 +313,8 @@ github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ=
github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-restruct/restruct v1.2.0-alpha h1:2Lp474S/9660+SJjpVxoKuWX09JsXHSrdV7Nv3/gkvc=
@ -384,8 +382,8 @@ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-containerregistry v0.20.0 h1:wRqHpOeVh3DnenOrPy9xDOLdnLatiGuuNRVelR2gSbg=
github.com/google/go-containerregistry v0.20.0/go.mod h1:YCMFNQeeXeLF+dnhhWkqDItx/JSkH01j1Kis4PsjzFI=
github.com/google/go-containerregistry v0.20.2 h1:B1wPJ1SN/S7pB+ZAimcciVD+r+yV/l/DSArMxlbwseo=
github.com/google/go-containerregistry v0.20.2/go.mod h1:z38EKdKh4h7IP2gSfUUqEvalZBqs6AoLeWfUy34nQC8=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/licensecheck v0.3.1 h1:QoxgoDkaeC4nFrtGN1jV7IPmDCHFNIVh54e5hSt6sPs=
github.com/google/licensecheck v0.3.1/go.mod h1:ORkR35t/JjW+emNKtfJDII0zlciG9JgbT7SmsohlHmY=
@ -411,7 +409,6 @@ github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8I
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo=
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@ -463,8 +460,8 @@ github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn
github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=
github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4=
github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4=
github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 h1:i462o439ZjprVSFSZLZxcsoAe592sZB1rci2Z8j4wdk=
github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
@ -473,9 +470,6 @@ github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w=
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
@ -522,8 +516,9 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381 h1:bqDmpDG49ZRnB5PcgP0RXtQvnMSgIF14M7CBd2shtXs=
github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczGlG91VSDkswnjF5A8=
github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w=
@ -551,9 +546,8 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75 h1:P8UmIzZMYDR+NGImiFvErt6VWfIRPuGM+vyjiEdkmIw=
github.com/mattn/go-localereader v0.0.2-0.20220822084749-2491eb6c1c75/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
@ -563,7 +557,6 @@ github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3N
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
@ -576,15 +569,14 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
github.com/moby/sys/mountinfo v0.7.1 h1:/tTvQaSJRr2FshkhXiIpux6fQ2Zvc4j7tAhMTStAG2g=
github.com/moby/sys/mountinfo v0.7.1/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI=
github.com/moby/sys/mountinfo v0.7.2 h1:1shs6aH5s4o5H2zQLn796ADW1wMrIwHsyJ2v9KouLrg=
github.com/moby/sys/mountinfo v0.7.2/go.mod h1:1YOa8w8Ih7uW0wALDUgT1dTTSBrZ+HiBLGws92L2RU4=
github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc=
github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI=
@ -602,8 +594,6 @@ github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
@ -619,8 +609,8 @@ github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
github.com/opencontainers/runc v1.1.12 h1:BOIssBaW1La0/qbNZHXOOa71dZfZEQOzW7dqQf3phss=
github.com/opencontainers/runc v1.1.12/go.mod h1:S+lQwSfncpBha7XTy/5lBwWgm5+y5Ma/O44Ekby9FK8=
github.com/opencontainers/runc v1.1.14 h1:rgSuzbmgz5DUJjeSnw337TxDbRuqjs6iqQck/2weR6w=
github.com/opencontainers/runc v1.1.14/go.mod h1:E4C2z+7BxR7GHXp0hAY53mek+x49X1LjPNeMTfRGvOA=
github.com/opencontainers/runtime-spec v1.1.0-rc.1 h1:wHa9jroFfKGQqFHj0I1fMRKLl0pfj+ynAqBxo3v6u9w=
github.com/opencontainers/runtime-spec v1.1.0-rc.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/opencontainers/selinux v1.11.0 h1:+5Zbo97w3Lbmb3PeqQtpmTkMwsW5nRI3YaLpt7tQ7oU=
@ -632,11 +622,11 @@ github.com/pborman/indent v1.2.1/go.mod h1:FitS+t35kIYtB5xWTZAPhnmrxcciEEOdbyrrp
github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0=
github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.19 h1:tYLzDnjDXh9qIxSTKHwXwOYmm9d887Y7Y1ZkyXYHAN4=
github.com/pierrec/lz4/v4 v4.1.19/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4=
github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@ -672,7 +662,6 @@ github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe
github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
@ -691,8 +680,8 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/sahilm/fuzzy v0.1.1-0.20230530133925-c48e322e2a8f h1:MvTmaQdww/z0Q4wrYjDSCcZ78NoftLQyHBSLW/Cx79Y=
github.com/sahilm/fuzzy v0.1.1-0.20230530133925-c48e322e2a8f/go.mod h1:VFvziUEIMCrT6A6tw2RFIXPXXmzXbOsSHF0DOI8ZK9Y=
github.com/sahilm/fuzzy v0.1.1 h1:ceu5RHF8DGgoi+/dR5PsECjCDH1BE3Fnmpo7aVXOdRA=
github.com/sahilm/fuzzy v0.1.1/go.mod h1:VFvziUEIMCrT6A6tw2RFIXPXXmzXbOsSHF0DOI8ZK9Y=
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d h1:hrujxIzL1woJ7AwssoOcM/tq5JjjG2yYOc8odClEiXA=
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo=
@ -710,8 +699,8 @@ github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNX
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ=
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
@ -732,10 +721,9 @@ github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY52
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w=
github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
@ -743,8 +731,8 @@ github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM=
github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ=
github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk=
github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
@ -770,8 +758,8 @@ github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/sylabs/sif/v2 v2.17.1 h1:p6Sl0LWyShXBj2SBsS1dMOMIMrZHe8pwBnBrYt6uo4M=
github.com/sylabs/sif/v2 v2.17.1/go.mod h1:XUGB6AQUXGkms3qPOPdevctT3lBLRLWZNWHVnt5HMKE=
github.com/sylabs/squashfs v0.6.1 h1:4hgvHnD9JGlYWwT0bPYNt9zaz23mAV3Js+VEgQoRGYQ=
github.com/sylabs/squashfs v0.6.1/go.mod h1:ZwpbPCj0ocIvMy2br6KZmix6Gzh6fsGQcCnydMF+Kx8=
github.com/sylabs/squashfs v1.0.0 h1:xAyMS21ogglkuR5HaY55PCfqY3H32ma9GkasTYo28Zg=
github.com/sylabs/squashfs v1.0.0/go.mod h1:rhWzvgefq1X+R+LZdts10hfMsTg3g74OfGunW8tvg/4=
github.com/terminalstatic/go-xsd-validate v0.1.5 h1:RqpJnf6HGE2CB/lZB1A8BYguk8uRtcvYAPLCF15qguo=
github.com/terminalstatic/go-xsd-validate v0.1.5/go.mod h1:18lsvYFofBflqCrvo1umpABZ99+GneNTw2kEEc8UPJw=
github.com/therootcompany/xz v1.0.1 h1:CmOtsn1CbtmyYiusbfmhmkpAAETj0wBIH6kCYaX+xzw=
@ -836,20 +824,20 @@ go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.45.0 h1:x8Z78aZx8cOF0+Kkazoc7lwUNMGy0LrzEMxTm4BbTxg=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.45.0/go.mod h1:62CPTSry9QZtOaSsE3tOzhx6LzDhHnXJ6xHeMNNiM6Q=
go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs=
go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU=
go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE=
go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8=
go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco=
go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o=
go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A=
go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg=
go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo=
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
@ -874,11 +862,10 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -919,8 +906,8 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8=
golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0=
golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -961,15 +948,14 @@ golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLd
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8=
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys=
golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -1000,8 +986,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -1081,15 +1067,15 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk=
golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4=
golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -1102,8 +1088,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -1275,12 +1261,12 @@ google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ6
google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 h1:wpZ8pe2x1Q3f2KyT5f8oP/fa9rHAKgFPr/HZdNuS+PQ=
google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:J7XzRzVy1+IPwWHZUzoD0IccYZIrXILAQpc+Qy9CMhY=
google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17 h1:JpwMPBpFN3uKhdaekDpiNlImDdkUAyiJ6ez/uxGaUSo=
google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:0xJLfVdJqpAPl8tDg1ujOCGzx6LFLttXT5NhllGOXY4=
google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f h1:ultW7fxlIvee4HYrtnaRPon9HpEgFk5zYpmfMgtKB5I=
google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f/go.mod h1:L9KNLi232K1/xB6f7AlSX692koaRnKaWSR0stBki0Yc=
google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 h1:9+tzLLstTlPTRyJTh+ah5wIMsBW5c4tQwGTN3thOW9Y=
google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9/go.mod h1:mqHbVIp48Muh7Ywss/AD6I5kNVKZMmAa/QEW58Gxp2s=
google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2 h1:rIo7ocm2roD9DcFIX67Ym8icoGCKSARAiPljFhh5suQ=
google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c h1:lfpJ/2rWPa/kJgxyyXM8PrNnfCzcmxJ265mADgwmvLI=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@ -1308,8 +1294,8 @@ google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnD
google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk=
google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98=
google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk=
google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
@ -1344,7 +1330,6 @@ gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@ -1359,28 +1344,18 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
modernc.org/cc/v4 v4.21.2 h1:dycHFB/jDc3IyacKipCNSDrjIC0Lm1hyoWOZTRR20Lk=
modernc.org/cc/v4 v4.21.2/go.mod h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ=
modernc.org/ccgo/v4 v4.17.10 h1:6wrtRozgrhCxieCeJh85QsxkX/2FFrT9hdaWPlbn4Zo=
modernc.org/ccgo/v4 v4.17.10/go.mod h1:0NBHgsqTTpm9cA5z2ccErvGZmtntSM9qD2kFAs6pjXM=
modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
modernc.org/gc/v2 v2.4.1 h1:9cNzOqPyMJBvrUipmynX0ZohMhcxPtMccYgGOJdOiBw=
modernc.org/gc/v2 v2.4.1/go.mod h1:wzN5dK1AzVGoH6XOzc3YZ+ey/jPgYHLuVckd62P0GYU=
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 h1:5D53IMaUuA5InSeMu9eJtlQXS2NxAhyWQvkKEgXZhHI=
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6/go.mod h1:Qz0X07sNOR1jWYCrJMEnbW/X55x206Q7Vt4mz6/wHp4=
modernc.org/libc v1.52.1 h1:uau0VoiT5hnR+SpoWekCKbLqm7v6dhRL3hI+NQhgN3M=
modernc.org/libc v1.52.1/go.mod h1:HR4nVzFDSDizP620zcMCgjb1/8xk2lg5p/8yjfGv1IQ=
modernc.org/libc v1.55.3 h1:AzcW1mhlPNrRtjS5sS+eW2ISCgSOLLNyFzRh/V3Qj/U=
modernc.org/libc v1.55.3/go.mod h1:qFXepLhz+JjFThQ4kzwzOjA/y/artDeg+pcYnY+Q83w=
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E=
modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU=
modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
modernc.org/sortutil v1.2.0 h1:jQiD3PfS2REGJNzNCMMaLSp/wdMNieTbKX920Cqdgqc=
modernc.org/sortutil v1.2.0/go.mod h1:TKU2s7kJMf1AE84OoiGppNHJwvB753OYfNl2WRb++Ss=
modernc.org/sqlite v1.30.1 h1:YFhPVfu2iIgUf9kuA1CR7iiHdcEEsI2i+yjRYHscyxk=
modernc.org/sqlite v1.30.1/go.mod h1:DUmsiWQDaAvU4abhc/N+djlom/L2o8f7gZ95RCvyoLU=
modernc.org/sqlite v1.33.0 h1:WWkA/T2G17okiLGgKAj4/RMIvgyMT19yQ038160IeYk=
modernc.org/sqlite v1.33.0/go.mod h1:9uQ9hF/pCZoYZK73D/ud5Z7cIRIILSZI8NdIemVMTX8=
modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=

View file

@ -1,6 +1,6 @@
# Caching
All caches are created from a global `manager`. By defaut this is a `bypassedCache`, which performs no caching.
All caches are created from a global `manager`. By default this is a `bypassedCache`, which performs no caching.
One benefit of this is that tests don't need to worry about caching causing issues unless they explicitly need
to test the cache and can opt-in using the `cache.TestCache(t)` helper.
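As a rough illustration of the opt-in described above, here is a minimal test sketch. It assumes the `cache.TestCache(t)` helper comes from syft's internal `cache` package; the import path, test name, and test body are illustrative assumptions rather than part of this change.

```go
package example_test

import (
	"testing"

	// assumed import path for the cache package referenced in the note above
	"github.com/anchore/syft/internal/cache"
)

func TestWithRealCache(t *testing.T) {
	// opt in: replace the default bypassedCache with a cache scoped to this test
	cache.TestCache(t)

	// ... exercise code that reads and writes through the global cache manager ...
}
```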

View file

@ -34,7 +34,7 @@ func (r *errorResolver[T]) Resolve(key string, resolver resolverFunc[T]) (T, err
return v.Value, err
}
if v.Error != "" {
return v.Value, fmt.Errorf(v.Error)
return v.Value, fmt.Errorf("failed to resolve cache: %s", v.Error)
}
return v.Value, nil
}

View file

@ -3,5 +3,5 @@ package internal
const (
// JSONSchemaVersion is the current schema version output by the JSON encoder
// This is roughly following the "SchemaVer" guidelines for versioning the JSON schema. Please see schema/json/README.md for details on how to increment.
JSONSchemaVersion = "16.0.14"
JSONSchemaVersion = "16.0.16"
)

View file

@ -6,6 +6,7 @@ import (
"errors"
"fmt"
"io"
"math"
"os"
)
@ -52,9 +53,14 @@ func OpenZip(filepath string) (*ZipReadCloser, error) {
return nil, fmt.Errorf("unable to seek to beginning of archive: %w", err)
}
size := fi.Size() - int64(offset)
if offset > math.MaxInt64 {
return nil, fmt.Errorf("archive start offset too large: %v", offset)
}
offset64 := int64(offset) //nolint:gosec // lint bug, checked above: https://github.com/securego/gosec/issues/1187
r, err := zip.NewReader(io.NewSectionReader(f, int64(offset), size), size)
size := fi.Size() - offset64
r, err := zip.NewReader(io.NewSectionReader(f, offset64, size), size)
if err != nil {
return nil, fmt.Errorf("unable to open ZipReadCloser @ %q: %w", filepath, err)
}
@ -95,8 +101,6 @@ type directoryEnd struct {
}
// note: this is derived from readDirectoryEnd within the archive/zip package
//
//nolint:gocognit
func findArchiveStartOffset(r io.ReaderAt, size int64) (startOfArchive uint64, err error) {
// look for directoryEndSignature in the last 1k, then in the last 65k
var buf []byte
@ -150,7 +154,7 @@ func findArchiveStartOffset(r io.ReaderAt, size int64) (startOfArchive uint64, e
startOfArchive = uint64(directoryEndOffset) - d.directorySize - d.directoryOffset
// Make sure directoryOffset points to somewhere in our file.
if o := int64(d.directoryOffset); o < 0 || o >= size {
if d.directoryOffset >= uint64(size) {
return 0, zip.ErrFormat
}
return startOfArchive, nil
@ -179,7 +183,7 @@ func findDirectory64End(r io.ReaderAt, directoryEndOffset int64) (int64, error)
if b.uint32() != 1 { // total number of disks
return -1, nil // the file is not a valid zip64-file
}
return int64(p), nil
return int64(p), nil //nolint:gosec
}
// readDirectory64End reads the zip64 directory end and updates the

View file

@ -21,28 +21,22 @@ func NewDependencyRelationships(resolver file.Resolver, accessor sbomsync.Access
// 3. craft package-to-package relationships for each binary that represent shared library dependencies
//note: we only care about package-to-package relationships
var relIndex *relationship.Index
accessor.ReadFromSBOM(func(s *sbom.SBOM) {
relIndex = relationship.NewIndex(s.Relationships...)
})
return generateRelationships(resolver, accessor, index, relIndex)
return generateRelationships(resolver, accessor, index)
}
func generateRelationships(resolver file.Resolver, accessor sbomsync.Accessor, index *sharedLibraryIndex, relIndex *relationship.Index) []artifact.Relationship {
// read all existing dependencyOf relationships
accessor.ReadFromSBOM(func(s *sbom.SBOM) {
for _, r := range s.Relationships {
if r.Type != artifact.DependencyOfRelationship {
continue
}
relIndex.Track(r)
}
})
func generateRelationships(resolver file.Resolver, accessor sbomsync.Accessor, index *sharedLibraryIndex) []artifact.Relationship {
newRelationships := relationship.NewIndex()
// find all package-to-package relationships for shared library dependencies
accessor.ReadFromSBOM(func(s *sbom.SBOM) {
for _, parentPkg := range s.Artifacts.Packages.Sorted(pkg.BinaryPkg) {
relIndex := relationship.NewIndex(s.Relationships...)
addRelationship := func(r artifact.Relationship) {
if !relIndex.Contains(r) {
newRelationships.Add(r)
}
}
for _, parentPkg := range allElfPackages(s) {
for _, evidentLocation := range parentPkg.Locations.ToSlice() {
if evidentLocation.Annotations[pkg.EvidenceAnnotationKey] != pkg.PrimaryEvidenceAnnotation {
continue
@ -54,12 +48,12 @@ func generateRelationships(resolver file.Resolver, accessor sbomsync.Accessor, i
continue
}
populateRelationships(exec, parentPkg, resolver, relIndex, index)
populateRelationships(exec, parentPkg, resolver, addRelationship, index)
}
}
})
return relIndex.NewRelationships()
return newRelationships.All()
}
// PackagesToRemove returns a list of binary packages (resolved by the ELF cataloger) that should be removed from the SBOM
@ -106,7 +100,7 @@ func onlyPrimaryEvidenceLocations(p pkg.Package) []file.Location {
func allElfPackages(s *sbom.SBOM) []pkg.Package {
var elfPkgs []pkg.Package
for _, p := range s.Artifacts.Packages.Sorted(pkg.BinaryPkg) {
for _, p := range s.Artifacts.Packages.Sorted() {
if !isElfPackage(p) {
continue
}
@ -147,7 +141,7 @@ func getBinaryPackagesToDelete(resolver file.Resolver, s *sbom.SBOM) []artifact.
return pkgsToDelete
}
func populateRelationships(exec file.Executable, parentPkg pkg.Package, resolver file.Resolver, relIndex *relationship.Index, index *sharedLibraryIndex) {
func populateRelationships(exec file.Executable, parentPkg pkg.Package, resolver file.Resolver, addRelationship func(artifact.Relationship), index *sharedLibraryIndex) {
for _, libReference := range exec.ImportedLibraries {
// for each library reference, check s.Artifacts.Packages.Sorted(pkg.BinaryPkg) for a binary package that represents that library
// if found, create a relationship between the parent package and the library package
@ -167,7 +161,7 @@ func populateRelationships(exec file.Executable, parentPkg pkg.Package, resolver
realBaseName := path.Base(loc.RealPath)
pkgCollection := index.owningLibraryPackage(realBaseName)
if pkgCollection.PackageCount() < 1 {
relIndex.Add(
addRelationship(
artifact.Relationship{
From: loc.Coordinates,
To: parentPkg,
@ -176,7 +170,7 @@ func populateRelationships(exec file.Executable, parentPkg pkg.Package, resolver
)
}
for _, p := range pkgCollection.Sorted() {
relIndex.Add(
addRelationship(
artifact.Relationship{
From: p,
To: parentPkg,

View file

@ -2,6 +2,7 @@ package binary
import (
"path"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
@ -49,6 +50,22 @@ func TestPackagesToRemove(t *testing.T) {
}
glibCBinaryELFPackage.SetID()
glibCBinaryELFPackageAsRPM := pkg.Package{
Name: "glibc",
Locations: file.NewLocationSet(
file.NewLocation(glibcCoordinate.RealPath).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.PrimaryEvidenceAnnotation),
),
Type: pkg.RpmPkg, // note: the elf package claims it is a RPM, not binary
Metadata: pkg.ELFBinaryPackageNoteJSONPayload{
Type: "rpm",
Vendor: "syft",
System: "syftsys",
SourceRepo: "https://github.com/someone/somewhere.git",
Commit: "5534c38d0ffef9a3f83154f0b7a7fb6ab0ab6dbb",
},
}
glibCBinaryELFPackageAsRPM.SetID()
glibCBinaryClassifierPackage := pkg.Package{
Name: "glibc",
Locations: file.NewLocationSet(
@ -82,9 +99,15 @@ func TestPackagesToRemove(t *testing.T) {
want: []artifact.ID{glibCBinaryELFPackage.ID()},
},
{
name: "remove no packages when there is a single binary package",
name: "keep packages that are overlapping rpm --> binary when the binary self identifies as an RPM",
resolver: file.NewMockResolverForPaths(glibcCoordinate.RealPath),
accessor: newAccessor([]pkg.Package{glibCBinaryELFPackage}, map[file.Coordinates]file.Executable{}, nil),
accessor: newAccessor([]pkg.Package{glibCPackage, glibCBinaryELFPackageAsRPM}, map[file.Coordinates]file.Executable{}, nil),
want: []artifact.ID{},
},
{
name: "remove no packages when there is a single binary package (or self identifying RPM)",
resolver: file.NewMockResolverForPaths(glibcCoordinate.RealPath),
accessor: newAccessor([]pkg.Package{glibCBinaryELFPackage, glibCBinaryELFPackageAsRPM}, map[file.Coordinates]file.Executable{}, nil),
want: []artifact.ID{},
},
{
@ -172,9 +195,9 @@ func TestNewDependencyRelationships(t *testing.T) {
file.NewLocation(parallelLibCoordinate.RealPath).WithAnnotation(pkg.EvidenceAnnotationKey, pkg.SupportingEvidenceAnnotation),
),
Language: "",
Type: pkg.BinaryPkg,
Type: pkg.RpmPkg,
Metadata: pkg.ELFBinaryPackageNoteJSONPayload{
Type: "testfixture",
Type: "rpm",
Vendor: "syft",
System: "syftsys",
SourceRepo: "https://github.com/someone/somewhere.git",
@ -328,7 +351,20 @@ func relationshipComparer(x, y []artifact.Relationship) string {
artifact.Relationship{},
file.LocationSet{},
pkg.LicenseSet{},
))
), cmpopts.SortSlices(lessRelationships))
}
func lessRelationships(r1, r2 artifact.Relationship) bool {
c := strings.Compare(string(r1.Type), string(r2.Type))
if c != 0 {
return c < 0
}
c = strings.Compare(string(r1.From.ID()), string(r2.From.ID()))
if c != 0 {
return c < 0
}
c = strings.Compare(string(r1.To.ID()), string(r2.To.ID()))
return c < 0
}
func newAccessor(pkgs []pkg.Package, coordinateIndex map[file.Coordinates]file.Executable, preexistingRelationships []artifact.Relationship) sbomsync.Accessor {

View file

@ -1,88 +1,181 @@
package relationship
import (
"github.com/scylladb/go-set/strset"
"slices"
"strings"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
)
// Index indexes relationships, preventing duplicates
type Index struct {
typesByFromTo map[artifact.ID]map[artifact.ID]*strset.Set
existing []artifact.Relationship
additional []artifact.Relationship
all []*sortableRelationship
fromID map[artifact.ID]*mappedRelationships
toID map[artifact.ID]*mappedRelationships
}
func NewIndex(existing ...artifact.Relationship) *Index {
r := &Index{
typesByFromTo: make(map[artifact.ID]map[artifact.ID]*strset.Set),
}
r.TrackAll(existing...)
return r
// NewIndex returns a new relationship Index
func NewIndex(relationships ...artifact.Relationship) *Index {
out := Index{}
out.Add(relationships...)
return &out
}
func (i *Index) track(r artifact.Relationship) bool {
fromID := r.From.ID()
if _, ok := i.typesByFromTo[fromID]; !ok {
i.typesByFromTo[fromID] = make(map[artifact.ID]*strset.Set)
// Add adds all the given relationships to the index, without adding duplicates
func (i *Index) Add(relationships ...artifact.Relationship) {
if i.fromID == nil {
i.fromID = map[artifact.ID]*mappedRelationships{}
}
if i.toID == nil {
i.toID = map[artifact.ID]*mappedRelationships{}
}
toID := r.To.ID()
if _, ok := i.typesByFromTo[fromID][toID]; !ok {
i.typesByFromTo[fromID][toID] = strset.New()
// store appropriate indexes for stable ordering to minimize ID() calls
for _, r := range relationships {
// prevent duplicates
if i.Contains(r) {
continue
}
fromID := r.From.ID()
toID := r.To.ID()
relationship := &sortableRelationship{
from: fromID,
to: toID,
relationship: r,
}
// add to all relationships
i.all = append(i.all, relationship)
// add from -> to mapping
mapped := i.fromID[fromID]
if mapped == nil {
mapped = &mappedRelationships{}
i.fromID[fromID] = mapped
}
mapped.add(toID, relationship)
// add to -> from mapping
mapped = i.toID[toID]
if mapped == nil {
mapped = &mappedRelationships{}
i.toID[toID] = mapped
}
mapped.add(fromID, relationship)
}
var exists bool
if i.typesByFromTo[fromID][toID].Has(string(r.Type)) {
exists = true
}
i.typesByFromTo[fromID][toID].Add(string(r.Type))
return !exists
}
// Track this relationship as "exists" in the index (this is used to prevent duplicate relationships from being added).
// returns true if the relationship is new to the index, false otherwise.
func (i *Index) Track(r artifact.Relationship) bool {
unique := i.track(r)
if unique {
i.existing = append(i.existing, r)
}
return unique
// From returns all relationships from the given identifiable, with specified types
func (i *Index) From(identifiable artifact.Identifiable, types ...artifact.RelationshipType) []artifact.Relationship {
return toSortedSlice(fromMapped(i.fromID, identifiable), types)
}
// Add a new relationship to the index, returning true if the relationship is new to the index, false otherwise (thus is a duplicate).
func (i *Index) Add(r artifact.Relationship) bool {
if i.track(r) {
i.additional = append(i.additional, r)
return true
// To returns all relationships to the given identifiable, with specified types
func (i *Index) To(identifiable artifact.Identifiable, types ...artifact.RelationshipType) []artifact.Relationship {
return toSortedSlice(fromMapped(i.toID, identifiable), types)
}
// References returns all relationships that reference to or from the given identifiable
func (i *Index) References(identifiable artifact.Identifiable, types ...artifact.RelationshipType) []artifact.Relationship {
return toSortedSlice(append(fromMapped(i.fromID, identifiable), fromMapped(i.toID, identifiable)...), types)
}
// Coordinates returns all coordinates for the provided identifiable for provided relationship types
// If no types are provided, all relationship types are considered.
func (i *Index) Coordinates(identifiable artifact.Identifiable, types ...artifact.RelationshipType) []file.Coordinates {
var coordinates []file.Coordinates
for _, relationship := range i.References(identifiable, types...) {
cords := extractCoordinates(relationship)
coordinates = append(coordinates, cords...)
}
return coordinates
}
// Contains indicates the relationship is present in this index
func (i *Index) Contains(r artifact.Relationship) bool {
if mapped := i.fromID[r.From.ID()]; mapped != nil {
if ids := mapped.typeMap[r.Type]; ids != nil {
return ids[r.To.ID()] != nil
}
}
return false
}
func (i *Index) TrackAll(rs ...artifact.Relationship) {
for _, r := range rs {
i.Track(r)
// All returns a sorted set of relationships matching all types, or all relationships if no types specified
func (i *Index) All(types ...artifact.RelationshipType) []artifact.Relationship {
return toSortedSlice(i.all, types)
}
func fromMapped(idMap map[artifact.ID]*mappedRelationships, identifiable artifact.Identifiable) []*sortableRelationship {
if identifiable == nil || idMap == nil {
return nil
}
}
func (i *Index) AddAll(rs ...artifact.Relationship) {
for _, r := range rs {
i.Add(r)
mapped := idMap[identifiable.ID()]
if mapped == nil {
return nil
}
return mapped.allRelated
}
func (i *Index) NewRelationships() []artifact.Relationship {
return i.additional
func toSortedSlice(relationships []*sortableRelationship, types []artifact.RelationshipType) []artifact.Relationship {
// always return sorted for SBOM stability
slices.SortFunc(relationships, sortFunc)
var out []artifact.Relationship
for _, r := range relationships {
if len(types) == 0 || slices.Contains(types, r.relationship.Type) {
out = append(out, r.relationship)
}
}
return out
}
func (i *Index) ExistingRelationships() []artifact.Relationship {
return i.existing
func extractCoordinates(relationship artifact.Relationship) (results []file.Coordinates) {
if coordinates, exists := relationship.From.(file.Coordinates); exists {
results = append(results, coordinates)
}
if coordinates, exists := relationship.To.(file.Coordinates); exists {
results = append(results, coordinates)
}
return results
}
func (i *Index) AllUniqueRelationships() []artifact.Relationship {
var all []artifact.Relationship
all = append(all, i.existing...)
all = append(all, i.additional...)
return all
type mappedRelationships struct {
typeMap map[artifact.RelationshipType]map[artifact.ID]*sortableRelationship
allRelated []*sortableRelationship
}
func (m *mappedRelationships) add(id artifact.ID, newRelationship *sortableRelationship) {
m.allRelated = append(m.allRelated, newRelationship)
if m.typeMap == nil {
m.typeMap = map[artifact.RelationshipType]map[artifact.ID]*sortableRelationship{}
}
typeMap := m.typeMap[newRelationship.relationship.Type]
if typeMap == nil {
typeMap = map[artifact.ID]*sortableRelationship{}
m.typeMap[newRelationship.relationship.Type] = typeMap
}
typeMap[id] = newRelationship
}
type sortableRelationship struct {
from artifact.ID
to artifact.ID
relationship artifact.Relationship
}
func sortFunc(a, b *sortableRelationship) int {
cmp := strings.Compare(string(a.relationship.Type), string(b.relationship.Type))
if cmp != 0 {
return cmp
}
cmp = strings.Compare(string(a.from), string(b.from))
if cmp != 0 {
return cmp
}
return strings.Compare(string(a.to), string(b.to))
}
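To make the rewritten index API above concrete, a small usage sketch follows; it only restates what the doc comments and the test file below already demonstrate. The import paths and the `main` wrapper are assumptions for illustration only.

```go
package main

import (
	"fmt"

	"github.com/anchore/syft/internal/relationship" // assumed location of the package above
	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/pkg"
)

func main() {
	a := pkg.Package{Name: "a"}
	b := pkg.Package{Name: "b"}
	a.SetID()
	b.SetID()

	dep := artifact.Relationship{From: a, To: b, Type: artifact.DependencyOfRelationship}

	idx := relationship.NewIndex(dep)
	idx.Add(dep) // a duplicate: Contains already reports it, so it is not added again

	fmt.Println(len(idx.All()))    // 1 -- duplicates are dropped and output is sorted
	fmt.Println(len(idx.From(a)))  // 1 -- relationships originating at package "a"
	fmt.Println(idx.Contains(dep)) // true
}
```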

View file

@ -3,223 +3,231 @@ package relationship
import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
)
func Test_newRelationshipIndex(t *testing.T) {
from := fakeIdentifiable{id: "from"}
to := fakeIdentifiable{id: "to"}
tests := []struct {
name string
given []artifact.Relationship
track []artifact.Relationship
add []artifact.Relationship
wantExisting []string
wantAdditional []string
}{
{
name: "empty",
},
{
name: "tracks existing relationships",
given: []artifact.Relationship{
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
},
wantExisting: []string{"from [evident-by] to"},
},
{
name: "deduplicate tracked relationships",
given: []artifact.Relationship{
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
},
track: []artifact.Relationship{
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
},
wantExisting: []string{"from [evident-by] to"},
},
{
name: "deduplicate any input relationships",
given: []artifact.Relationship{
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
},
track: []artifact.Relationship{
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
},
add: []artifact.Relationship{
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
},
wantExisting: []string{"from [evident-by] to"},
},
{
name: "deduplicate any added relationships",
add: []artifact.Relationship{
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
{
From: from,
To: to,
Type: artifact.EvidentByRelationship,
},
},
wantAdditional: []string{"from [evident-by] to"},
},
func Test_Index(t *testing.T) {
p1 := pkg.Package{
Name: "pkg-1",
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
idx := NewIndex(tt.given...)
idx.TrackAll(tt.track...)
idx.AddAll(tt.add...)
diffRelationships(t, tt.wantExisting, idx.existing)
diffRelationships(t, tt.wantAdditional, idx.additional)
})
p2 := pkg.Package{
Name: "pkg-2",
}
p3 := pkg.Package{
Name: "pkg-3",
}
c1 := file.Coordinates{
RealPath: "/coords/1",
}
c2 := file.Coordinates{
RealPath: "/coords/2",
}
for _, p := range []*pkg.Package{&p1, &p2, &p3} {
p.SetID()
}
r1 := artifact.Relationship{
From: p1,
To: p2,
Type: artifact.DependencyOfRelationship,
}
r2 := artifact.Relationship{
From: p1,
To: p3,
Type: artifact.DependencyOfRelationship,
}
r3 := artifact.Relationship{
From: p1,
To: c1,
Type: artifact.ContainsRelationship,
}
r4 := artifact.Relationship{
From: p2,
To: c2,
Type: artifact.ContainsRelationship,
}
r5 := artifact.Relationship{
From: p3,
To: c2,
Type: artifact.ContainsRelationship,
}
dup := artifact.Relationship{
From: p3,
To: c2,
Type: artifact.ContainsRelationship,
}
idx := NewIndex(r1, r2, r3, r4, r5, dup)
require.ElementsMatch(t, slice(r1, r2, r3, r4, r5), idx.All())
require.ElementsMatch(t, slice(r1, r4), idx.References(p2))
require.ElementsMatch(t, slice(r4), idx.References(p2, artifact.ContainsRelationship))
require.ElementsMatch(t, slice(r1), idx.To(p2))
require.ElementsMatch(t, []artifact.Relationship(nil), idx.To(p2, artifact.ContainsRelationship))
require.ElementsMatch(t, slice(r4), idx.From(p2))
require.ElementsMatch(t, slice(r4), idx.From(p2, artifact.ContainsRelationship))
}
func diffRelationships(t *testing.T, expected []string, actual []artifact.Relationship) {
	if d := cmp.Diff(expected, stringRelationships(actual)); d != "" {
		t.Errorf("unexpected relationships (-want, +got): %s", d)
	}
}

func stringRelationships(relationships []artifact.Relationship) []string {
	var result []string
	for _, r := range relationships {
		result = append(result, string(r.From.ID())+" ["+string(r.Type)+"] "+string(r.To.ID()))
	}
	return result
}

func Test_sortOrder(t *testing.T) {
	r1 := artifact.Relationship{
		From: id("1"),
		To:   id("2"),
		Type: "1",
	}
	r2 := artifact.Relationship{
		From: id("2"),
		To:   id("3"),
		Type: "1",
	}
	r3 := artifact.Relationship{
		From: id("3"),
		To:   id("4"),
		Type: "1",
	}
	r4 := artifact.Relationship{
		From: id("1"),
		To:   id("2"),
		Type: "2",
	}
	r5 := artifact.Relationship{
		From: id("2"),
		To:   id("3"),
		Type: "2",
	}
	dup := artifact.Relationship{
		From: id("2"),
		To:   id("3"),
		Type: "2",
	}
	r6 := artifact.Relationship{
		From: id("2"),
		To:   id("3"),
		Type: "3",
	}

	idx := NewIndex(r5, r2, r6, r4, r1, r3, dup)
	require.EqualValues(t, slice(r1, r2, r3, r4, r5, r6), idx.All())
	require.EqualValues(t, slice(r1, r4), idx.From(id("1")))
	require.EqualValues(t, slice(r2, r5, r6), idx.To(id("3")))

	rLast := artifact.Relationship{
		From: id("0"),
		To:   id("3"),
		Type: "9999",
	}
	rFirst := artifact.Relationship{
		From: id("0"),
		To:   id("3"),
		Type: "1",
	}
	rMid := artifact.Relationship{
		From: id("0"),
		To:   id("1"),
		Type: "2",
	}

	idx.Add(rLast, rFirst, rMid)
	require.EqualValues(t, slice(rFirst, r1, r2, r3, rMid, r4, r5, r6, rLast), idx.All())
	require.EqualValues(t, slice(rFirst, r2, r5, r6, rLast), idx.To(id("3")))
}

func Test_relationshipIndex_track(t *testing.T) {
	from := fakeIdentifiable{id: "from"}
	to := fakeIdentifiable{id: "to"}
	relationship := artifact.Relationship{From: from, To: to, Type: artifact.EvidentByRelationship}
	tests := []struct {
		name     string
		existing []artifact.Relationship
		given    artifact.Relationship
		want     bool
	}{
		{
			name:     "track returns true for a new relationship",
			existing: []artifact.Relationship{},
			given:    relationship,
			want:     true,
		},
		{
			name:     "track returns false for an existing relationship",
			existing: []artifact.Relationship{relationship},
			given:    relationship,
			want:     false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			i := NewIndex(tt.existing...)
			if got := i.Track(tt.given); got != tt.want {
				t.Errorf("track() = %v, want %v", got, tt.want)
			}
		})
	}
}

func Test_relationshipIndex_add(t *testing.T) {
	from := fakeIdentifiable{id: "from"}
	to := fakeIdentifiable{id: "to"}
	relationship := artifact.Relationship{From: from, To: to, Type: artifact.EvidentByRelationship}
	tests := []struct {
		name     string
		existing []artifact.Relationship
		given    artifact.Relationship
		want     bool
	}{
		{
			name:     "add returns true for a new relationship",
			existing: []artifact.Relationship{},
			given:    relationship,
			want:     true,
		},
		{
			name:     "add returns false for an existing relationship",
			existing: []artifact.Relationship{relationship},
			given:    relationship,
			want:     false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			i := NewIndex(tt.existing...)
			if got := i.Add(tt.given); got != tt.want {
				t.Errorf("add() = %v, want %v", got, tt.want)
			}
		})
	}
}

type fakeIdentifiable struct {
	id string
}

func Test_Coordinates(t *testing.T) {
	p1 := pkg.Package{
		Name: "pkg-1",
	}
	p2 := pkg.Package{
		Name: "pkg-2",
	}
	p3 := pkg.Package{
		Name: "pkg-3",
	}
	c1 := file.Coordinates{
		RealPath: "/coords/1",
	}
	c2 := file.Coordinates{
		RealPath: "/coords/2",
	}
	c3 := file.Coordinates{
		RealPath: "/coords/3",
	}
	c4 := file.Coordinates{
		RealPath: "/coords/4",
	}

	for _, p := range []*pkg.Package{&p1, &p2, &p3} {
		p.SetID()
	}

	r1 := artifact.Relationship{
		From: p1,
		To:   p2,
		Type: artifact.DependencyOfRelationship,
	}
	r2 := artifact.Relationship{
		From: p1,
		To:   p3,
		Type: artifact.DependencyOfRelationship,
	}
	r3 := artifact.Relationship{
		From: p1,
		To:   c1,
		Type: artifact.ContainsRelationship,
	}
	r4 := artifact.Relationship{
		From: p2,
		To:   c2,
		Type: artifact.ContainsRelationship,
	}
	r5 := artifact.Relationship{
		From: p3,
		To:   c1,
		Type: artifact.ContainsRelationship,
	}
	r6 := artifact.Relationship{
		From: p3,
		To:   c2,
		Type: artifact.ContainsRelationship,
	}
	r7 := artifact.Relationship{
		From: c1,
		To:   c3,
		Type: artifact.ContainsRelationship,
	}
	r8 := artifact.Relationship{
		From: c3,
		To:   c4,
		Type: artifact.ContainsRelationship,
	}

	idx := NewIndex(r1, r2, r3, r4, r5, r6, r7, r8)

	got := idx.Coordinates(p1)
	require.ElementsMatch(t, slice(c1), got)

	got = idx.Coordinates(p3)
	require.ElementsMatch(t, slice(c1, c2), got)
}

func (f fakeIdentifiable) ID() artifact.ID {
	return artifact.ID(f.id)
}

type id string

func (i id) ID() artifact.ID {
	return artifact.ID(i)
}

func slice[T any](values ...T) []T {
	return values
}

View file

@ -1,9 +1,9 @@
// Code generated by go generate; DO NOT EDIT.
// This file was generated by robots at 2024-05-23 08:47:23.204981 -0400 EDT m=+0.050881068
// This file was generated by robots at 2024-08-20 11:33:49.349625 -0400 EDT m=+0.383911876
// using data from https://spdx.org/licenses/licenses.json
package spdxlicense
const Version = "3.24.0"
const Version = "3.25.0"
var licenseIDs = map[string]string{
"0bsd": "0BSD",
@ -501,6 +501,8 @@ var licenseIDs = map[string]string{
"dldezero2.0": "DL-DE-ZERO-2.0",
"dldezero2.0.0": "DL-DE-ZERO-2.0",
"doc": "DOC",
"docbookschema": "DocBook-Schema",
"docbookxml": "DocBook-XML",
"dotseqn": "Dotseqn",
"drl1": "DRL-1.0",
"drl1.0": "DRL-1.0",
@ -714,6 +716,7 @@ var licenseIDs = map[string]string{
"gutmann": "Gutmann",
"haskellreport": "HaskellReport",
"hdparm": "hdparm",
"hidapi": "HIDAPI",
"hippocratic2": "Hippocratic-2.1",
"hippocratic2.1": "Hippocratic-2.1",
"hippocratic2.1.0": "Hippocratic-2.1",
@ -740,6 +743,7 @@ var licenseIDs = map[string]string{
"hpndmarkuskuhn": "HPND-Markus-Kuhn",
"hpndmerchantabilityvariant": "HPND-merchantability-variant",
"hpndmitdisclaimer": "HPND-MIT-disclaimer",
"hpndnetrek": "HPND-Netrek",
"hpndpbmplus": "HPND-Pbmplus",
"hpndsellmitdisclaimerxserver": "HPND-sell-MIT-disclaimer-xserver",
"hpndsellregexpr": "HPND-sell-regexpr",
@ -1165,6 +1169,7 @@ var licenseIDs = map[string]string{
"rsamd": "RSA-MD",
"rscpl": "RSCPL",
"ruby": "Ruby",
"rubypty": "Ruby-pty",
"saxpath": "Saxpath",
"saxpd": "SAX-PD",
"saxpd2": "SAX-PD-2.0",
@ -1265,6 +1270,9 @@ var licenseIDs = map[string]string{
"tuberlin2": "TU-Berlin-2.0",
"tuberlin2.0": "TU-Berlin-2.0",
"tuberlin2.0.0": "TU-Berlin-2.0",
"ubuntufont1": "Ubuntu-font-1.0",
"ubuntufont1.0": "Ubuntu-font-1.0",
"ubuntufont1.0.0": "Ubuntu-font-1.0",
"ucar": "UCAR",
"ucl1": "UCL-1.0",
"ucl1.0": "UCL-1.0",
@ -1315,8 +1323,11 @@ var licenseIDs = map[string]string{
"x11.0": "X11",
"x11.0.0": "X11",
"x11.0.0distributemodificationsvariant": "X11-distribute-modifications-variant",
"x11.0.0swapped": "X11-swapped",
"x11.0distributemodificationsvariant": "X11-distribute-modifications-variant",
"x11.0swapped": "X11-swapped",
"x11distributemodificationsvariant": "X11-distribute-modifications-variant",
"x11swapped": "X11-swapped",
"xdebug1": "Xdebug-1.03",
"xdebug1.03": "Xdebug-1.03",
"xdebug1.03.0": "Xdebug-1.03",

View file

@ -5,9 +5,11 @@ import (
"fmt"
"runtime/debug"
"sync"
"time"
"github.com/hashicorp/go-multierror"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/sbomsync"
"github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/file"
@ -68,5 +70,8 @@ func runTaskSafely(ctx context.Context, t Task, resolver file.Resolver, s sbomsy
}
}()
return t.Execute(ctx, resolver, s)
start := time.Now()
res := t.Execute(ctx, resolver, s)
log.WithFields("task", t.Name(), "elapsed", time.Since(start)).Info("task completed")
return res
}
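The hunk above replaces the direct return from runTaskSafely with a timed call: capture time.Now() before Execute, then log the elapsed duration as a structured field. A minimal standalone sketch of that pattern, using only the standard library (the task name and logger here are illustrative, not syft's actual types):

package main

import (
	"log"
	"time"
)

// timed wraps a unit of work and logs how long it took, mirroring the
// elapsed-time logging added to runTaskSafely above.
func timed(name string, fn func() error) error {
	start := time.Now()
	err := fn()
	log.Printf("task=%s elapsed=%s err=%v", name, time.Since(start), err)
	return err
}

func main() {
	_ = timed("example-task", func() error {
		time.Sleep(50 * time.Millisecond) // stand-in for real work
		return nil
	})
}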

View file

@ -15,10 +15,11 @@ import (
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/cataloging"
"github.com/anchore/syft/syft/cataloging/pkgcataloging"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/common/cpe"
cpeutils "github.com/anchore/syft/syft/pkg/cataloger/common/cpe"
)
type packageTaskFactory func(cfg CatalogingFactoryConfig) Task
@ -81,9 +82,7 @@ func (f PackageTaskFactories) Tasks(cfg CatalogingFactoryConfig) ([]Task, error)
}
// NewPackageTask creates a Task function for a generic pkg.Cataloger, honoring the common configuration options.
//
//nolint:funlen
func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string) Task {
func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string) Task { //nolint: funlen
fn := func(ctx context.Context, resolver file.Resolver, sbom sbomsync.Builder) error {
catalogerName := c.Name()
log.WithFields("name", catalogerName).Trace("starting package cataloger")
@ -102,21 +101,25 @@ func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string
pkgs, relationships, err := c.Catalog(ctx, resolver)
if err != nil {
return fmt.Errorf("unable to catalog packages with %q: %w", c.Name(), err)
return fmt.Errorf("unable to catalog packages with %q: %w", catalogerName, err)
}
log.WithFields("cataloger", c.Name()).Debugf("discovered %d packages", len(pkgs))
log.WithFields("cataloger", catalogerName).Debugf("discovered %d packages", len(pkgs))
for i, p := range pkgs {
if cfg.DataGenerationConfig.GenerateCPEs {
if p.FoundBy == "" {
p.FoundBy = catalogerName
}
if cfg.DataGenerationConfig.GenerateCPEs && !hasAuthoritativeCPE(p.CPEs) {
// generate CPEs (note: this is excluded from package ID, so is safe to mutate)
// we might have binary classified CPE already with the package so we want to append here
dictionaryCPEs, ok := cpe.DictionaryFind(p)
dictionaryCPEs, ok := cpeutils.DictionaryFind(p)
if ok {
log.Tracef("used CPE dictionary to find CPEs for %s package %q: %s", p.Type, p.Name, dictionaryCPEs)
p.CPEs = append(p.CPEs, dictionaryCPEs...)
} else {
p.CPEs = append(p.CPEs, cpe.Generate(p)...)
p.CPEs = append(p.CPEs, cpeutils.Generate(p)...)
}
}
@ -145,7 +148,7 @@ func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string
t.Add(int64(len(pkgs)))
t.SetCompleted()
log.WithFields("name", c.Name()).Trace("package cataloger completed")
log.WithFields("name", catalogerName).Trace("package cataloger completed")
return nil
}
@ -154,6 +157,15 @@ func NewPackageTask(cfg CatalogingFactoryConfig, c pkg.Cataloger, tags ...string
return NewTask(c.Name(), fn, tags...)
}
func hasAuthoritativeCPE(cpes []cpe.CPE) bool {
for _, c := range cpes {
if c.Source != cpe.GeneratedSource {
return true
}
}
return false
}
func prettyName(s string) string {
if s == "" {
return ""

View file

@ -0,0 +1,55 @@
package task
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/anchore/syft/syft/cpe"
)
func Test_hasAuthoritativeCPE(t *testing.T) {
tests := []struct {
name string
cpes []cpe.CPE
want bool
}{
{
name: "no cpes",
cpes: []cpe.CPE{},
want: false,
},
{
name: "no authoritative cpes",
cpes: []cpe.CPE{
{
Source: cpe.GeneratedSource,
},
},
want: false,
},
{
name: "has declared (authoritative) cpe",
cpes: []cpe.CPE{
{
Source: cpe.DeclaredSource,
},
},
want: true,
},
{
name: "has lookup (authoritative) cpe",
cpes: []cpe.CPE{
{
Source: cpe.NVDDictionaryLookupSource,
},
},
want: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, tt.want, hasAuthoritativeCPE(tt.cpes))
})
}
}

View file

@ -21,6 +21,7 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/kernel"
"github.com/anchore/syft/syft/pkg/cataloger/lua"
"github.com/anchore/syft/syft/pkg/cataloger/nix"
"github.com/anchore/syft/syft/pkg/cataloger/ocaml"
"github.com/anchore/syft/syft/pkg/cataloger/php"
"github.com/anchore/syft/syft/pkg/cataloger/python"
"github.com/anchore/syft/syft/pkg/cataloger/r"
@ -29,9 +30,25 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/rust"
sbomCataloger "github.com/anchore/syft/syft/pkg/cataloger/sbom"
"github.com/anchore/syft/syft/pkg/cataloger/swift"
"github.com/anchore/syft/syft/pkg/cataloger/swipl"
"github.com/anchore/syft/syft/pkg/cataloger/wordpress"
)
const (
// Java ecosystem labels
Java = "java"
Maven = "maven"
// Go ecosystem labels
Go = "go"
Golang = "golang"
// JavaScript ecosystem labels
JavaScript = "javascript"
Node = "node"
NPM = "npm"
)
//nolint:funlen
func DefaultPackageTaskFactories() PackageTaskFactories {
return []packageTaskFactory{
@ -47,7 +64,7 @@ func DefaultPackageTaskFactories() PackageTaskFactories {
// language-specific package installed catalogers ///////////////////////////////////////////////////////////////////////////
newSimplePackageTaskFactory(cpp.NewConanInfoCataloger, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "cpp", "conan"),
newSimplePackageTaskFactory(javascript.NewPackageCataloger, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "javascript", "node"),
newSimplePackageTaskFactory(javascript.NewPackageCataloger, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, JavaScript, Node),
newSimplePackageTaskFactory(php.NewComposerInstalledCataloger, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "php", "composer"),
newSimplePackageTaskFactory(r.NewPackageCataloger, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "r"),
newSimplePackageTaskFactory(ruby.NewInstalledGemSpecCataloger, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "ruby", "gem", "gemspec"),
@ -65,20 +82,20 @@ func DefaultPackageTaskFactories() PackageTaskFactories {
func(cfg CatalogingFactoryConfig) pkg.Cataloger {
return golang.NewGoModuleFileCataloger(cfg.PackagesConfig.Golang)
},
pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "go", "golang", "gomod",
pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, Go, Golang, "gomod",
),
newSimplePackageTaskFactory(java.NewGradleLockfileCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "java", "gradle"),
newSimplePackageTaskFactory(java.NewGradleLockfileCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, Java, "gradle"),
newPackageTaskFactory(
func(cfg CatalogingFactoryConfig) pkg.Cataloger {
return java.NewPomCataloger(cfg.PackagesConfig.JavaArchive)
},
pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "java", "maven",
pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, Java, Maven,
),
newPackageTaskFactory(
func(cfg CatalogingFactoryConfig) pkg.Cataloger {
return javascript.NewLockCataloger(cfg.PackagesConfig.JavaScript)
},
pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "javascript", "node", "npm",
pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, JavaScript, Node, NPM,
),
newSimplePackageTaskFactory(php.NewComposerLockCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "php", "composer"),
newSimplePackageTaskFactory(php.NewPeclCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, pkgcataloging.ImageTag, "php", "pecl"),
@ -96,6 +113,8 @@ func DefaultPackageTaskFactories() PackageTaskFactories {
}, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "rust", "cargo"),
newSimplePackageTaskFactory(swift.NewCocoapodsCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "swift", "cocoapods"),
newSimplePackageTaskFactory(swift.NewSwiftPackageManagerCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "swift", "spm"),
newSimplePackageTaskFactory(swipl.NewSwiplPackCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "swipl", "pack"),
newSimplePackageTaskFactory(ocaml.NewOpamPackageManagerCataloger, pkgcataloging.DeclaredTag, pkgcataloging.DirectoryTag, pkgcataloging.LanguageTag, "ocaml", "opam"),
// language-specific package for both image and directory scans (but not necessarily declared) ////////////////////////////////////////
newSimplePackageTaskFactory(dotnet.NewDotnetPortableExecutableCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "dotnet", "c#", "binary"),
@ -104,15 +123,15 @@ func DefaultPackageTaskFactories() PackageTaskFactories {
func(cfg CatalogingFactoryConfig) pkg.Cataloger {
return golang.NewGoModuleBinaryCataloger(cfg.PackagesConfig.Golang)
},
pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "go", "golang", "gomod", "binary",
pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, Go, Golang, "gomod", "binary",
),
newPackageTaskFactory(
func(cfg CatalogingFactoryConfig) pkg.Cataloger {
return java.NewArchiveCataloger(cfg.PackagesConfig.JavaArchive)
},
pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "java", "maven",
pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, Java, Maven,
),
newSimplePackageTaskFactory(java.NewNativeImageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "java"),
newSimplePackageTaskFactory(java.NewNativeImageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, Java),
newSimplePackageTaskFactory(nix.NewStoreCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "nix"),
newSimplePackageTaskFactory(lua.NewPackageCataloger, pkgcataloging.DirectoryTag, pkgcataloging.InstalledTag, pkgcataloging.ImageTag, pkgcataloging.LanguageTag, "lua"),

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -1,6 +1,6 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "anchore.io/schema/syft/json/16.0.14/document",
"$id": "anchore.io/schema/syft/json/16.0.16/document",
"$ref": "#/$defs/Document",
"$defs": {
"AlpmDbEntry": {
@ -1436,6 +1436,50 @@
"files"
]
},
"OpamPackage": {
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"licenses": {
"items": {
"type": "string"
},
"type": "array"
},
"url": {
"type": "string"
},
"checksum": {
"items": {
"type": "string"
},
"type": "array"
},
"homepage": {
"type": "string"
},
"dependencies": {
"items": {
"type": "string"
},
"type": "array"
}
},
"type": "object",
"required": [
"name",
"version",
"licenses",
"url",
"checksum",
"homepage",
"dependencies"
]
},
"Package": {
"properties": {
"id": {
@ -1563,6 +1607,9 @@
{
"$ref": "#/$defs/NixStoreEntry"
},
{
"$ref": "#/$defs/OpamPackage"
},
{
"$ref": "#/$defs/PhpComposerInstalledEntry"
},
@ -1608,6 +1655,9 @@
{
"$ref": "#/$defs/SwiftPackageManagerLockEntry"
},
{
"$ref": "#/$defs/SwiplpackPackage"
},
{
"$ref": "#/$defs/WordpressPluginEntry"
}
@ -2504,6 +2554,48 @@
"revision"
]
},
"SwiplpackPackage": {
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"author": {
"type": "string"
},
"authorEmail": {
"type": "string"
},
"packager": {
"type": "string"
},
"packagerEmail": {
"type": "string"
},
"homepage": {
"type": "string"
},
"dependencies": {
"items": {
"type": "string"
},
"type": "array"
}
},
"type": "object",
"required": [
"name",
"version",
"author",
"authorEmail",
"packager",
"packagerEmail",
"homepage",
"dependencies"
]
},
"WordpressPluginEntry": {
"properties": {
"pluginInstallDirectory": {

View file

@ -175,7 +175,7 @@ func hasElfDynTag(f *elf.File, tag elf.DynTag) bool {
t = elf.DynTag(f.ByteOrder.Uint32(d[0:4]))
d = d[8:]
case elf.ELFCLASS64:
t = elf.DynTag(f.ByteOrder.Uint64(d[0:8]))
t = elf.DynTag(f.ByteOrder.Uint64(d[0:8])) //nolint:gosec
d = d[16:]
}
if t == tag {

View file

@ -0,0 +1,15 @@
.DEFAULT_GOAL := default
default:
@for dir in $(shell find . -mindepth 1 -maxdepth 1 -type d); do \
if [ -f "$$dir/Makefile" ]; then \
$(MAKE) -C $$dir; \
fi; \
done
%:
@for dir in $(shell find . -mindepth 1 -maxdepth 1 -type d); do \
if [ -f "$$dir/Makefile" ]; then \
$(MAKE) -C $$dir $@; \
fi; \
done

View file

@ -1,8 +1,19 @@
BIN=./bin
TOOL_IMAGE=localhost/syft-bin-build-tools:latest
VERIFY_FILE=actual_verify
FINGERPRINT_FILE=$(BIN).fingerprint
all: build verify
ifndef BIN
$(error BIN is not set)
endif
.DEFAULT_GOAL := fixtures
# requirement 1: 'fixtures' goal to generate any and all test fixtures
fixtures: build verify
# requirement 2: 'fingerprint' goal to determine if the fixture input that indicates any existing cache should be busted
fingerprint: $(FINGERPRINT_FILE)
tools-check:
@sha256sum -c Dockerfile.sha256 || (echo "Tools Dockerfile has changed" && exit 1)
@ -25,10 +36,14 @@ verify: tools
debug:
docker run -i --rm -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) bash
cache.fingerprint:
@find project Dockerfile Makefile -type f -exec md5sum {} + | awk '{print $1}' | sort | tee cache.fingerprint
# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
.PHONY: $(FINGERPRINT_FILE)
$(FINGERPRINT_FILE):
@find project Dockerfile Makefile -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
@#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
# requirement 4: 'clean' goal to remove all generated test fixtures
clean:
rm -f $(BIN)/*
rm -rf $(BIN) Dockerfile.sha256 $(VERIFY_FILE) $(FINGERPRINT_FILE)
.PHONY: build verify debug build-image build-bins clean dockerfile-check cache.fingerprint
.PHONY: tools tools-check build verify debug clean

View file

@ -1,8 +1,20 @@
BIN=./bin
TOOL_IMAGE=localhost/syft-shared-info-build-tools:latest
VERIFY_FILE=actual_verify
FINGERPRINT_FILE=$(BIN).fingerprint
ifndef BIN
$(error BIN is not set)
endif
.DEFAULT_GOAL := fixtures
# requirement 1: 'fixtures' goal to generate any and all test fixtures
fixtures: build
# requirement 2: 'fingerprint' goal to determine if the fixture input that indicates any existing cache should be busted
fingerprint: $(FINGERPRINT_FILE)
all: build
tools-check:
@sha256sum -c Dockerfile.sha256 || (echo "Tools Dockerfile has changed" && exit 1)
@ -10,16 +22,20 @@ tools:
@(docker inspect $(TOOL_IMAGE) > /dev/null && make tools-check) || (docker build -t $(TOOL_IMAGE) . && sha256sum Dockerfile > Dockerfile.sha256)
build: tools
mkdir -p $(BIN)
@mkdir -p $(BIN)
docker run --platform linux/amd64 -i -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) make
debug:
docker run --platform linux/amd64 -i --rm -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) bash
cache.fingerprint:
@find project Dockerfile Makefile -type f -exec md5sum {} + | awk '{print $1}' | sort | tee cache.fingerprint
# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
.PHONY: $(FINGERPRINT_FILE)
$(FINGERPRINT_FILE):
@find project Dockerfile Makefile -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
@#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
# requirement 4: 'clean' goal to remove all generated test fixtures
clean:
rm -f $(BIN)/*
rm -rf $(BIN) Dockerfile.sha256 $(VERIFY_FILE) $(FINGERPRINT_FILE)
.PHONY: build verify debug build-image build-bins clean dockerfile-check cache.fingerprint
.PHONY: tools tools-check build debug clean

View file

@ -21,7 +21,7 @@ type LicenseEvidence struct {
func NewLicense(value string) License {
spdxExpression, err := license.ParseExpression(value)
if err != nil {
log.Trace("unable to parse license expression: %s, %w", value, err)
log.WithFields("error", err, "value", value).Trace("unable to parse license expression")
}
return License{

View file

@ -211,6 +211,12 @@ func toBomProperties(srcMetadata source.Description) *[]cyclonedx.Property {
metadata, ok := srcMetadata.Metadata.(source.ImageMetadata)
if ok {
props := helpers.EncodeProperties(metadata.Labels, "syft:image:labels")
// return nil if props is nil to avoid creating a pointer to a nil slice,
// which results in a null JSON value that does not comply with the CycloneDX schema.
// https://github.com/anchore/grype/issues/1759
if props == nil {
return nil
}
return &props
}
return nil
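The comment in this hunk carries the rationale: EncodeProperties can return a nil slice, and returning a pointer to that nil slice serializes as JSON null, which the CycloneDX schema rejects. A self-contained demonstration of the difference (the structs below are stand-ins, not the real cyclonedx-go types):

package main

import (
	"encoding/json"
	"fmt"
)

type property struct {
	Name  string `json:"name"`
	Value string `json:"value"`
}

type bom struct {
	Properties *[]property `json:"properties,omitempty"`
}

func main() {
	var empty []property // nil slice, e.g. what EncodeProperties may return

	withPointer, _ := json.Marshal(bom{Properties: &empty})
	withoutPointer, _ := json.Marshal(bom{Properties: nil})

	fmt.Println(string(withPointer))    // {"properties":null}  <- fails schema validation
	fmt.Println(string(withoutPointer)) // {}
}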

View file

@ -236,6 +236,53 @@ func Test_toBomDescriptor(t *testing.T) {
}
}
func Test_toBomProperties(t *testing.T) {
tests := []struct {
name string
srcMetadata source.Description
props *[]cyclonedx.Property
}{
{
name: "ImageMetadata without labels",
srcMetadata: source.Description{
Metadata: source.ImageMetadata{
Labels: map[string]string{},
},
},
props: nil,
},
{
name: "ImageMetadata with labels",
srcMetadata: source.Description{
Metadata: source.ImageMetadata{
Labels: map[string]string{
"label1": "value1",
"label2": "value2",
},
},
},
props: &[]cyclonedx.Property{
{Name: "syft:image:labels:label1", Value: "value1"},
{Name: "syft:image:labels:label2", Value: "value2"},
},
},
{
name: "not ImageMetadata",
srcMetadata: source.Description{
Metadata: source.FileMetadata{},
},
props: nil,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
t.Parallel()
props := toBomProperties(test.srcMetadata)
require.Equal(t, test.props, props)
})
}
}
func Test_toOsComponent(t *testing.T) {
tests := []struct {
name string

View file

@ -16,6 +16,7 @@ import (
"github.com/anchore/packageurl-go"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/mimetype"
"github.com/anchore/syft/internal/relationship"
"github.com/anchore/syft/internal/spdxlicense"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
@ -45,9 +46,10 @@ const (
func ToFormatModel(s sbom.SBOM) *spdx.Document {
name, namespace := helpers.DocumentNameAndNamespace(s.Source, s.Descriptor)
packages := toPackages(s.Artifacts.Packages, s)
rels := relationship.NewIndex(s.Relationships...)
packages := toPackages(rels, s.Artifacts.Packages, s)
relationships := toRelationships(s.RelationshipsSorted())
allRelationships := toRelationships(rels.All())
// for valid SPDX we need a document describes relationship
describesID := spdx.ElementID("DOCUMENT")
@ -57,7 +59,7 @@ func ToFormatModel(s sbom.SBOM) *spdx.Document {
describesID = rootPackage.PackageSPDXIdentifier
// add all relationships from the document root to all other packages
relationships = append(relationships, toRootRelationships(rootPackage, packages)...)
allRelationships = append(allRelationships, toRootRelationships(rootPackage, packages)...)
// append the root package
packages = append(packages, rootPackage)
@ -75,7 +77,7 @@ func ToFormatModel(s sbom.SBOM) *spdx.Document {
}
// add the root document relationship
relationships = append(relationships, documentDescribesRelationship)
allRelationships = append(allRelationships, documentDescribesRelationship)
return &spdx.Document{
// 6.1: SPDX Version; should be in the format "SPDX-x.x"
@ -150,7 +152,7 @@ func ToFormatModel(s sbom.SBOM) *spdx.Document {
},
Packages: packages,
Files: toFiles(s),
Relationships: relationships,
Relationships: allRelationships,
OtherLicenses: toOtherLicenses(s.Artifacts.Packages),
}
}
@ -302,7 +304,7 @@ func toSPDXID(identifiable artifact.Identifiable) spdx.ElementID {
// packages populates all Package Information from the package Collection (see https://spdx.github.io/spdx-spec/3-package-information/)
//
//nolint:funlen
func toPackages(catalog *pkg.Collection, sbom sbom.SBOM) (results []*spdx.Package) {
func toPackages(rels *relationship.Index, catalog *pkg.Collection, sbom sbom.SBOM) (results []*spdx.Package) {
for _, p := range catalog.Sorted() {
// name should be guaranteed to be unique, but semantically useful and stable
id := toSPDXID(p)
@ -318,7 +320,7 @@ func toPackages(catalog *pkg.Collection, sbom sbom.SBOM) (results []*spdx.Packag
// 2. syft has generated a sha1 digest for the package's contents
packageChecksums, filesAnalyzed := toPackageChecksums(p)
packageVerificationCode := newPackageVerificationCode(p, sbom)
packageVerificationCode := newPackageVerificationCode(rels, p, sbom)
if packageVerificationCode != nil {
filesAnalyzed = true
}
@ -503,6 +505,14 @@ func toPackageChecksums(p pkg.Package) ([]spdx.Checksum, bool) {
Algorithm: spdx.ChecksumAlgorithm(algo),
Value: hexStr,
})
case pkg.OpamPackage:
for _, checksum := range meta.Checksums {
parts := strings.Split(checksum, "=")
checksums = append(checksums, spdx.Checksum{
Algorithm: spdx.ChecksumAlgorithm(strings.ToUpper(parts[0])),
Value: parts[1],
})
}
}
return checksums, filesAnalyzed
}
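The new OpamPackage case splits checksum entries of the form "algo=hexdigest" and upper-cases the algorithm for the SPDX checksum list. A small sketch of that parsing in isolation; the ok/length guard is an extra defensive check added here for illustration, while the hunk above assumes well-formed input:

package main

import (
	"fmt"
	"strings"
)

// splitChecksum parses an opam-style "algo=hexdigest" string into its parts.
func splitChecksum(raw string) (algorithm, value string, ok bool) {
	parts := strings.SplitN(raw, "=", 2)
	if len(parts) != 2 || parts[0] == "" || parts[1] == "" {
		return "", "", false
	}
	return strings.ToUpper(parts[0]), parts[1], true
}

func main() {
	for _, c := range []string{
		"sha256=f5f1c0b4ad2e0dfa6f79eaaaa3586411925c16f61702208ddd4bad2fc17dc47c",
		"malformed-entry",
	} {
		if algo, val, ok := splitChecksum(c); ok {
			fmt.Printf("%s: %s\n", algo, val)
		} else {
			fmt.Printf("skipping malformed checksum %q\n", c)
		}
	}
}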
@ -744,12 +754,12 @@ func toOtherLicenses(catalog *pkg.Collection) []*spdx.OtherLicense {
// if a file is an "excludes" file, skip it /* exclude SPDX analysis file(s) */
// see: https://spdx.github.io/spdx-spec/v2.3/package-information/#79-package-verification-code-field
// the above link contains the SPDX algorithm for a package verification code
func newPackageVerificationCode(p pkg.Package, sbom sbom.SBOM) *spdx.PackageVerificationCode {
func newPackageVerificationCode(rels *relationship.Index, p pkg.Package, sbom sbom.SBOM) *spdx.PackageVerificationCode {
// key off of the contains relationship;
// spdx validator will fail if a package claims to contain a file but no sha1 provided
// if a sha1 for a file is provided then the validator will fail if the package does not have
// a package verification code
coordinates := sbom.CoordinatesForPackage(p, artifact.ContainsRelationship)
coordinates := rels.Coordinates(p, artifact.ContainsRelationship)
var digests []file.Digest
for _, c := range coordinates {
digest := sbom.Artifacts.FileDigests[c]

View file

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/anchore/syft/internal/relationship"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/format/internal/spdxutil/helpers"
@ -334,6 +335,31 @@ func Test_toPackageChecksums(t *testing.T) {
},
filesAnalyzed: false,
},
{
name: "Opam Package",
pkg: pkg.Package{
Name: "test",
Version: "1.0.0",
Language: pkg.Go,
Metadata: pkg.OpamPackage{
Checksums: []string{
"sha256=f5f1c0b4ad2e0dfa6f79eaaaa3586411925c16f61702208ddd4bad2fc17dc47c",
"sha512=05a359dc8400d4ca200ff255dbd030acd33d2c4acb5020838f772c02cdb5f243f3dbafbc43a8cd51e6b5923a140f84c9e7ea25b2c0fa277bb68b996190d36e3b",
},
},
},
expected: []spdx.Checksum{
{
Algorithm: "SHA256",
Value: "f5f1c0b4ad2e0dfa6f79eaaaa3586411925c16f61702208ddd4bad2fc17dc47c",
},
{
Algorithm: "SHA512",
Value: "05a359dc8400d4ca200ff255dbd030acd33d2c4acb5020838f772c02cdb5f243f3dbafbc43a8cd51e6b5923a140f84c9e7ea25b2c0fa277bb68b996190d36e3b",
},
},
filesAnalyzed: false,
},
{
name: "Package with no metadata type",
pkg: pkg.Package{
@ -665,7 +691,7 @@ func Test_H1Digest(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
catalog := pkg.NewCollection(test.pkg)
pkgs := toPackages(catalog, s)
pkgs := toPackages(relationship.NewIndex(), catalog, s)
require.Len(t, pkgs, 1)
for _, p := range pkgs {
if test.expectedDigest == "" {

View file

@ -124,7 +124,7 @@ func decodePackageMetadata(vals map[string]string, c *cyclonedx.Component, typeN
if metadataType == nil {
return nil
}
metaPtrTyp := reflect.PtrTo(metadataType)
metaPtrTyp := reflect.PointerTo(metadataType)
metaPtr := Decode(metaPtrTyp, vals, "syft:metadata", CycloneDXFields)
// Map all explicit metadata properties

View file

@ -39,12 +39,23 @@ func ToSyftModel(bom *cyclonedx.BOM) (*sbom.SBOM, error) {
}
func collectBomPackages(bom *cyclonedx.BOM, s *sbom.SBOM, idMap map[string]interface{}) error {
if bom.Components == nil {
componentsPresent := false
if bom.Components != nil {
for i := range *bom.Components {
collectPackages(&(*bom.Components)[i], s, idMap)
}
componentsPresent = true
}
if bom.Metadata != nil && bom.Metadata.Component != nil {
collectPackages(bom.Metadata.Component, s, idMap)
componentsPresent = true
}
if !componentsPresent {
return fmt.Errorf("no components are defined in the CycloneDX BOM")
}
for i := range *bom.Components {
collectPackages(&(*bom.Components)[i], s, idMap)
}
return nil
}

View file

@ -54,17 +54,14 @@ func decodeLicenses(c *cyclonedx.Component) []pkg.License {
}
for _, l := range *c.Licenses {
if l.License == nil {
continue
}
// these fields are mutually exclusive in the spec
switch {
case l.License.ID != "":
case l.License != nil && l.License.ID != "":
licenses = append(licenses, pkg.NewLicenseFromURLs(l.License.ID, l.License.URL))
case l.License.Name != "":
case l.License != nil && l.License.Name != "":
licenses = append(licenses, pkg.NewLicenseFromURLs(l.License.Name, l.License.URL))
case l.Expression != "":
licenses = append(licenses, pkg.NewLicenseFromURLs(l.Expression, l.License.URL))
licenses = append(licenses, pkg.NewLicense(l.Expression))
default:
}
}
@ -72,7 +69,6 @@ func decodeLicenses(c *cyclonedx.Component) []pkg.License {
return licenses
}
// nolint:funlen
func separateLicenses(p pkg.Package) (spdx, other cyclonedx.Licenses, expressions []string) {
ex := make([]string, 0)
spdxc := cyclonedx.Licenses{}

View file

@ -254,7 +254,8 @@ func TestDecodeLicenses(t *testing.T) {
input: &cyclonedx.Component{
Licenses: &cyclonedx.Licenses{
{
License: &cyclonedx.License{},
// CycloneDX specification doesn't allow to provide License if Expression is provided
License: nil,
Expression: "MIT AND GPL-3.0-only WITH Classpath-exception-2.0",
},
},
@ -264,7 +265,6 @@ func TestDecodeLicenses(t *testing.T) {
Value: "MIT AND GPL-3.0-only WITH Classpath-exception-2.0",
SPDXExpression: "MIT AND GPL-3.0-only WITH Classpath-exception-2.0",
Type: license.Declared,
URLs: []string{},
},
},
},

View file

@ -165,7 +165,7 @@ func Decode(typ reflect.Type, values map[string]string, prefix string, fn FieldN
isSlice := false
if typ.Kind() == reflect.Slice {
typ = reflect.PtrTo(typ)
typ = reflect.PointerTo(typ)
isSlice = true
}

View file

@ -36,6 +36,8 @@ func DownloadLocation(p pkg.Package) string {
return NoneIfEmpty(metadata.Dist.URL)
case pkg.PhpComposerInstalledEntry:
return NoneIfEmpty(metadata.Dist.URL)
case pkg.OpamPackage:
return NoneIfEmpty(metadata.URL)
}
}
return NOASSERTION

View file

@ -34,7 +34,7 @@ const (
//
// Available options are: <omit>, NOASSERTION, Person: <person>, Organization: <org>
// return values are: <type>, <value>
func Originator(p pkg.Package) (typ string, author string) { // nolint: funlen
func Originator(p pkg.Package) (typ string, author string) { //nolint: funlen
if !hasMetadata(p) {
return typ, author
}
@ -56,6 +56,10 @@ func Originator(p pkg.Package) (typ string, author string) { // nolint: funlen
if author == "" {
author = metadata.Manifest.Main.MustGet("Implementation-Vendor")
}
// Vendor is specified, hence set 'Organization' as the PackageSupplier
if author != "" {
typ = orgType
}
}
case pkg.LinuxKernelModule:
@ -103,6 +107,8 @@ func Originator(p pkg.Package) (typ string, author string) { // nolint: funlen
// it seems that the vast majority of the time the author is an org, not a person
typ = orgType
author = metadata.Author
case pkg.SwiplPackEntry:
author = formatPersonOrOrg(metadata.Author, metadata.AuthorEmail)
}
if typ == "" && author != "" {
@ -144,6 +150,10 @@ func Supplier(p pkg.Package) (typ string, author string) {
author = metadata.Packager
}
if metadata, ok := p.Metadata.(pkg.SwiplPackEntry); ok {
author = formatPersonOrOrg(metadata.Packager, metadata.PackagerEmail)
}
if author == "" {
// TODO: this uses the Originator function for now until a better distinction can be made for supplier
return Originator(p)

View file

@ -41,6 +41,8 @@ func Test_OriginatorSupplier(t *testing.T) {
pkg.RustBinaryAuditEntry{},
cargo.RustCargoLockEntry{},
pkg.SwiftPackageManagerResolvedEntry{},
pkg.SwiplPackEntry{},
pkg.OpamPackage{},
pkg.YarnLockEntry{},
)
tests := []struct {
@ -138,8 +140,8 @@ func Test_OriginatorSupplier(t *testing.T) {
},
},
},
originator: "Person: auth-spec",
supplier: "Person: auth-spec",
originator: "Organization: auth-spec",
supplier: "Organization: auth-spec",
},
{
name: "from java -- fallback to impl vendor in main manifest section",
@ -155,8 +157,8 @@ func Test_OriginatorSupplier(t *testing.T) {
},
},
},
originator: "Person: auth-impl",
supplier: "Person: auth-impl",
originator: "Organization: auth-impl",
supplier: "Organization: auth-impl",
},
{
name: "from java -- non-main manifest sections ignored",
@ -337,6 +339,27 @@ func Test_OriginatorSupplier(t *testing.T) {
originator: "Organization: auth",
supplier: "Organization: auth",
},
{
name: "from swipl pack",
input: pkg.Package{
Metadata: pkg.SwiplPackEntry{
Author: "auth",
AuthorEmail: "auth@auth.gov",
Packager: "me",
PackagerEmail: "me@auth.com",
},
},
originator: "Person: auth (auth@auth.gov)",
supplier: "Person: me (me@auth.com)",
},
{
name: "from ocaml opam",
input: pkg.Package{
Metadata: pkg.OpamPackage{},
},
originator: "",
supplier: "",
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
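The new swipl pack case relies on formatPersonOrOrg to render an author or packager with an optional email; the expected values above ("Person: auth (auth@auth.gov)", "Person: me (me@auth.com)") pin down the output shape. A hypothetical helper producing that "name (email)" shape, shown only to make the expected strings concrete (not syft's actual implementation):

package main

import "fmt"

// formatPersonOrOrg joins a name and an optional email into "name (email)",
// matching the strings asserted in Test_OriginatorSupplier above.
func formatPersonOrOrg(name, email string) string {
	if name == "" {
		return ""
	}
	if email == "" {
		return name
	}
	return fmt.Sprintf("%s (%s)", name, email)
}

func main() {
	fmt.Println(formatPersonOrOrg("auth", "auth@auth.gov")) // auth (auth@auth.gov)
}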

View file

@ -62,6 +62,10 @@ func SourceInfo(p pkg.Package) string {
answer = "acquired package info from Rockspec package file"
case pkg.SwiftPkg:
answer = "acquired package info from resolved Swift package manifest"
case pkg.SwiplPackPkg:
answer = "acquired package info from SWI Prolo pack package file"
case pkg.OpamPkg:
answer = "acquired package info from OCaml opam package file"
case pkg.GithubActionPkg, pkg.GithubActionWorkflowPkg:
answer = "acquired package info from GitHub Actions workflow file or composite action file"
case pkg.WordpressPluginPkg:

View file

@ -263,6 +263,22 @@ func Test_SourceInfo(t *testing.T) {
"from resolved Swift package manifest",
},
},
{
input: pkg.Package{
Type: pkg.SwiplPackPkg,
},
expected: []string{
"acquired package info from SWI Prolo pack package file",
},
},
{
input: pkg.Package{
Type: pkg.OpamPkg,
},
expected: []string{
"acquired package info from OCaml opam package file",
},
},
{
input: pkg.Package{
Type: pkg.GithubActionPkg,

View file

@ -146,7 +146,7 @@ func safeFileModeConvert(val int) (fs.FileMode, error) {
if err != nil {
return 0, err
}
return os.FileMode(mode), nil
return os.FileMode(mode), nil //nolint:gosec
}
func toSyftLicenses(m []model.License) (p []pkg.License) {
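The //nolint:gosec annotations added in this commit (here and in the ELF DynTag hunk earlier) suppress gosec's integer-conversion findings, such as the int to fs.FileMode cast above. A hedged sketch of the alternative, an explicit bounds check before converting, purely for illustration and not the approach the commit takes:

package main

import (
	"fmt"
	"io/fs"
	"math"
)

// safeFileMode rejects values that cannot fit in the underlying uint32 before
// converting, which avoids the overflow concern the linter flags.
func safeFileMode(mode int) (fs.FileMode, error) {
	if mode < 0 || int64(mode) > int64(math.MaxUint32) {
		return 0, fmt.Errorf("mode %d out of range for fs.FileMode", mode)
	}
	return fs.FileMode(uint32(mode)), nil
}

func main() {
	m, err := safeFileMode(0o755)
	fmt.Println(m, err) // -rwxr-xr-x <nil>
}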

View file

@ -1,4 +0,0 @@
# Note: changes to this file will result in updating several test values. Consider making a new image fixture instead of editing this one.
FROM scratch
ADD file-1.txt /somefile-1.txt
ADD file-2.txt /somefile-2.txt

Some files were not shown because too many files have changed in this diff