Normalize release assets and refactor install.sh (#630)

* refactor release to keep snapshot assets in parity with release assets

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* refactor install.sh and put under test

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* tidy go.sum

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* add mac acceptance test to github actions workflow

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* rm use of goreleaser in cli tests

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>

* go mod tidy with go 1.17

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
This commit is contained in:
Alex Goodman 2022-02-11 14:24:25 -05:00 committed by GitHub
parent d2dba7d14a
commit 5aa85338d6
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
44 changed files with 1924 additions and 1078 deletions

View file

@ -0,0 +1,3 @@
dev-pki
log
signing-identity.txt

10
.github/scripts/apple-signing/cleanup.sh vendored Executable file
View file

@ -0,0 +1,10 @@
#!/usr/bin/env bash
# Remove the ephemeral developer signing material (dev keychain + generated
# cert files) created by setup-dev.sh. Intended for local development only.
set -eu

# grab utilities
# resolve the directory containing this script so sibling scripts can be
# sourced regardless of the caller's working directory
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
. "$SCRIPT_DIR"/utils.sh

# cleanup any dev certs left behind
# NOTE: setup-dev.sh is sourced (not executed) so that cleanup_signing and the
# keychain/file path variables it defines become available in this shell; its
# top-level variable assignments run here as a side effect.
. "$SCRIPT_DIR"/setup-dev.sh
cleanup_signing

53
.github/scripts/apple-signing/notarize.sh vendored Executable file
View file

@ -0,0 +1,53 @@
#!/usr/bin/env bash
# Notarize a signed macOS release binary with Apple via gon.
# Required env: AC_USERNAME, AC_PASSWORD (Apple Connect credentials).
# Expects utils.sh to already be sourced (uses title / exit_with_error).

# relax nounset while probing required env vars so an unset var yields a
# friendly error message instead of an abort from `set -u`
set +xu
if [ -z "$AC_USERNAME" ]; then
  exit_with_error "AC_USERNAME not set"
fi

if [ -z "$AC_PASSWORD" ]; then
  exit_with_error "AC_PASSWORD not set"
fi
set -u

# notarize [binary-path]
#
# zips the given (already signed) binary, submits the zip to Apple for
# notarization via gon, and removes the temporary zip + config afterwards
#
notarize() {
  binary_path=$1
  archive_path=${binary_path}-archive-for-notarization.zip

  title "archiving release binary into ${archive_path}"

  parent=$(dirname "$binary_path")
  (
    # zip from the parent dir so the archive contains only the basename entry
    # (quoting the basename argument keeps paths with spaces intact)
    cd "${parent}" && zip "${archive_path}" "$(basename "${binary_path}")"
  )

  if [ ! -f "$archive_path" ]; then
    exit_with_error "cannot find payload for notarization: $archive_path"
  fi

  # install gon if not already available (command -v is the portable probe)
  command -v gon || (brew tap mitchellh/gon && brew install mitchellh/gon/gon)

  # create config (note: json via stdin with gon is broken, can only use HCL from file)
  hcl_file=$(mktemp).hcl

  cat <<EOF > "$hcl_file"
notarize {
  path = "$archive_path"
  bundle_id = "com.anchore.toolbox.grype"
}

apple_id {
  username = "$AC_USERNAME"
  password = "@env:AC_PASSWORD"
}
EOF

  gon -log-level info "$hcl_file"

  rm "${hcl_file}" "${archive_path}"
}

171
.github/scripts/apple-signing/setup-dev.sh vendored Executable file
View file

@ -0,0 +1,171 @@
#!/usr/bin/env bash
# Create ephemeral, self-signed developer certificate material for signing
# snapshot builds. This is dev-only material and is never used for releases.
set -eu

# base name shared by the dev keychain, generated cert files, and identity
NAME=grype-dev
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
# scratch directory for generated PKI material (deleted once imported)
DIR=$SCRIPT_DIR/dev-pki
FILE_PREFIX=$DIR/$NAME
# common name of the self-signed cert; later exported as MAC_SIGNING_IDENTITY
IDENTITY=${NAME}-id-415d8c69793

## OpenSSL material
# throwaway passwords: this material is disposable and not a secret
KEY_PASSWORD="letthedevin"
P12_PASSWORD="popeofnope"
KEY_FILE=$FILE_PREFIX-key.pem
CSR_FILE=$FILE_PREFIX-csr.pem
CERT_FILE=$FILE_PREFIX-cert.pem
EXT_FILE=$FILE_PREFIX-ext.cnf
P12_FILE=$FILE_PREFIX.p12
# section name in the openssl config holding the codesigning x509 extensions
EXT_SECTION=codesign_reqext

## Keychain material
KEYCHAIN_NAME=$NAME
# macOS stores keychain files with a "-db" suffix under ~/Library/Keychains
KEYCHAIN_PATH=$HOME/Library/Keychains/$KEYCHAIN_NAME-db
KEYCHAIN_PASSWORD="topsykretts"
function setup_signing() {
# check to see if this has already been done... if so, bail!
set +ue
if security find-identity -p codesigning "$KEYCHAIN_PATH" | grep $IDENTITY ; then
export MAC_SIGNING_IDENTITY=$IDENTITY
commentary "skipping creating dev certificate material (already exists)"
commentary "setting MAC_SIGNING_IDENTITY=${IDENTITY}"
return 0
fi
set -ue
title "setting up developer certificate material"
mkdir -p "${DIR}"
# configure the openssl extensions
cat << EOF > $EXT_FILE
[ req ]
default_bits = 2048 # RSA key size
encrypt_key = yes # Protect private key
default_md = sha256 # MD to use
utf8 = yes # Input is UTF-8
string_mask = utf8only # Emit UTF-8 strings
prompt = yes # Prompt for DN
distinguished_name = codesign_dn # DN template
req_extensions = $EXT_SECTION # Desired extensions
[ codesign_dn ]
commonName = $IDENTITY
commonName_max = 64
[ $EXT_SECTION ]
keyUsage = critical,digitalSignature
extendedKeyUsage = critical,codeSigning
subjectKeyIdentifier = hash
EOF
title "create the private key"
openssl genrsa \
-des3 \
-out "$KEY_FILE" \
-passout "pass:$KEY_PASSWORD" \
2048
title "create the csr"
openssl req \
-new \
-key "$KEY_FILE" \
-out "$CSR_FILE" \
-passin "pass:$KEY_PASSWORD" \
-config "$EXT_FILE" \
-subj "/CN=$IDENTITY"
commentary "verify the csr: we should see X509 v3 extensions for codesigning in the CSR"
openssl req -in "$CSR_FILE" -noout -text | grep -A1 "X509v3" || exit_with_error "could not find x509 extensions in CSR"
title "create the certificate"
# note: Extensions in certificates are not transferred to certificate requests and vice versa. This means that
# just because the CSR has x509 v3 extensions doesn't mean that you'll see these extensions in the cert output.
# To prove this do:
# openssl x509 -text -noout -in server.crt | grep -A10 "X509v3 extensions:"
# ... and you will see no output (if -extensions is not used). (see https://www.openssl.org/docs/man1.1.0/man1/x509.html#BUGS)
# To get the extensions, use "-extensions codesign_reqext" when creating the cert. The codesign_reqext value matches
# the section name in the ext file used in CSR / cert creation (-extfile and -config).
openssl x509 \
-req \
-days 10000 \
-in "$CSR_FILE" \
-signkey "$KEY_FILE" \
-out "$CERT_FILE" \
-extfile "$EXT_FILE" \
-passin "pass:$KEY_PASSWORD" \
-extensions $EXT_SECTION
commentary "verify the certificate: we should see our extensions"
openssl x509 -text -noout -in $CERT_FILE | grep -A1 'X509v3' || exit_with_error "could not find x509 extensions in certificate"
title "export cert and private key to .p12 file"
# note: this step may be entirely optional, however, I found it useful to follow the prod path which goes the route of using a p12
openssl pkcs12 \
-export \
-out "$P12_FILE" \
-inkey "$KEY_FILE" \
-in "$CERT_FILE" \
-passin "pass:$KEY_PASSWORD" \
-passout "pass:$P12_PASSWORD"
title "create the dev keychain"
# delete the keychain if it already exists
if [ -f "$(KEYCHAIN_PATH)" ]; then
security delete-keychain "$KEYCHAIN_NAME" &> /dev/null
fi
security create-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_NAME"
set +e
if ! security verify-cert -k "$KEYCHAIN_PATH" -c "$CERT_FILE" &> /dev/null; then
set -e
title "import the cert into the dev keychain if it is not already trusted by the system"
security import "$P12_FILE" -P $P12_PASSWORD -f pkcs12 -k "$KEYCHAIN_PATH" -T /usr/bin/codesign
# note: set the partition list for this certificate's private key to include "apple-tool:" and "apple:" allows the codesign command to access this keychain item without an interactive user prompt.
security set-key-partition-list -S "apple-tool:,apple:,codesign:" -s -k "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH"
# note: add-trusted-cert requires user interaction
commentary "adding the developer certificate as a trusted certificate... (requires user interaction)"
security add-trusted-cert -d -r trustRoot -k "$KEYCHAIN_PATH" "$CERT_FILE"
else
set -e
commentary "...dev cert has already been imported onto the dev keychain"
fi
# remove any generated cert material since the keychain now has all of this material loaded
rm -rf "${DIR}"
commentary "make certain there are identities that can be used for code signing"
security find-identity -p codesigning "$KEYCHAIN_PATH" | grep -C 30 "$IDENTITY" || exit_with_error "could not find identity that can be used with codesign"
title "add the dev keychain to the search path for codesign"
add_keychain $KEYCHAIN_NAME
commentary "verify the keychain actually shows up"
security list-keychains | grep "$KEYCHAIN_NAME" || exit_with_error "could not find new keychain"
export MAC_SIGNING_IDENTITY=$IDENTITY
commentary "setting MAC_SIGNING_IDENTITY=${IDENTITY}"
}
# cleanup_signing
#
# delete the dev keychain and any generated certificate material on disk
function cleanup_signing() {
  title "delete the dev keychain and all certificate material"
  # enable xtrace so each destructive command is echoed; -ue keeps strict mode
  # NOTE(review): under -e this aborts (skipping the rm steps below) if the
  # keychain does not exist -- confirm that is intended for a cleanup routine
  set -xue
  security delete-keychain "$KEYCHAIN_NAME"
  rm -f "$KEYCHAIN_PATH"
  rm -rf "${DIR}"
}

56
.github/scripts/apple-signing/setup-prod.sh vendored Executable file
View file

@ -0,0 +1,56 @@
#!/usr/bin/env bash
# Prepare production macOS code-signing material (CI only).
# Required env: APPLE_DEVELOPER_ID_CERT, APPLE_DEVELOPER_ID_CERT_PASS,
#               DOCKER_USERNAME, DOCKER_PASSWORD.
set -eu

# refuse to run outside the CI environment (helper defined in utils.sh,
# which is sourced before this script by setup.sh)
assert_in_ci

# relax nounset while probing required env vars so an unset var produces a
# friendly error message instead of an abort from `set -u`
set +xu
if [ -z "$APPLE_DEVELOPER_ID_CERT" ]; then
  exit_with_error "APPLE_DEVELOPER_ID_CERT not set"
fi

if [ -z "$APPLE_DEVELOPER_ID_CERT_PASS" ]; then
  exit_with_error "APPLE_DEVELOPER_ID_CERT_PASS not set"
fi

if [ -z "$DOCKER_USERNAME" ]; then
  exit_with_error "DOCKER_USERNAME not set"
fi

if [ -z "$DOCKER_PASSWORD" ]; then
  exit_with_error "DOCKER_PASSWORD not set"
fi
set -u

# setup_signing
#
# preps the MAC_SIGNING_IDENTITY env var for use in the signing process, using production certificate material
#
setup_signing() {
  title "setting up production certificate material"

  # Write signing certificate to disk from environment variable.
  # (the cert is stored base64-encoded in the CI secret)
  cert_file="$HOME/developer_id_certificate.p12"
  echo -n "$APPLE_DEVELOPER_ID_CERT" | base64 --decode > "$cert_file"

  # In order to have all keychain interactions avoid an interactive user prompt, we need to control the password for the keychain in question, which means we need to create a new keychain into which we'll import the signing certificate and from which we'll later access this certificate during code signing.
  ephemeral_keychain="ci-ephemeral-keychain"
  # random throwaway password; it is only ever used within this job
  ephemeral_keychain_password="$(openssl rand -base64 100)"
  security create-keychain -p "${ephemeral_keychain_password}" "${ephemeral_keychain}"

  # Import signing certificate into the keychain. (This is a pre-requisite for gon, which is invoked via goreleaser.)
  ephemeral_keychain_full_path="$HOME/Library/Keychains/${ephemeral_keychain}-db"
  security import "${cert_file}" -k "${ephemeral_keychain_full_path}" -P "${APPLE_DEVELOPER_ID_CERT_PASS}" -T "$(command -v codesign)"

  # Setting the partition list for this certificate's private key to include "apple-tool:" and "apple:" allows the codesign command to access this keychain item without an interactive user prompt. (codesign is invoked by gon.)
  security set-key-partition-list -S "apple-tool:,apple:" -s -k "${ephemeral_keychain_password}" "${ephemeral_keychain_full_path}"

  # Make this new keychain the user's default keychain, so that codesign will be able to find this certificate when we specify it during signing.
  security default-keychain -d "user" -s "${ephemeral_keychain_full_path}"

  # TODO: extract this from the certificate material itself
  export MAC_SIGNING_IDENTITY="Developer ID Application: ANCHORE, INC. (9MJHKYX5AT)"
  commentary "setting MAC_SIGNING_IDENTITY=${MAC_SIGNING_IDENTITY}"

  commentary "log into docker -- required for publishing (since the default keychain has now been replaced)"
  echo "${DOCKER_PASSWORD}" | docker login docker.io -u "${DOCKER_USERNAME}" --password-stdin
}

46
.github/scripts/apple-signing/setup.sh vendored Executable file
View file

@ -0,0 +1,46 @@
#!/usr/bin/env bash
# Entry point for preparing macOS code-signing material before a build.
# Usage: setup.sh <is-snapshot>   (set SKIP_SIGNING to bypass entirely)
set -eu

# "1"/"true"/"yes" => dev (snapshot) setup; "0"/"false"/"no" => production setup
IS_SNAPSHOT="$1"

## grab utilities
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
. "$SCRIPT_DIR"/utils.sh

mkdir -p "$SCRIPT_DIR/log"

main() {
  # source the setup flavor; each script defines a setup_signing function
  case "$IS_SNAPSHOT" in
    "1" | "true" | "yes")
      commentary "assuming development setup..."
      . "$SCRIPT_DIR"/setup-dev.sh
      ;;
    "0" | "false" | "no")
      commentary "assuming production setup..."
      . "$SCRIPT_DIR"/setup-prod.sh
      ;;
    *)
      exit_with_error "could not determine if this was a production build (isSnapshot='$IS_SNAPSHOT')"
      ;;
  esac

  # load up all signing material into a keychain (note: this should set the MAC_SIGNING_IDENTITY env var)
  setup_signing

  # write out identity to a file
  # (main runs inside the subshell pipeline below, so the exported env var
  # would not survive this script; the file is how later steps -- sign.sh --
  # read the chosen identity)
  echo -n "$MAC_SIGNING_IDENTITY" > "$SCRIPT_DIR/$SIGNING_IDENTITY_FILENAME"
}

# capture all output from a subshell to log output additionally to a file (as well as the terminal)
( (
  # SKIP_SIGNING may be unset by callers; relax nounset for the check
  set +u
  if [ -n "$SKIP_SIGNING" ]; then
    commentary "skipping signing setup..."
  else
    set -u
    main
  fi
) 2>&1) | tee "$SCRIPT_DIR/log/setup.txt"

94
.github/scripts/apple-signing/sign.sh vendored Executable file
View file

@ -0,0 +1,94 @@
#!/usr/bin/env bash
# Sign (and optionally notarize) a single release binary.
# Invoked as a goreleaser post-build hook:
#   sign.sh <binary-path> <is-snapshot> <target-name>
set -eu -o pipefail

# path to the binary to sign
BINARY_PATH="$1"
# "1"/"true"/"yes" for snapshot builds (notarization is skipped)
IS_SNAPSHOT="$2"
# build target name -- only used to name the per-target log file
TARGET_NAME="$3"

## grab utilities
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
. "$SCRIPT_DIR"/utils.sh

mkdir -p "$SCRIPT_DIR/log"
# sign_binary [binary-path] [signing-identity]
#
# signs a single binary with codesign (the original comment said "cosign",
# but the tool actually invoked is Apple's codesign)
#
sign_binary() {
  exe_path=$1
  identity=$2

  # only executable Mach-O files can be signed; everything else (archives,
  # checksum files, binaries for other platforms) is skipped, not failed
  if [ -x "$exe_path" ] && file -b "$exe_path" | grep -q "Mach-O"
  then
    echo "signing $exe_path ..."
  else
    echo "skip signing $exe_path ..."
    return 0
  fi

  # fold the failure check into the `if` itself: the previous pattern of
  # testing `$?` after a plain invocation was unreachable under `set -e`
  # (the script would have exited before the check ran); the path is also
  # quoted now so filenames with spaces are handled
  if ! codesign \
        -s "$identity" \
        -f \
        --verbose=4 \
        --timestamp \
        --options runtime \
        "$exe_path" ; then
    exit_with_error "signing failed"
  fi

  # independently verify the signature that was just applied
  codesign --verify "$exe_path" --verbose=4
}
# main
#
# resolve the target binary, sign it, and (for production builds) notarize it
main() {
  binary_abs_path=$(realpath "$BINARY_PATH")

  # bugfix: previously this only echoed a message and fell through, silently
  # producing an unsigned release asset; a missing payload is a hard failure
  if [ ! -f "$binary_abs_path" ]; then
    exit_with_error "archive does not exist: $binary_abs_path"
  fi

  case "$IS_SNAPSHOT" in
    "1" | "true" | "yes")
      commentary "disabling notarization..."
      perform_notarization=false
      ;;
    "0" | "false" | "no")
      commentary "enabling notarization..."
      # sourcing notarize.sh validates AC_USERNAME/AC_PASSWORD and defines notarize()
      . "$SCRIPT_DIR"/notarize.sh
      perform_notarization=true
      ;;
    *)
      exit_with_error "could not determine if this was a production build (isSnapshot='$IS_SNAPSHOT')"
      ;;
  esac

  # grab the signing identity from the local temp file (setup by setup.sh)
  MAC_SIGNING_IDENTITY=$(cat "$SCRIPT_DIR/$SIGNING_IDENTITY_FILENAME")

  # sign all of the binaries in the archive and recreate the input archive with the signed binaries
  sign_binary "$binary_abs_path" "$MAC_SIGNING_IDENTITY"

  # send all of the binaries off to apple to bless
  if $perform_notarization ; then
    notarize "$binary_abs_path"
  else
    commentary "skipping notarization..."
  fi
}
# capture all output from a subshell to log output additionally to a file (as well as the terminal)
# NOTE(review): $BINARY_PATH is unquoted inside the basename call below; this
# breaks if the binary path ever contains spaces -- consider quoting it
( (
  # SKIP_SIGNING may be unset by callers; relax nounset for the check
  set +u
  if [ -n "$SKIP_SIGNING" ]; then
    commentary "skipping signing..."
  else
    set -u
    main
  fi
) 2>&1) | tee "$SCRIPT_DIR/log/signing-$(basename $BINARY_PATH)-$TARGET_NAME.txt"

78
.github/scripts/apple-signing/utils.sh vendored Normal file
View file

@ -0,0 +1,78 @@
# shared helpers for the apple-signing scripts (sourced, never executed directly)

# name of the file (within the script dir) where setup.sh records the chosen
# signing identity for sign.sh to read back later
SIGNING_IDENTITY_FILENAME=signing-identity.txt

## terminal goodies
# ANSI color escape sequences, interpreted by `echo -e` in the helpers below
PURPLE='\033[0;35m'
GREEN='\033[0;32m'
RED='\033[0;31m'
# bold via tput; `-T linux` keeps this working when TERM is unset (e.g. in CI)
BOLD=$(tput -T linux bold)
RESET='\033[0m'
# success [message...]
#
# print the message in bold green with a leading blank line
function success() {
  echo -e "\n${GREEN}${BOLD}$*${RESET}"
}

# title [message...]
#
# announce the start of a task
function title() {
  success "Task: $*"
}

# commentary [message...]
#
# print an informational aside in purple
function commentary() {
  echo -e "\n${PURPLE}# $*${RESET}"
}

# error [message...]
#
# print an error message in bold red
function error() {
  echo -e "${RED}${BOLD}error: $*${RESET}"
}

# exit_with_error [message...]
#
# print an error message and terminate with a non-zero status
function exit_with_error() {
  # quoting "$@" prevents glob expansion / word splitting of the message
  # (the original unquoted $@ could mangle messages containing * or ?)
  error "$@"
  exit 1
}

# exit_with_message [message...]
#
# print a success message and terminate cleanly
function exit_with_message() {
  success "$@"
  exit 0
}
# realpath [path]
#
# portable stand-in for GNU realpath: print an absolute path for the given
# (existing) file. Quoting fixes breakage on paths containing spaces -- the
# original unquoted $(dirname $1)/$(basename $1) word-split such paths.
function realpath {
  echo "$(cd "$(dirname "$1")"; pwd)/$(basename "$1")";
}
# this function adds all of the existing keychains plus the new one which is the same as going to Keychain Access
# and selecting "Add Keychain" to make the keychain visible under "Custom Keychains". This is done with
# "security list-keychains -s" for some reason. The downside is that this sets the search path, not appends
# to it, so you will lose existing keychains in the search path... which is truly terrible.
#
# add_keychain [keychain-name]
function add_keychain() {
  # current user-domain keychain search path (one token per keychain;
  # intentionally unquoted below so the shell splits it into words)
  keychains=$(security list-keychains -d user)
  keychainNames=();
  for keychain in $keychains
  do
    basename=$(basename "$keychain")
    # strip the last 4 characters from each entry
    # NOTE(review): presumably this removes a trailing `-db"` -- `security
    # list-keychains` wraps each path in double quotes and keychain files end
    # in "-db", so each word-split token ends with those 4 characters.
    # Confirm against actual `security` output before changing.
    keychainName=${basename::${#basename}-4}
    keychainNames+=("$keychainName")
  done

  echo "existing user keychains: ${keychainNames[@]}"

  # note: -s SETS the search path (it does not append), hence re-listing all
  # existing names alongside the new one
  security -v list-keychains -s "${keychainNames[@]}" "$1"
}
# exit_not_ci
#
# abort with a warning when run outside of CI: the signing scripts mutate the
# macOS keychain configuration and expect certificate material that only the
# CI environment should hold, so running them elsewhere is refused outright
function exit_not_ci() {
  local warning="WARNING! It looks like this isn't the CI environment. This script modifies the macOS Keychain setup in ways you probably wouldn't want for your own machine. It also requires an Apple Developer ID Certificate that you shouldn't have outside of the CI environment.\n\nExiting early to make sure nothing bad happens.\n"
  # %b expands the embedded \n escapes, matching the original printf output
  printf '%b' "$warning"
  exit 1
}
# home directory of the GitHub Actions macOS runner, used as a CI heuristic
CI_HOME="/Users/runner"

# assert_in_ci
#
# bail out (via exit_not_ci) unless both the runner home directory and the
# GITHUB_ACTIONS env var indicate this is the CI environment
function assert_in_ci() {
  if [[ "${HOME}" != "${CI_HOME}" ]]; then
    exit_not_ci
  fi

  # GITHUB_ACTIONS is legitimately unset outside CI; temporarily relax
  # nounset so the probe itself does not abort the (sourcing) script
  set +u
  if [ -z "${GITHUB_ACTIONS}" ]; then
    exit_not_ci
  fi
  set -u
}

View file

@ -338,7 +338,7 @@ hash_sha256_verify() {
return 1
fi
BASENAME=${TARGET##*/}
want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1)
want=$(grep "${BASENAME}$" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1)
if [ -z "$want" ]; then
log_err "hash_sha256_verify unable to find checksum for '${TARGET}' in '${checksums}'"
return 1

View file

@ -1,31 +0,0 @@
#!/usr/bin/env bash
set -eu
CI_HOME="/Users/runner"
if [[ "${HOME}" != "${CI_HOME}" ]]; then
printf "WARNING! It looks like this isn't the CI environment. This script modifies the macOS Keychain setup in ways you probably wouldn't want for your own machine. It also requires an Apple Developer ID Certificate that you shouldn't have outside of the CI environment.\n\nExiting early to make sure nothing bad happens.\n"
exit 1
fi
# Install gon (see https://github.com/mitchellh/gon for details).
brew tap mitchellh/gon
brew install mitchellh/gon/gon
# Write signing certificate to disk from environment variable.
CERT_FILE="$HOME/developer_id_certificate.p12"
echo -n "$APPLE_DEVELOPER_ID_CERT" | base64 --decode > "$CERT_FILE"
# In order to have all keychain interactions avoid an interactive user prompt, we need to control the password for the keychain in question, which means we need to create a new keychain into which we'll import the signing certificate and from which we'll later access this certificate during code signing.
EPHEMERAL_KEYCHAIN="ci-ephemeral-keychain"
EPHEMERAL_KEYCHAIN_PASSWORD="$(openssl rand -base64 100)"
security create-keychain -p "${EPHEMERAL_KEYCHAIN_PASSWORD}" "${EPHEMERAL_KEYCHAIN}"
# Import signing certificate into the keychain. (This is a pre-requisite for gon, which is invoked via goreleaser.)
EPHEMERAL_KEYCHAIN_FULL_PATH="$HOME/Library/Keychains/${EPHEMERAL_KEYCHAIN}-db"
security import "${CERT_FILE}" -k "${EPHEMERAL_KEYCHAIN_FULL_PATH}" -P "${APPLE_DEVELOPER_ID_CERT_PASS}" -T "$(command -v codesign)"
# Setting the partition list for this certificate's private key to include "apple-tool:" and "apple:" allows the codesign command to access this keychain item without an interactive user prompt. (codesign is invoked by gon.)
security set-key-partition-list -S "apple-tool:,apple:" -s -k "${EPHEMERAL_KEYCHAIN_PASSWORD}" "${EPHEMERAL_KEYCHAIN_FULL_PATH}"
# Make this new keychain the user's default keychain, so that codesign will be able to find this certificate when we specify it during signing.
security default-keychain -d "user" -s "${EPHEMERAL_KEYCHAIN_FULL_PATH}"

View file

@ -1,19 +0,0 @@
#!/usr/bin/env bash
set -eu
IS_SNAPSHOT="$1" # e.g. "true", "false"
if [[ "${IS_SNAPSHOT}" == "true" ]]; then
# This is a snapshot build —— skipping signing and notarization...
exit 0
fi
GON_CONFIG="$2" # e.g. "gon.hcl"
NEW_NAME_WITHOUT_EXTENSION="$3" # e.g. "./dist/syft-0.1.0"
ORIGINAL_NAME_WITHOUT_EXTENSION="./dist/output" # This should match dmg and zip output_path in the gon config file, without the extension.
gon "${GON_CONFIG}"
# Rename outputs with specified desired name
mv -v "${ORIGINAL_NAME_WITHOUT_EXTENSION}.dmg" "${NEW_NAME_WITHOUT_EXTENSION}.dmg"
mv -v "${ORIGINAL_NAME_WITHOUT_EXTENSION}.zip" "${NEW_NAME_WITHOUT_EXTENSION}.zip"

View file

@ -1,14 +0,0 @@
#!/usr/bin/env bash
set -ue
DISTDIR=$1
export FINGERPRINT=$(gpg --verify ${DISTDIR}/*checksums.txt.sig ${DISTDIR}/*checksums.txt 2>&1 | grep 'using RSA key' | awk '{ print $NF }')
if [[ "${FINGERPRINT}" == "${SIGNING_FINGERPRINT}" ]]; then
echo 'verified signature'
else
echo "signed with unknown fingerprint: ${FINGERPRINT}"
echo " expected fingerprint: ${SIGNING_FINGERPRINT}"
exit 1
fi

View file

@ -1,113 +0,0 @@
name: 'Acceptance'
on:
push:
workflow_dispatch:
# ... only act on pushes to main
branches:
- main
# ... do not act on release tags
tags-ignore:
- v*
env:
GO_VERSION: "1.17.x"
jobs:
# Note: changing this job name requires making the same update in the .github/workflows/release.yaml pipeline
Build-Snapshot-Artifacts:
runs-on: ubuntu-20.04
steps:
- uses: actions/setup-go@v2
with:
go-version: ${{ env.GO_VERSION }}
- uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Restore bootstrap cache
id: cache
uses: actions/cache@v2
with:
path: |
~/go/pkg/mod
${{ github.workspace }}/.tmp
key: ${{ runner.os }}-go-${{ env.GO_VERSION }}-${{ hashFiles('**/go.sum') }}-${{ hashFiles('Makefile') }}
- name: Bootstrap project dependencies
if: steps.bootstrap-cache.outputs.cache-hit != 'true'
run: make bootstrap
- name: Import GPG key
id: import_gpg
uses: crazy-max/ghaction-import-gpg@v2
env:
GPG_PRIVATE_KEY: ${{ secrets.SIGNING_GPG_PRIVATE_KEY }}
PASSPHRASE: ${{ secrets.SIGNING_GPG_PASSPHRASE }}
- name: GPG signing info
run: |
echo "fingerprint: ${{ steps.import_gpg.outputs.fingerprint }}"
echo "keyid: ${{ steps.import_gpg.outputs.keyid }}"
echo "name: ${{ steps.import_gpg.outputs.name }}"
echo "email: ${{ steps.import_gpg.outputs.email }}"
- name: Build snapshot artifacts
run: make snapshot
env:
GPG_PRIVATE_KEY: ${{ secrets.SIGNING_GPG_PRIVATE_KEY }}
PASSPHRASE: ${{ secrets.SIGNING_GPG_PASSPHRASE }}
- uses: actions/upload-artifact@v2
with:
name: artifacts
path: snapshot/**/*
- uses: 8398a7/action-slack@v3
with:
status: ${{ job.status }}
fields: repo,workflow,job,commit,message,author
text: The grype acceptance tests have failed tragically!
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TOOLBOX_WEBHOOK_URL }}
if: ${{ failure() }}
# TODO: add basic acceptance tests against snapshot artifacts
# Note: changing this job name requires making the same update in the .github/workflows/release.yaml pipeline
Inline-Compare:
needs: [ Build-Snapshot-Artifacts ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Fingerprint inline-compare sources
run: make compare-fingerprint
- name: Restore inline reports cache
id: cache
uses: actions/cache@v2
with:
path: ${{ github.workspace }}/test/inline-compare/inline-reports
key: inline-reports-${{ hashFiles('**/inline-compare.fingerprint') }}
- uses: actions/download-artifact@v2
with:
name: artifacts
path: snapshot
- name: Compare Anchore inline-scan results against snapshot build output
run: make compare-snapshot
- uses: 8398a7/action-slack@v3
with:
status: ${{ job.status }}
fields: repo,workflow,job,commit,message,author
text: The grype acceptance tests have failed tragically!
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TOOLBOX_WEBHOOK_URL }}
if: ${{ failure() }}

View file

@ -14,7 +14,7 @@ env:
jobs:
quality-gate:
environment: release
runs-on: ubuntu-latest # This OS choice is arbitrary. None of the steps in this job are specific to either Linux or macOS.
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
@ -61,6 +61,15 @@ jobs:
checkName: "Acceptance tests (Linux)"
ref: ${{ github.event.pull_request.head.sha || github.sha }}
- name: Check acceptance test results (mac)
uses: fountainhead/action-wait-for-check@v1.0.0
id: acceptance-mac
with:
token: ${{ secrets.GITHUB_TOKEN }}
# This check name is defined as the github action job name (in .github/workflows/testing.yaml)
checkName: "Acceptance tests (Mac)"
ref: ${{ github.event.pull_request.head.sha || github.sha }}
- name: Check cli test results (linux)
uses: fountainhead/action-wait-for-check@v1.0.0
id: cli-linux
@ -71,19 +80,20 @@ jobs:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
- name: Quality gate
if: steps.static-analysis.outputs.conclusion != 'success' || steps.unit.outputs.conclusion != 'success' || steps.integration.outputs.conclusion != 'success' || steps.cli-linux.outputs.conclusion != 'success' || steps.acceptance-linux.outputs.conclusion != 'success'
if: steps.static-analysis.outputs.conclusion != 'success' || steps.unit.outputs.conclusion != 'success' || steps.integration.outputs.conclusion != 'success' || steps.cli-linux.outputs.conclusion != 'success' || steps.acceptance-linux.outputs.conclusion != 'success' || steps.acceptance-mac.outputs.conclusion != 'success'
run: |
echo "Static Analysis Status: ${{ steps.static-analysis.conclusion }}"
echo "Unit Test Status: ${{ steps.unit.outputs.conclusion }}"
echo "Integration Test Status: ${{ steps.integration.outputs.conclusion }}"
echo "Acceptance Test (Linux) Status: ${{ steps.acceptance-linux.outputs.conclusion }}"
echo "Acceptance Test (Mac) Status: ${{ steps.acceptance-mac.outputs.conclusion }}"
echo "CLI Test (Linux) Status: ${{ steps.cli-linux.outputs.conclusion }}"
false
release:
needs: [ quality-gate ]
runs-on: macos-latest # Due to our code signing process, it's vital that we run our release steps on macOS.
needs: [quality-gate]
# due to our code signing process, it's vital that we run our release steps on macOS
runs-on: macos-latest
steps:
- uses: docker-practice/actions-setup-docker@v1
@ -95,52 +105,41 @@ jobs:
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
# We are expecting this cache to have been created during the "Build-Snapshot-Artifacts" job in the "Acceptance" workflow.
- name: Restore bootstrap cache
id: bootstrap-cache
uses: actions/cache@v2
- name: Restore tool cache
id: tool-cache
uses: actions/cache@v2.1.3
with:
path: |
~/go/pkg/mod
${{ github.workspace }}/.tmp
key: ${{ runner.os }}-go-${{ env.GO_VERSION }}-${{ hashFiles('Makefile') }}-${{ hashFiles('**/go.sum') }}
path: ${{ github.workspace }}/.tmp
key: ${{ runner.os }}-tool-${{ hashFiles('Makefile') }}
- name: Bootstrap project dependencies
if: steps.bootstrap-cache.outputs.cache-hit != 'true'
- name: Restore go cache
id: go-cache
uses: actions/cache@v2.1.3
with:
path: ~/go/pkg/mod
key: ${{ runner.os }}-go-${{ env.GO_VERSION }}-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-${{ env.GO_VERSION }}-
- name: (cache-miss) Bootstrap all project dependencies
if: steps.tool-cache.outputs.cache-hit != 'true' || steps.go-cache.outputs.cache-hit != 'true'
run: make bootstrap
- name: Import GPG key
id: import_gpg
uses: crazy-max/ghaction-import-gpg@v2
env:
GPG_PRIVATE_KEY: ${{ secrets.SIGNING_GPG_PRIVATE_KEY }}
PASSPHRASE: ${{ secrets.SIGNING_GPG_PASSPHRASE }}
- name: GPG signing info
run: |
echo "fingerprint: ${{ steps.import_gpg.outputs.fingerprint }}"
echo "keyid: ${{ steps.import_gpg.outputs.keyid }}"
echo "name: ${{ steps.import_gpg.outputs.name }}"
echo "email: ${{ steps.import_gpg.outputs.email }}"
- name: Build release artifacts
- name: Build & publish release artifacts
run: make release
env:
DOCKER_USERNAME: ${{ secrets.TOOLBOX_DOCKER_USER }}
DOCKER_PASSWORD: ${{ secrets.TOOLBOX_DOCKER_PASS }}
# we use a different token than GITHUB_SECRETS to additionally allow updating the homebrew repos
GITHUB_TOKEN: ${{ secrets.ANCHORE_GIT_READ_TOKEN }}
GPG_PRIVATE_KEY: ${{ secrets.SIGNING_GPG_PRIVATE_KEY }}
PASSPHRASE: ${{ secrets.SIGNING_GPG_PASSPHRASE }}
SIGNING_FINGERPRINT: ${{ steps.import_gpg.outputs.fingerprint }}
AWS_ACCESS_KEY_ID: ${{ secrets.TOOLBOX_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.TOOLBOX_AWS_SECRET_ACCESS_KEY }}
APPLE_DEVELOPER_ID_CERT: ${{ secrets.APPLE_DEVELOPER_ID_CERT }} # Used during macOS code signing.
APPLE_DEVELOPER_ID_CERT_PASS: ${{ secrets.APPLE_DEVELOPER_ID_CERT_PASS }} # Used during macOS code signing.
AC_USERNAME: ${{ secrets.ENG_CI_APPLE_ID }} # Used during macOS notarization.
AC_PASSWORD: ${{ secrets.ENG_CI_APPLE_ID_PASS }} # Used during macOS notarization.
# used during macOS code signing
APPLE_DEVELOPER_ID_CERT: ${{ secrets.APPLE_DEVELOPER_ID_CERT }}
APPLE_DEVELOPER_ID_CERT_PASS: ${{ secrets.APPLE_DEVELOPER_ID_CERT_PASS }}
# used during macOS notarization
AC_USERNAME: ${{ secrets.ENG_CI_APPLE_ID }}
AC_PASSWORD: ${{ secrets.ENG_CI_APPLE_ID_PASS }}
- uses: anchore/sbom-action@v0
with:

View file

@ -199,8 +199,43 @@ jobs:
name: artifacts
path: snapshot
- name: Run Acceptance Tests (Linux)
run: make acceptance-linux
- name: Build key for image cache
run: make install-fingerprint
- name: Restore install.sh test image cache
id: install-test-image-cache
uses: actions/cache@v2.1.3
with:
path: ${{ github.workspace }}/test/install/cache
key: ${{ runner.os }}-install-test-image-cache-${{ hashFiles('test/install/cache.fingerprint') }}
- name: Load test image cache
if: steps.install-test-image-cache.outputs.cache-hit == 'true'
run: make install-test-cache-load
- name: Run install.sh tests (Linux)
run: make install-test
- name: (cache-miss) Create test image cache
if: steps.install-test-image-cache.outputs.cache-hit != 'true'
run: make install-test-cache-save
Acceptance-Mac:
# Note: changing this job name requires making the same update in the .github/workflows/release.yaml pipeline
name: "Acceptance tests (Mac)"
needs: [Build-Snapshot-Artifacts]
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- uses: actions/download-artifact@v2
with:
name: artifacts
path: snapshot
- name: Run install.sh tests (Mac)
run: make install-test-ci-mac
Cli-Linux:
# Note: changing this job name requires making the same update in the .github/workflows/release.yaml pipeline

View file

@ -1,26 +1,28 @@
release:
# If set to auto, will mark the release as not ready for production
# in case there is an indicator for this in the tag e.g. v1.0.0-rc1
# If set to true, will mark the release as not ready for production.
prerelease: auto
# If set to true, will not auto-publish the release. This is done to allow us to review the changelog before publishing.
draft: true
env:
# required to support multi architecture docker builds
- DOCKER_CLI_EXPERIMENTAL=enabled
before:
hooks:
- ./.github/scripts/apple-signing/setup.sh {{ .IsSnapshot }}
builds:
- binary: grype
id: grype
env:
- CGO_ENABLED=0
- id: linux-build
binary: grype
goos:
- linux
- windows
goarch:
- amd64
- arm64
# Set the modified timestamp on the output binary to the git timestamp (to ensure a reproducible build)
mod_timestamp: '{{ .CommitTimestamp }}'
ldflags: |
# set the modified timestamp on the output binary to the git timestamp to ensure a reproducible build
mod_timestamp: &build-timestamp '{{ .CommitTimestamp }}'
env: &build-env
- CGO_ENABLED=0
ldflags: &build-ldflags |
-w
-s
-extldflags '-static'
@ -28,60 +30,47 @@ builds:
-X github.com/anchore/grype/internal/version.syftVersion={{.Env.SYFT_VERSION}}
-X github.com/anchore/grype/internal/version.gitCommit={{.Commit}}
-X github.com/anchore/grype/internal/version.buildDate={{.Date}}
-X github.com/anchore/grype/internal/version.gitTreeState={{.Env.BUILD_GIT_TREE_STATE}}
-X github.com/anchore/grype/internal/version.gitDescription={{.Summary}}
# For more info on this macOS build, see: https://github.com/mitchellh/gon#usage-with-goreleaser
- binary: grype
id: grype-macos
env:
- CGO_ENABLED=0
- id: darwin-build
binary: grype
goos:
- darwin
goarch:
- amd64
- arm64
# Set the modified timestamp on the output binary to the git timestamp (to ensure a reproducible build)
mod_timestamp: '{{ .CommitTimestamp }}'
ldflags: |
-w
-s
-extldflags '-static'
-X github.com/anchore/grype/internal/version.version={{.Version}}
-X github.com/anchore/grype/internal/version.syftVersion={{.Env.SYFT_VERSION}}
-X github.com/anchore/grype/internal/version.gitCommit={{.Commit}}
-X github.com/anchore/grype/internal/version.buildDate={{.Date}}
-X github.com/anchore/grype/internal/version.gitTreeState={{.Env.BUILD_GIT_TREE_STATE}}
mod_timestamp: *build-timestamp
env: *build-env
ldflags: *build-ldflags
hooks:
post:
# we must have signing as a build hook instead of the signs section. The signs section must register a new asset, where we want to replace an existing asset.
# a post-build hook has the advantage of not needing to unpackage and repackage a tar.gz with a signed binary
- ./.github/scripts/apple-signing/sign.sh "{{ .Path }}" "{{ .IsSnapshot }}" "{{ .Target }}"
- id: windows-build
binary: grype
goos:
- windows
goarch:
- amd64
mod_timestamp: *build-timestamp
env: *build-env
ldflags: *build-ldflags
archives:
- name_template: '{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
id: grype
format_overrides:
- goos: windows
format: zip
- format: zip # This is a hack for grype-macos! We don't actually intend to use _this_ ZIP file, we just need goreleaser to consider the ZIP file produced by gon (which will have the same file name) to be an artifact so we can use it downstream in publishing (e.g. to a homebrew tap)
id: grype-exception
- id: linux-archives
builds:
- grype-macos
- linux-build
signs:
- artifacts: checksum
cmd: sh
args:
- '-c'
# we should not include the zip artifact, as the artifact is mutated throughout the next macOS notarization step
# note: sed -i is not portable
- 'sed "/.*_darwin_.*\.zip/d" ${artifact} > tmpfile && mv tmpfile ${artifact} && gpg --output ${signature} --detach-sign ${artifact}'
- id: grype-macos-signing
ids:
- grype-macos
cmd: ./.github/scripts/mac-sign-and-notarize.sh
signature: "grype_${VERSION}_darwin_amd64.dmg" # This is somewhat unintuitive. This gets the DMG file recognized as an artifact. In fact, both a DMG and a ZIP file are being produced by this signing step.
args:
- "{{ .IsSnapshot }}"
- "gon.hcl"
- "./dist/grype_{{ .Version }}_darwin_amd64"
artifacts: all
- id: darwin-archives
builds:
- darwin-build
- id: windows-archives
format: zip
builds:
- windows-build
nfpms:
- license: "Apache 2.0"
@ -97,19 +86,11 @@ brews:
owner: anchore
name: homebrew-grype
ids:
- grype
install: |
bin.install "grype"
# Install bash completion
output = Utils.popen_read("#{bin}/grype completion bash")
(bash_completion/"grype").write output
# Install zsh completion
output = Utils.popen_read("#{bin}/grype completion zsh")
(zsh_completion/"_grype").write output
- darwin-archives
- linux-archives
homepage: *website
description: *description
license: "Apache License 2.0"
dockers:
- image_templates:
@ -157,4 +138,3 @@ docker_manifests:
- anchore/grype:{{ .Tag }}-arm64v8
- anchore/grype:v{{ .Major }}-arm64v8
- anchore/grype:v{{ .Major }}.{{ .Minor }}-arm64v8

178
Makefile
View file

@ -5,6 +5,11 @@ COVER_REPORT = $(RESULTSDIR)/cover.report
COVER_TOTAL = $(RESULTSDIR)/cover.total
LICENSES_REPORT = $(RESULTSDIR)/licenses.json
LINTCMD = $(TEMPDIR)/golangci-lint run --tests=false --timeout 5m --config .golangci.yaml
RELEASE_CMD=$(TEMPDIR)/goreleaser release --rm-dist
SNAPSHOT_CMD=$(RELEASE_CMD) --skip-publish --snapshot
VERSION=$(shell git describe --dirty --always --tags)
# formatting variables
BOLD := $(shell tput -T linux bold)
PURPLE := $(shell tput -T linux setaf 5)
GREEN := $(shell tput -T linux setaf 2)
@ -13,28 +18,21 @@ RED := $(shell tput -T linux setaf 1)
RESET := $(shell tput -T linux sgr0)
TITLE := $(BOLD)$(PURPLE)
SUCCESS := $(BOLD)$(GREEN)
# the quality gate lower threshold for unit test total % coverage (by function statements)
COVERAGE_THRESHOLD := 47
# CI cache busting values; change these if you want CI to not use previous stored cache
BOOTSTRAP_CACHE="c7afb99ad"
INTEGRATION_CACHE_BUSTER="894d8ca"
## Build variables
DISTDIR=./dist
SNAPSHOTDIR=./snapshot
GITTREESTATE=$(if $(shell git status --porcelain),dirty,clean)
SYFTVERSION=$(shell go list -m all | grep github.com/anchore/syft | awk '{print $$2}')
OS := $(shell uname)
OS=$(shell uname | tr '[:upper:]' '[:lower:]')
SYFT_VERSION=$(shell go list -m all | grep github.com/anchore/syft | awk '{print $$2}')
SNAPSHOT_BIN=$(shell realpath $(shell pwd)/$(SNAPSHOTDIR)/$(OS)-build_$(OS)_amd64/$(BIN))
ifeq ($(OS),Darwin)
SNAPSHOT_CMD=$(shell realpath $(shell pwd)/$(SNAPSHOTDIR)/$(BIN)-macos_darwin_amd64/$(BIN))
else
SNAPSHOT_CMD=$(shell realpath $(shell pwd)/$(SNAPSHOTDIR)/$(BIN)_linux_amd64/$(BIN))
endif
ifeq "$(strip $(VERSION))" ""
override VERSION = $(shell git describe --always --tags --dirty)
endif
## Variable assertions
@ -54,16 +52,28 @@ ifndef SNAPSHOTDIR
$(error SNAPSHOTDIR is not set)
endif
ifndef VERSION
$(error VERSION is not set)
endif
define title
@printf '$(TITLE)$(1)$(RESET)\n'
endef
define safe_rm_rf
bash -c 'test -z "$(1)" && false || rm -rf $(1)'
endef
define safe_rm_rf_children
bash -c 'test -z "$(1)" && false || rm -rf $(1)/*'
endef
.PHONY: all
all: clean static-analysis test ## Run all checks (linting, license check, unit, integration, and linux acceptance tests tests)
@printf '$(SUCCESS)All checks pass!$(RESET)\n'
.PHONY: test
test: unit validate-cyclonedx-schema integration acceptance-linux cli ## Run all tests (unit, integration, linux acceptance, and CLI tests)
test: unit validate-cyclonedx-schema integration cli ## Run all tests (unit, integration, linux acceptance, and CLI tests)
help:
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "$(BOLD)$(CYAN)%-25s$(RESET)%s\n", $$1, $$2}'
@ -72,10 +82,6 @@ help:
ci-bootstrap:
DEBIAN_FRONTEND=noninteractive sudo apt update && sudo -E apt install -y bc jq libxml2-utils
.PHONY:
ci-bootstrap-mac:
github_changelog_generator --version || sudo gem install github_changelog_generator
$(RESULTSDIR):
mkdir -p $(RESULTSDIR)
@ -87,7 +93,7 @@ bootstrap-tools: $(TEMPDIR)
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(TEMPDIR)/ v1.42.1
curl -sSfL https://raw.githubusercontent.com/wagoodman/go-bouncer/master/bouncer.sh | sh -s -- -b $(TEMPDIR)/ v0.3.0
curl -sSfL https://raw.githubusercontent.com/anchore/chronicle/main/install.sh | sh -s -- -b $(TEMPDIR)/ v0.3.0
.github/scripts/goreleaser-install.sh -b $(TEMPDIR)/ v0.177.0
.github/scripts/goreleaser-install.sh -b $(TEMPDIR)/ v1.4.1
.PHONY: bootstrap-go
bootstrap-go:
@ -114,6 +120,17 @@ lint: ## Run gofmt + golangci lint checks
$(eval MALFORMED_FILENAMES := $(shell find . | grep -e ':'))
@bash -c "[[ '$(MALFORMED_FILENAMES)' == '' ]] || (printf '\nfound unsupported filename characters:\n$(MALFORMED_FILENAMES)\n\n' && false)"
.PHONY: lint-fix
lint-fix: ## Auto-format all source code + run golangci lint fixers
$(call title,Running lint fixers)
gofmt -w -s .
$(LINTCMD) --fix
go mod tidy
.PHONY: check-licenses
check-licenses:
$(TEMPDIR)/bouncer check
check-go-mod-tidy:
@ .github/scripts/go-mod-tidy-check.sh && echo "go.mod and go.sum are tidy!"
@ -126,17 +143,6 @@ validate-grype-db-schema:
# ensure the codebase is only referencing a single grype-db schema version, multiple is not allowed
python test/validate-grype-db-schema.py
.PHONY: lint-fix
lint-fix: ## Auto-format all source code + run golangci lint fixers
$(call title,Running lint fixers)
gofmt -w -s .
$(LINTCMD) --fix
go mod tidy
.PHONY: check-licenses
check-licenses:
$(TEMPDIR)/bouncer check
.PHONY: unit
unit: ## Run unit tests (with coverage)
$(call title,Running unit tests)
@ -146,6 +152,27 @@ unit: ## Run unit tests (with coverage)
@echo "Coverage: $$(cat $(COVER_TOTAL))"
@if [ $$(echo "$$(cat $(COVER_TOTAL)) >= $(COVERAGE_THRESHOLD)" | bc -l) -ne 1 ]; then echo "$(RED)$(BOLD)Failed coverage quality gate (> $(COVERAGE_THRESHOLD)%)$(RESET)" && false; fi
# note: this is used by CI to determine if the install test fixture cache (docker image tars) should be busted
install-fingerprint:
cd test/install && \
make cache.fingerprint
install-test: $(SNAPSHOTDIR)
cd test/install && \
make
install-test-cache-save: $(SNAPSHOTDIR)
cd test/install && \
make save
install-test-cache-load: $(SNAPSHOTDIR)
cd test/install && \
make load
install-test-ci-mac: $(SNAPSHOTDIR)
cd test/install && \
make ci-test-mac
.PHONY: integration
integration: ## Run integration tests
$(call title,Running integration tests)
@ -163,48 +190,43 @@ cli-fingerprint:
.PHONY: cli
cli: $(SNAPSHOTDIR) ## Run CLI tests
chmod 755 "$(SNAPSHOT_CMD)"
$(SNAPSHOT_CMD) version
GRYPE_BINARY_LOCATION='$(SNAPSHOT_CMD)' \
chmod 755 "$(SNAPSHOT_BIN)"
GRYPE_BINARY_LOCATION='$(SNAPSHOT_BIN)' \
go test -count=1 -v ./test/cli
.PHONY: clear-test-cache
clear-test-cache: ## Delete all test cache (built docker image tars)
find . -type f -wholename "**/test-fixtures/cache/*.tar" -delete
.PHONY: build
build: $(SNAPSHOTDIR) ## Build release snapshot binaries and packages
$(SNAPSHOTDIR): ## Build snapshot release binaries and packages
$(call title,Building snapshot artifacts)
# create a config with the dist dir overridden
echo "dist: $(SNAPSHOTDIR)" > $(TEMPDIR)/goreleaser.yaml
cat .goreleaser.yaml >> $(TEMPDIR)/goreleaser.yaml
# build release snapshots
# DOCKER_CLI_EXPERIMENTAL needed to support multi architecture builds for goreleaser
# the release command protects us from image build regressions if QEMU fails or docker is changed
BUILD_GIT_TREE_STATE=$(GITTREESTATE) \
DOCKER_CLI_EXPERIMENTAL=enabled \
SYFT_VERSION=$(SYFTVERSION) \
$(TEMPDIR)/goreleaser release --skip-publish --skip-sign --rm-dist --snapshot --config $(TEMPDIR)/goreleaser.yaml
bash -c "\
SKIP_SIGNING=true \
SYFT_VERSION=$(SYFT_VERSION)\
$(SNAPSHOT_CMD) --skip-sign --config $(TEMPDIR)/goreleaser.yaml"
.PHONY: acceptance-linux
acceptance-linux: $(SNAPSHOTDIR) ## Run acceptance tests on build snapshot binaries and packages (Linux)
.PHONY: snapshot-with-signing
snapshot-with-signing: ## Build snapshot release binaries and packages (with dummy signing)
$(call title,Building snapshot artifacts (+ signing))
# note: this is used by CI to determine if the inline-scan report cache should be busted for the inline-compare tests
.PHONY: compare-fingerprint
compare-fingerprint: ## Compare a snapshot build run of grype against inline-scan
find test/inline-compare/* -type f -exec md5sum {} + | grep -v '\-reports' | grep -v 'fingerprint' | awk '{print $1}' | sort | md5sum | tee test/inline-compare/inline-compare.fingerprint
# create a config with the dist dir overridden
echo "dist: $(SNAPSHOTDIR)" > $(TEMPDIR)/goreleaser.yaml
cat .goreleaser.yaml >> $(TEMPDIR)/goreleaser.yaml
.PHONY: compare-snapshot
compare-snapshot: $(SNAPSHOTDIR) ## Compare a main branch build run of grype against inline-scan
chmod 755 $(SNAPSHOT_CMD)
@cd test/inline-compare && GRYPE_CMD=$(SNAPSHOT_CMD) make
rm -f .github/scripts/apple-signing/log/*.txt
.PHONY: compare
compare:
@cd test/inline-compare && make
# build release snapshots
bash -c "\
SYFT_VERSION=$(SYFT_VERSION)\
$(SNAPSHOT_CMD) --config $(TEMPDIR)/goreleaser.yaml || (cat .github/scripts/apple-signing/log/*.txt && false)"
# remove the keychain with the trusted self-signed cert automatically
.github/scripts/apple-signing/cleanup.sh
.PHONY: changelog
changelog: clean-changelog CHANGELOG.md
@ -231,50 +253,48 @@ validate-syft-release-version:
@./.github/scripts/syft-released-version-check.sh
.PHONY: release
release: clean-dist validate-grype-test-config CHANGELOG.md ## Build and publish final binaries and packages. Intended to be run only on macOS.
release: clean-dist CHANGELOG.md ## Build and publish final binaries and packages. Intended to be run only on macOS.
$(call title,Publishing release artifacts)
# Prepare for macOS-specific signing process
.github/scripts/mac-prepare-for-signing.sh
# login to docker
# note: the previous step creates a new keychain, so it is important to reauth into docker.io
@echo $${DOCKER_PASSWORD} | docker login docker.io -u $${DOCKER_USERNAME} --password-stdin
# create a config with the dist dir overridden
echo "dist: $(DISTDIR)" > $(TEMPDIR)/goreleaser.yaml
cat .goreleaser.yaml >> $(TEMPDIR)/goreleaser.yaml
# release (note the version transformation from v0.7.0 --> 0.7.0)
# DOCKER_CLI_EXPERIMENTAL needed to support multi architecture builds for goreleaser
rm -f .github/scripts/apple-signing/log/*.txt
# note: notarization cannot be done in parallel, thus --parallelism 1
bash -c "\
BUILD_GIT_TREE_STATE=$(GITTREESTATE) \
DOCKER_CLI_EXPERIMENTAL=enabled \
SYFT_VERSION=$(SYFTVERSION) \
VERSION=$(VERSION:v%=%) \
$(TEMPDIR)/goreleaser \
--rm-dist \
--config $(TEMPDIR)/goreleaser.yaml \
--release-notes <(cat CHANGELOG.md)"
SYFT_VERSION=$(SYFT_VERSION)\
$(RELEASE_CMD) \
--config $(TEMPDIR)/goreleaser.yaml \
--parallelism 1 \
--release-notes <(cat CHANGELOG.md)\
|| (cat .github/scripts/apple-signing/log/*.txt && false)"
# verify checksum signatures
.github/scripts/verify-signature.sh "$(DISTDIR)"
cat .github/scripts/apple-signing/log/*.txt
# TODO: turn this into a post-release hook
# upload the version file that supports the application version update check (excluding pre-releases)
.github/scripts/update-version-file.sh "$(DISTDIR)" "$(VERSION)"
.PHONY: clean
clean: clean-dist clean-snapshot ## Remove previous builds and result reports
rm -rf $(RESULTSDIR)/*
$(call safe_rm_rf_children,$(RESULTSDIR))
.PHONY: clean-snapshot
clean-snapshot:
rm -rf $(SNAPSHOTDIR) $(TEMPDIR)/goreleaser.yaml
$(call safe_rm_rf,$(SNAPSHOTDIR))
rm -f $(TEMPDIR)/goreleaser.yaml
.PHONY: clean-dist
clean-dist: clean-changelog
rm -rf $(DISTDIR) $(TEMPDIR)/goreleaser.yaml
$(call safe_rm_rf,$(DISTDIR))
rm -f $(TEMPDIR)/goreleaser.yaml
.PHONY: clean-changelog
clean-changelog:
rm -f CHANGELOG.md
.PHONY: clean-test-cache
clean-test-cache: ## Delete all test cache (built docker image tars)
find . -type f -wholename "**/test-fixtures/cache/*.tar" -delete

View file

@ -34,7 +34,7 @@ func printVersion(_ *cobra.Command, _ []string) error {
fmt.Println("Syft Version: ", versionInfo.SyftVersion)
fmt.Println("BuildDate: ", versionInfo.BuildDate)
fmt.Println("GitCommit: ", versionInfo.GitCommit)
fmt.Println("GitTreeState: ", versionInfo.GitTreeState)
fmt.Println("GitDescription: ", versionInfo.GitDescription)
fmt.Println("Platform: ", versionInfo.Platform)
fmt.Println("GoVersion: ", versionInfo.GoVersion)
fmt.Println("Compiler: ", versionInfo.Compiler)

6
go.sum
View file

@ -128,7 +128,6 @@ github.com/anchore/go-version v1.2.2-0.20210903204242-51efa5b487c4/go.mod h1:Bkc
github.com/anchore/packageurl-go v0.0.0-20210922164639-b3fa992ebd29 h1:K9LfnxwhqvihqU0+MF325FNy7fsKV9EGaUxdfR4gnWk=
github.com/anchore/packageurl-go v0.0.0-20210922164639-b3fa992ebd29/go.mod h1:Oc1UkGaJwY6ND6vtAqPSlYrptKRJngHwkwB6W7l1uP0=
github.com/anchore/stereoscope v0.0.0-20220209160132-2e595043fa19/go.mod h1:QpDHHV2h1NNfu7klzU75XC8RvSlaPK6HHgi0dy8A6sk=
github.com/anchore/stereoscope v0.0.0-20220209160132-2e595043fa19/go.mod h1:QpDHHV2h1NNfu7klzU75XC8RvSlaPK6HHgi0dy8A6sk=
github.com/anchore/stereoscope v0.0.0-20220209180455-403dd709a3fb h1:yicFaC7dVBS4uYvU7sxsnEVi/2rndM0axZUgfhx+1qs=
github.com/anchore/stereoscope v0.0.0-20220209180455-403dd709a3fb/go.mod h1:QpDHHV2h1NNfu7klzU75XC8RvSlaPK6HHgi0dy8A6sk=
github.com/anchore/syft v0.37.11-0.20220210211800-220f3a24fdf5 h1:GLShI62a8Y5pW+SIWnwsoXC4szWIj98rzwzEurKem84=
@ -1046,11 +1045,16 @@ go.opentelemetry.io/otel/metric v0.26.0/go.mod h1:c6YL0fhRo4YVoNs6GoByzUgBp36hBL
go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs=
go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU=

15
gon.hcl
View file

@ -1,15 +0,0 @@
source = ["./dist/grype-macos_darwin_amd64/grype"] # The 'dist' directory path should ideally reference an env var, where the source of truth is the Makefile. I wasn't able to figure out how to solve this.
bundle_id = "com.anchore.toolbox.grype"
sign {
application_identity = "Developer ID Application: ANCHORE, INC. (9MJHKYX5AT)"
}
dmg {
output_path = "./dist/output.dmg"
volume_name = "Grype"
}
zip {
output_path = "./dist/output.zip"
}

View file

@ -1,198 +1,129 @@
#!/bin/sh
set -e
# note: we require errors to propagate (don't set -e)
set -u
usage() {
PROJECT_NAME="grype"
OWNER=anchore
REPO="${PROJECT_NAME}"
GITHUB_DOWNLOAD_PREFIX=https://github.com/${OWNER}/${REPO}/releases/download
#
# usage [script-name]
#
usage() (
this=$1
cat <<EOF
$this: download go binaries for anchore/grype
Usage: $this [-b] bindir [-d] [tag]
-b sets bindir or installation directory, Defaults to ./bin
-d turns on debug logging
[tag] is a tag from
https://github.com/anchore/grype/releases
If tag is missing, then the latest will be used.
Generated by godownloader
https://github.com/goreleaser/godownloader
$this: download go binaries for anchore/syft
Usage: $this [-b] dir [-d] [tag]
-b the installation directory (dDefaults to ./bin)
-d turns on debug logging
-dd turns on trace logging
[tag] the specific release to use (if missing, then the latest will be used)
EOF
exit 2
}
)
parse_args() {
# BINDIR is ./bin unless set be ENV
# over-ridden by flag below
BINDIR=${BINDIR:-./bin}
while getopts "b:dh?x" arg; do
case "$arg" in
b) BINDIR="$OPTARG" ;;
d) log_set_priority 10 ;;
h | \?) usage "$0" ;;
x) set -x ;;
esac
done
shift $((OPTIND - 1))
TAG=$1
}
# this function wraps all the destructive operations
# if a curl|bash cuts off the end of the script due to
# network, either nothing will happen or will syntax error
# out preventing half-done work
execute() {
tmpdir=$(mktemp -d)
log_debug "downloading files into ${tmpdir}"
http_download "${tmpdir}/${ARCHIVE}" "${ARCHIVE_URL}"
http_download "${tmpdir}/${CHECKSUM}" "${CHECKSUM_URL}"
# ------------------------------------------------------------------------
# https://github.com/client9/shlib - portable posix shell functions
# Public domain - http://unlicense.org
# https://github.com/client9/shlib/blob/master/LICENSE.md
# but credit (and pull requests) appreciated.
# ------------------------------------------------------------------------
# macOS has its own secure verification mechanism, and checksums.txt is not used.
if [ "$OS" != "darwin" ]; then
hash_sha256_verify "${tmpdir}/${ARCHIVE}" "${tmpdir}/${CHECKSUM}"
fi
srcdir="${tmpdir}"
(cd "${tmpdir}" && unpack "${ARCHIVE}")
test ! -d "${BINDIR}" && install -d "${BINDIR}"
for binexe in $BINARIES; do
if [ "$OS" = "windows" ]; then
binexe="${binexe}.exe"
fi
install "${srcdir}/${binexe}" "${BINDIR}/"
log_info "installed ${BINDIR}/${binexe}"
done
rm -rf "${tmpdir}"
}
get_binaries() {
case "$PLATFORM" in
darwin/amd64) BINARIES="grype" ;;
darwin/arm64) BINARIES="grype" ;;
linux/amd64) BINARIES="grype" ;;
windows/amd64) BINARIES="grype" ;;
linux/arm64) BINARIES="grype" ;;
*)
log_crit "platform $PLATFORM is not supported. Make sure this script is up-to-date and file request at https://github.com/${PREFIX}/issues/new"
exit 1
;;
esac
}
tag_to_version() {
if [ -z "${TAG}" ]; then
log_info "checking GitHub for latest tag"
else
log_info "checking GitHub for tag '${TAG}'"
fi
REALTAG=$(github_release "$OWNER/$REPO" "${TAG}") && true
if test -z "$REALTAG"; then
log_crit "unable to find '${TAG}' - use 'latest' or see https://github.com/${PREFIX}/releases for details"
exit 1
fi
# if version starts with 'v', remove it
TAG="$REALTAG"
VERSION=${TAG#v}
}
adjust_format() {
# change format (tar.gz or zip) based on OS
case ${OS} in
darwin) FORMAT=dmg ;;
windows) FORMAT=zip ;;
esac
case "${PLATFORM}" in
darwin/arm64) FORMAT=zip ;;
esac
true
}
adjust_os() {
# adjust archive name based on OS
true
}
adjust_arch() {
# adjust archive name based on ARCH
true
}
cat /dev/null <<EOF
------------------------------------------------------------------------
https://github.com/client9/shlib - portable posix shell functions
Public domain - http://unlicense.org
https://github.com/client9/shlib/blob/master/LICENSE.md
but credit (and pull requests) appreciated.
------------------------------------------------------------------------
EOF
is_command() {
is_command() (
command -v "$1" >/dev/null
}
echoerr() {
)
echo_stderr() (
echo "$@" 1>&2
}
log_prefix() {
echo "$0"
}
_logp=6
)
_logp=2
log_set_priority() {
_logp="$1"
}
log_priority() {
log_priority() (
if test -z "$1"; then
echo "$_logp"
return
fi
[ "$1" -le "$_logp" ]
)
init_colors() {
RED=''
BLUE=''
PURPLE=''
BOLD=''
RESET=''
# check if stdout is a terminal
if test -t 1 && is_command tput; then
# see if it supports colors
ncolors=$(tput colors)
if test -n "$ncolors" && test $ncolors -ge 8; then
RED='\033[0;31m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
BOLD='\033[1m'
RESET='\033[0m'
fi
fi
}
log_tag() {
init_colors
log_tag() (
case $1 in
0) echo "emerg" ;;
1) echo "alert" ;;
2) echo "crit" ;;
3) echo "err" ;;
4) echo "warning" ;;
5) echo "notice" ;;
6) echo "info" ;;
7) echo "debug" ;;
*) echo "$1" ;;
0) echo "${RED}${BOLD}[error]${RESET}" ;;
1) echo "${RED}[warn]${RESET}" ;;
2) echo "[info]${RESET}" ;;
3) echo "${BLUE}[debug]${RESET}" ;;
4) echo "${PURPLE}[trace]${RESET}" ;;
*) echo "[$1]" ;;
esac
}
log_debug() {
log_priority 7 || return 0
echoerr "$(log_prefix)" "$(log_tag 7)" "$@"
}
log_info() {
log_priority 6 || return 0
echoerr "$(log_prefix)" "$(log_tag 6)" "$@"
}
log_err() {
log_priority 3 || return 0
echoerr "$(log_prefix)" "$(log_tag 3)" "$@"
}
log_crit() {
log_priority 2 || return 0
echoerr "$(log_prefix)" "$(log_tag 2)" "$@"
}
uname_os() {
os=$(uname -s | tr '[:upper:]' '[:lower:]')
case "$os" in
cygwin_nt*) os="windows" ;;
mingw*) os="windows" ;;
msys_nt*) os="windows" ;;
esac
echo "$os"
}
uname_arch() {
arch=$(uname -m)
case $arch in
x86_64) arch="amd64" ;;
x86) arch="386" ;;
i686) arch="386" ;;
i386) arch="386" ;;
aarch64) arch="arm64" ;;
armv5*) arch="armv5" ;;
armv6*) arch="armv6" ;;
armv7*) arch="armv7" ;;
esac
echo ${arch}
}
uname_os_check() {
os=$(uname_os)
)
# priority levels for each log function (lower number = more severe)
log_trace_priority=4
log_debug_priority=3
log_info_priority=2
log_warn_priority=1
log_err_priority=0

# _log_at [priority] [message...]
#
# shared emitter: writes the message to stderr when the given priority
# passes the current threshold (see log_priority), prefixed with its tag
#
_log_at() (
  lvl=$1
  shift
  log_priority "$lvl" || return 0
  echo_stderr "$(log_tag "$lvl")" "$@" "${RESET}"
)

log_trace() ( _log_at "$log_trace_priority" "$@" )

log_debug() ( _log_at "$log_debug_priority" "$@" )

log_info() ( _log_at "$log_info_priority" "$@" )

log_warn() ( _log_at "$log_warn_priority" "$@" )

log_err() ( _log_at "$log_err_priority" "$@" )
uname_os_check() (
os=$1
case "$os" in
darwin) return 0 ;;
dragonfly) return 0 ;;
@ -206,11 +137,12 @@ uname_os_check() {
solaris) return 0 ;;
windows) return 0 ;;
esac
log_crit "uname_os_check '$(uname -s)' got converted to '$os' which is not a GOOS value. Please file bug at https://github.com/client9/shlib"
log_err "uname_os_check '$(uname -s)' got converted to '$os' which is not a GOOS value. Please file bug at https://github.com/client9/shlib"
return 1
}
uname_arch_check() {
arch=$(uname_arch)
)
uname_arch_check() (
arch=$1
case "$arch" in
386) return 0 ;;
amd64) return 0 ;;
@ -227,56 +159,72 @@ uname_arch_check() {
s390x) return 0 ;;
amd64p32) return 0 ;;
esac
log_crit "uname_arch_check '$(uname -m)' got converted to '$arch' which is not a GOARCH value. Please file bug report at https://github.com/client9/shlib"
log_err "uname_arch_check '$(uname -m)' got converted to '$arch' which is not a GOARCH value. Please file bug report at https://github.com/client9/shlib"
return 1
}
unpack() {
)
unpack() (
archive=$1
log_trace "unpack(archive=${archive})"
case "${archive}" in
*.tar.gz | *.tgz) tar --no-same-owner -xzf "${archive}" ;;
*.tar) tar --no-same-owner -xf "${archive}" ;;
*.zip) unzip "${archive}" ;;
*.zip) unzip -q "${archive}" ;;
*.dmg) extract_from_dmg "${archive}" ;;
*)
log_err "unpack unknown archive format for ${archive}"
return 1
;;
esac
}
extract_from_dmg() {
)
extract_from_dmg() (
dmg_file=$1
mount_point="/Volumes/tmp-dmg"
hdiutil attach -quiet -nobrowse -mountpoint "${mount_point}" "${dmg_file}"
cp -fR "${mount_point}/." ./
hdiutil detach -quiet -force "${mount_point}"
}
http_download_curl() {
)
http_download_curl() (
local_file=$1
source_url=$2
header=$3
log_trace "http_download_curl(local_file=$local_file, source_url=$source_url, header=$header)"
if [ -z "$header" ]; then
code=$(curl -w '%{http_code}' -sL -o "$local_file" "$source_url")
else
code=$(curl -w '%{http_code}' -sL -H "$header" -o "$local_file" "$source_url")
fi
if [ "$code" != "200" ]; then
log_debug "http_download_curl received HTTP status $code"
log_err "received HTTP status=$code for url='$source_url'"
return 1
fi
return 0
}
http_download_wget() {
)
http_download_wget() (
local_file=$1
source_url=$2
header=$3
log_trace "http_download_wget(local_file=$local_file, source_url=$source_url, header=$header)"
if [ -z "$header" ]; then
wget -q -O "$local_file" "$source_url"
else
wget -q --header "$header" -O "$local_file" "$source_url"
fi
}
http_download() {
log_debug "http_download $2"
)
http_download() (
log_debug "http_download(url=$2)"
if is_command curl; then
http_download_curl "$@"
return
@ -284,28 +232,19 @@ http_download() {
http_download_wget "$@"
return
fi
log_crit "http_download unable to find wget or curl"
log_err "http_download unable to find wget or curl"
return 1
}
http_copy() {
)
http_copy() (
tmp=$(mktemp)
http_download "${tmp}" "$1" "$2" || return 1
body=$(cat "$tmp")
rm -f "${tmp}"
echo "$body"
}
github_release() {
owner_repo=$1
version=$2
test -z "$version" && version="latest"
giturl="https://github.com/${owner_repo}/releases/${version}"
json=$(http_copy "$giturl" "Accept:application/json")
test -z "$json" && return 1
version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name":"//' | sed 's/".*//')
test -z "$version" && return 1
echo "$version"
}
hash_sha256() {
)
hash_sha256() (
TARGET=${1:-/dev/stdin}
if is_command gsha256sum; then
hash=$(gsha256sum "$TARGET") || return 1
@ -320,11 +259,12 @@ hash_sha256() {
hash=$(openssl -dst openssl dgst -sha256 "$TARGET") || return 1
echo "$hash" | cut -d ' ' -f a
else
log_crit "hash_sha256 unable to find command to compute sha-256 hash"
log_err "hash_sha256 unable to find command to compute sha-256 hash"
return 1
fi
}
hash_sha256_verify() {
)
hash_sha256_verify() (
TARGET=$1
checksums=$2
if [ -z "$checksums" ]; then
@ -342,51 +282,400 @@ hash_sha256_verify() {
log_err "hash_sha256_verify checksum for '$TARGET' did not verify ${want} vs $got"
return 1
fi
}
cat /dev/null <<EOF
------------------------------------------------------------------------
End of functions from https://github.com/client9/shlib
------------------------------------------------------------------------
)
# ------------------------------------------------------------------------
# End of functions from https://github.com/client9/shlib
# ------------------------------------------------------------------------
# asset_file_exists [path]
#
# succeeds (returns 0) when the given path is an existing regular file,
# fails (returns 1) otherwise
#
asset_file_exists() (
  path="$1"
  test -f "${path}"
)
# github_release_json [owner] [repo] [version]
#
# fetches the GitHub release metadata for the given version (or the
# latest release when version is empty) and writes the JSON payload to
# stdout; fails when nothing could be fetched
#
github_release_json() (
  owner=$1
  repo=$2
  version=$3

  if [ -z "$version" ]; then
    version="latest"
  fi

  release_url="https://github.com/${owner}/${repo}/releases/${version}"
  payload=$(http_copy "$release_url" "Accept:application/json")

  log_trace "github_release_json(owner=${owner}, repo=${repo}, version=${version}) returned '${payload}'"

  test -z "$payload" && return 1

  echo "${payload}"
)
# extract_value [key-value-pair]
#
# splits a colon-delimited "key:value" string and writes the value
# portion (everything after the first ':') to stdout. Any further
# colons remain part of the value.
#
extract_value() (
  pair="$1"
  # split on the first ':' only; 'read' assigns the remainder to the last var
  IFS=':' read -r _ val << EOF
${pair}
EOF
  echo "$val"
)
PROJECT_NAME="grype"
OWNER=anchore
REPO="grype"
BINARY=grype
FORMAT=tar.gz
OS=$(uname_os)
ARCH=$(uname_arch)
PREFIX="$OWNER/$REPO"
# extract_json_value [json] [key]
#
# outputs value of the key from the given json string
#
extract_json_value() (
json="$1"
key="$2"
key_value=$(echo "${json}" | grep -o "\"$key\":[^,]*[,}]" | tr -d '",}')
# use in logging routines
log_prefix() {
echo "$PREFIX"
}
PLATFORM="${OS}/${ARCH}"
GITHUB_DOWNLOAD=https://github.com/${OWNER}/${REPO}/releases/download
extract_value "$key_value"
)
uname_os_check "$OS"
uname_arch_check "$ARCH"
# github_release_tag [release-json]
#
# extracts the "tag_name" field from a GitHub release JSON payload and
# writes it to stdout; fails when the field is missing or empty
#
github_release_tag() (
  release_json="$1"

  found_tag=$(extract_json_value "${release_json}" "tag_name")
  if [ -z "$found_tag" ]; then
    return 1
  fi
  echo "$found_tag"
)
parse_args "$@"
# download_github_release_checksums [release-url-prefix] [name] [version] [output-dir]
#
# outputs path to the downloaded checksums file
#
download_github_release_checksums() (
download_url="$1"
name="$2"
version="$3"
output_dir="$4"
get_binaries
log_trace "download_github_release_checksums(url=${download_url}, name=${name}, version=${version}, output_dir=${output_dir})"
tag_to_version
checksum_filename=${name}_${version}_checksums.txt
checksum_url=${download_url}/${checksum_filename}
output_path="${output_dir}/${checksum_filename}"
adjust_format
http_download "${output_path}" "${checksum_url}" ""
asset_file_exists "${output_path}"
adjust_os
log_trace "download_github_release_checksums() returned '${output_path}'"
adjust_arch
echo "${output_path}"
)
log_info "found version: ${VERSION} for ${TAG}/${OS}/${ARCH}"
# search_for_asset [checksums-file-path] [name] [os] [arch] [format]
#
# outputs name of the asset to download
#
search_for_asset() (
checksum_path="$1"
name="$2"
os="$3"
arch="$4"
format="$5"
NAME=${PROJECT_NAME}_${VERSION}_${OS}_${ARCH}
ARCHIVE=${NAME}.${FORMAT}
ARCHIVE_URL=${GITHUB_DOWNLOAD}/${TAG}/${ARCHIVE}
CHECKSUM=${PROJECT_NAME}_${VERSION}_checksums.txt
CHECKSUM_URL=${GITHUB_DOWNLOAD}/${TAG}/${CHECKSUM}
log_trace "search_for_asset(checksum-path=${checksum_path}, name=${name}, os=${os}, arch=${arch}, format=${format})"
asset_glob="${name}_.*_${os}_${arch}.${format}"
output_path=$(grep -o "${asset_glob}" "${checksum_path}" || true)
log_trace "search_for_asset() returned '${output_path}'"
echo "${output_path}"
)
# uname_os
#
# writes the host operating system name to stdout, lowercased and
# normalized to a GOOS-style value (cygwin/mingw/msys all map to "windows")
#
uname_os() (
  raw=$(uname -s | tr '[:upper:]' '[:lower:]')
  case "$raw" in
    cygwin_nt* | mingw* | msys_nt*) raw="windows" ;;
  esac
  uname_os_check "$raw"
  log_trace "uname_os() returned '${raw}'"
  echo "$raw"
)
# uname_arch
#
# writes the host machine architecture to stdout, normalized to a
# GOARCH-style value (e.g. x86_64 -> amd64, aarch64 -> arm64);
# unrecognized values pass through unchanged
#
uname_arch() (
  raw=$(uname -m)
  case $raw in
    x86_64) raw="amd64" ;;
    x86 | i686 | i386) raw="386" ;;
    aarch64) raw="arm64" ;;
    armv5*) raw="armv5" ;;
    armv6*) raw="armv6" ;;
    armv7*) raw="armv7" ;;
  esac
  uname_arch_check "${raw}"
  log_trace "uname_arch() returned '${raw}'"
  echo "${raw}"
)
# get_release_tag [owner] [repo] [tag]
#
# resolves the requested tag (or "latest" when empty) against the GitHub
# releases endpoint and writes the canonical tag name to stdout; fails
# when the release cannot be found
#
get_release_tag() (
  owner="$1"
  repo="$2"
  tag="$3"

  log_trace "get_release_tag(owner=${owner}, repo=${repo}, tag=${tag})"

  release_json=$(github_release_json "${owner}" "${repo}" "${tag}")
  resolved=$(github_release_tag "${release_json}")
  if [ -z "${resolved}" ]; then
    return 1
  fi

  log_trace "get_release_tag() returned '${resolved}'"
  echo "${resolved}"
)
# tag_to_version [tag]
#
# strips a single leading 'v' from the tag (if present) and writes the
# resulting version string to stdout
#
tag_to_version() (
  tag="$1"
  version="${tag#v}"
  log_trace "tag_to_version(tag=${tag}) returned '${version}'"
  echo "$version"
)
# get_binary_name [os] [arch] [default-name]
#
# writes the platform-appropriate binary file name to stdout
# (appends ".exe" on windows, otherwise the default name is unchanged)
#
get_binary_name() (
  os="$1"
  arch="$2"
  binary="$3"
  original_binary="${binary}"

  if [ "${os}" = "windows" ]; then
    binary="${binary}.exe"
  fi

  log_trace "get_binary_name(os=${os}, arch=${arch}, binary=${original_binary}) returned '${binary}'"
  echo "${binary}"
)
execute
# get_format_name [os] [arch] [default-format]
#
# writes the archive format to download for the given platform to stdout
# (windows releases ship as zip; all others keep the default format)
#
get_format_name() (
  os="$1"
  arch="$2"
  format="$3"
  original_format="${format}"

  if [ "${os}" = "windows" ]; then
    format=zip
  fi

  log_trace "get_format_name(os=${os}, arch=${arch}, format=${original_format}) returned '${format}'"
  echo "${format}"
)
# download_and_install_asset [release-url-prefix] [download-path] [install-path] [name] [os] [arch] [version] [format] [binary]
#
# downloads the release archive matching the given platform into
# download-path, then unpacks it and installs the binary into
# install-path; fails when no matching release asset exists
#
download_and_install_asset() (
  download_url="$1"
  download_path="$2"
  install_path=$3
  name="$4"
  os="$5"
  arch="$6"
  version="$7"
  format="$8"
  binary="$9"

  asset_filepath=$(download_asset "${download_url}" "${download_path}" "${name}" "${os}" "${arch}" "${version}" "${format}")

  if [ -n "${asset_filepath}" ]; then
    install_asset "${asset_filepath}" "${install_path}" "${binary}"
  else
    # nothing was downloaded: report and propagate the failure
    log_err "could not find release asset for os='${os}' arch='${arch}' format='${format}' "
    return 1
  fi
)
# download_asset [release-url-prefix] [download-path] [name] [os] [arch] [version] [format]
#
# downloads the release checksums file, locates the asset matching the
# given platform within it, downloads that asset, and verifies its
# sha256 checksum. Writes the path of the downloaded asset to stdout;
# fails (with no output) when the checksums file or a matching asset
# cannot be found.
#
download_asset() (
  download_url="$1"
  destination="$2"
  name="$3"
  os="$4"
  arch="$5"
  version="$6"
  format="$7"

  log_trace "download_asset(url=${download_url}, destination=${destination}, name=${name}, os=${os}, arch=${arch}, version=${version}, format=${format})"

  checksums_filepath=$(download_github_release_checksums "${download_url}" "${name}" "${version}" "${destination}")

  # don't continue if the checksums file could not be downloaded
  if [ -z "${checksums_filepath}" ]; then
    return 1
  fi

  # note: quoted to survive paths containing spaces
  log_trace "checksums content:\n$(cat "${checksums_filepath}")"

  asset_filename=$(search_for_asset "${checksums_filepath}" "${name}" "${os}" "${arch}" "${format}")

  # don't continue if we couldn't find a matching asset from the checksums file
  if [ -z "${asset_filename}" ]; then
    return 1
  fi

  asset_url="${download_url}/${asset_filename}"
  asset_filepath="${destination}/${asset_filename}"
  http_download "${asset_filepath}" "${asset_url}" ""

  hash_sha256_verify "${asset_filepath}" "${checksums_filepath}"

  log_trace "download_asset() returned '${asset_filepath}'"

  echo "${asset_filepath}"
)
# install_asset [asset-path] [destination-path] [binary]
#
# unpacks the downloaded archive next to where it was saved and installs
# the named binary into the destination directory (created if missing).
# A blank asset path is a no-op.
#
install_asset() (
  asset_filepath="$1"
  destination="$2"
  binary="$3"

  log_trace "install_asset(asset=${asset_filepath}, destination=${destination}, binary=${binary})"

  # don't continue if we don't have anything to install
  if [ -z "${asset_filepath}" ]; then
    return
  fi

  archive_dir=$(dirname "${asset_filepath}")

  # unarchive the downloaded archive in place (next to the download)
  (cd "${archive_dir}" && unpack "${asset_filepath}")

  # make sure the destination dir exists before installing into it
  if [ ! -d "${destination}" ]; then
    install -d "${destination}"
  fi

  # copy the unpacked binary into the destination dir
  install "${archive_dir}/${binary}" "${destination}/"
)
# main [-b install-dir] [-d[d]] [-x] [tag]
#
# entry point: parses flags (-b install dir, -d debug / -dd trace,
# -x shell trace), resolves the requested release tag (latest when
# omitted), and downloads + installs the matching release asset.
main() (
  # parse arguments

  install_dir=${install_dir:-./bin}

  while getopts "b:dh?x" arg; do
    case "$arg" in
      b) install_dir="$OPTARG" ;;
      d)
        if [ "$_logp" = "$log_info_priority" ]; then
          # -d == debug
          log_set_priority $log_debug_priority
        else
          # -dd (or -ddd...) == trace
          log_set_priority $log_trace_priority
        fi
        ;;
      h | \?) usage "$0" ;;
      x) set -x ;;
    esac
  done
  shift $((OPTIND - 1))

  # the tag positional argument is optional (empty means "latest")
  set +u
  requested_tag=$1
  if [ -z "${requested_tag}" ]; then
    log_info "checking github for the current release tag"
    requested_tag=""
  else
    log_info "checking github for release tag='${requested_tag}'"
  fi
  set -u

  # resolve the requested tag to the canonical release tag. Checking the
  # command directly (instead of "$?" after the assignment) keeps the
  # user's requested tag intact for the error message below.
  if ! tag=$(get_release_tag "${OWNER}" "${REPO}" "${requested_tag}"); then
    log_err "unable to find tag='${requested_tag}'"
    log_err "do not specify a version or select a valid version from https://github.com/${OWNER}/${REPO}/releases"
    return 1
  fi

  version=$(tag_to_version "${tag}")
  download_dir=$(mktemp -d)
  # always remove the temp download dir, even on failure
  trap 'rm -rf -- "$download_dir"' EXIT

  # run the application

  os=$(uname_os)
  arch=$(uname_arch)
  format=$(get_format_name "${os}" "${arch}" "tar.gz")
  binary=$(get_binary_name "${os}" "${arch}" "${PROJECT_NAME}")
  download_url="${GITHUB_DOWNLOAD_PREFIX}/${tag}"

  log_info "using release tag='${tag}' version='${version}' os='${os}' arch='${arch}'"
  log_debug "downloading files into ${download_dir}"

  # don't continue if we couldn't install the asset
  if ! download_and_install_asset "${download_url}" "${download_dir}" "${install_dir}" "${PROJECT_NAME}" "${os}" "${arch}" "${version}" "${format}" "${binary}"; then
    log_err "failed to install ${PROJECT_NAME}"
    return 1
  fi

  log_info "installed ${install_dir}/${binary}"
)
# entrypoint
set +u
if [ -z "${TEST_INSTALL_SH}" ]; then
set -u
main "$@"
fi
set -u

View file

@ -3,37 +3,48 @@ package version
import (
"fmt"
"runtime"
"strings"
)
const valueNotProvided = "[not provided]"
// all variables here are provided as build-time arguments, with clear default values
var version = valueNotProvided
var syftVersion = valueNotProvided
var gitCommit = valueNotProvided
var gitTreeState = valueNotProvided
var gitDescription = valueNotProvided
var buildDate = valueNotProvided
var platform = fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH)
// Version defines the application version details (generally from build information)
type Version struct {
Version string `json:"version"`
SyftVersion string `json:"syftVersion"`
GitCommit string `json:"gitCommit"`
GitTreeState string `json:"gitTreeState"`
BuildDate string `json:"buildDate"`
GoVersion string `json:"goVersion"`
Compiler string `json:"compiler"`
Platform string `json:"platform"`
Version string `json:"version"` // application semantic version
SyftVersion string `json:"syftVersion"` // the version of syft being used by grype
GitCommit string `json:"gitCommit"` // git SHA at build-time
GitDescription string `json:"gitDescription"` // output of 'git describe --dirty --always --tags'
BuildDate string `json:"buildDate"` // date of the build
GoVersion string `json:"goVersion"` // go runtime version at build-time
Compiler string `json:"compiler"` // compiler used at build-time
Platform string `json:"platform"` // GOOS and GOARCH at build-time
}
func (v Version) isProductionBuild() bool {
if strings.Contains(v.Version, "SNAPSHOT") || strings.Contains(v.Version, valueNotProvided) {
return false
}
return true
}
// FromBuild provides all version details
func FromBuild() Version {
return Version{
Version: version,
SyftVersion: syftVersion,
GitCommit: gitCommit,
GitTreeState: gitTreeState,
BuildDate: buildDate,
GoVersion: runtime.Version(),
Compiler: runtime.Compiler,
Platform: platform,
Version: version,
SyftVersion: syftVersion,
GitCommit: gitCommit,
GitDescription: gitDescription,
BuildDate: buildDate,
GoVersion: runtime.Version(),
Compiler: runtime.Compiler,
Platform: platform,
}
}

View file

@ -19,13 +19,13 @@ var latestAppVersionURL = struct {
}
func IsUpdateAvailable() (bool, string, error) {
currentVersionStr := FromBuild().Version
currentVersion, err := hashiVersion.NewVersion(currentVersionStr)
currentBuildInfo := FromBuild()
if !currentBuildInfo.isProductionBuild() {
// don't allow for non-production builds to check for a version.
return false, "", nil
}
currentVersion, err := hashiVersion.NewVersion(currentBuildInfo.Version)
if err != nil {
if currentVersionStr == valueNotProvided {
// this is the default build arg and should be ignored (this is not an error case)
return false, "", nil
}
return false, "", fmt.Errorf("failed to parse current application version: %w", err)
}

View file

@ -1,3 +0,0 @@
*.json
*-reports
!/images/**/Dockerfile

View file

@ -1,50 +0,0 @@
ifndef GRYPE_CMD
GRYPE_CMD = go run ../../main.go -c ../../test/grype-test-config.yaml
endif
IMAGE_CLEAN = $(shell basename $(COMPARE_IMAGE) | tr ":" "_")
GRYPE_DIR = grype-reports
GRYPE_REPORT = $(GRYPE_DIR)/$(IMAGE_CLEAN).json
INLINE_DIR = inline-reports
INLINE_REPORT = $(INLINE_DIR)/$(IMAGE_CLEAN)-content-os.json
ifndef GRYPE_DIR
$(error GRYPE_DIR is not set)
endif
ifndef INLINE_DIR
$(error INLINE_DIR is not set)
endif
.PHONY: all
.DEFAULT_GOAL :=
all: clean-grype
./compare-all.sh
.PHONY: compare-image
compare-image: $(GRYPE_REPORT) $(INLINE_REPORT)
./compare.py $(COMPARE_IMAGE)
.PHONY: gather-image
gather-image: $(GRYPE_REPORT) $(INLINE_REPORT)
$(INLINE_REPORT):
echo "Creating $(INLINE_REPORT)..."
mkdir -p $(INLINE_DIR)
curl -s https://ci-tools.anchore.io/inline_scan-v0.7.0 | bash -s -- -p -r $(COMPARE_IMAGE)
mv anchore-reports/* $(INLINE_DIR)/
rmdir anchore-reports
$(GRYPE_REPORT):
echo "Creating $(GRYPE_REPORT)..."
mkdir -p $(GRYPE_DIR)
$(GRYPE_CMD) $(COMPARE_IMAGE) -o json > $(GRYPE_REPORT)
.PHONY: clean
clean:
rm -f $(INLINE_DIR)/*
.PHONY: clean-grype
clean-grype:
rm -f $(GRYPE_DIR)/*

View file

@ -1,29 +0,0 @@
#!/usr/bin/env bash
set -eu
image_build_dir="./images"
builds=($(ls -d -1 images/*))
images=("debian:10.5" "centos:8.2.2004" "alpine:3.12.0")
# build images
for build_path in "${builds[@]}"; do
echo "Building $build_path"
pushd $build_path
new_image=$(basename $build_path):latest
docker build -q -t $new_image .
images+=($new_image)
popd
done
# gather all image analyses
for img in "${images[@]}"; do
echo "Gathering facts for $img"
COMPARE_IMAGE=${img} make gather-image
done
# compare all results
for img in "${images[@]}"; do
echo "Comparing results for $img"
COMPARE_IMAGE=${img} make compare-image
done

View file

@ -1,336 +0,0 @@
#!/usr/bin/env python3
import os
import re
import sys
import json
import collections
INCLUDE_SEVERITY = False
NO_COMPARE_VALUE = "n/a"
QUALITY_GATE_THRESHOLD = 0.85
INDENT = " "
IMAGE_QUALITY_GATE = collections.defaultdict(lambda: QUALITY_GATE_THRESHOLD, **{
# not necessary if not comparing severity
# "debian:10.5": 0.86, # anchore is replacing "Negligible" severity with "Low" in some (all?) situations
"alpine:3.12.0": 1.0, # no known vulnerabilities
"alpine-vuln:latest": 1.0,
"python-vuln:latest": 1.0,
"java-vuln:latest": 1.0,
})
# We additionally fail if an image is above a particular threshold. Why? We expect the lower threshold to be 90%,
# however additional functionality in grype is still being implemented, so this threshold may not be able to be met.
# In these cases the IMAGE_QUALITY_GATE is set to a lower value to allow the test to pass for known issues. Once these
# issues/enhancements are done we want to ensure that the lower threshold is bumped up to catch regression. The only way
# to do this is to select an upper threshold for images with known threshold values, so we have a failure that
# loudly indicates the lower threshold should be bumped.
IMAGE_UPPER_THRESHOLD = collections.defaultdict(lambda: 1, **{
})
Metadata = collections.namedtuple("Metadata", "version severity")
Package = collections.namedtuple("Package", "name type")
Vulnerability = collections.namedtuple("Vulnerability", "id package")
def clean(image: str) -> str:
return os.path.basename(image.replace(":", "_"))
class InlineScan:
report_tmpl = "{image}-{report}.json"
def __init__(self, image, report_dir="./"):
self.report_dir = report_dir
self.image = image
def _report_path(self, report):
return os.path.join(
self.report_dir,
self.report_tmpl.format(image=clean(self.image), report=report),
)
def _enumerate_section(self, report, section):
report_path = self._report_path(report=report)
os_report_path = self._report_path(report="content-os")
if os.path.exists(os_report_path) and not os.path.exists(report_path):
# if the OS report is there but the target report is not, that is engine's way of saying "no findings"
return
with open(report_path) as json_file:
data = json.load(json_file)
for entry in data[section]:
yield entry
def vulnerabilities(self):
vulnerabilities = set()
metadata = collections.defaultdict(dict)
for entry in self._enumerate_section(report="vuln", section="vulnerabilities"):
package = Package(
name=entry["package_name"],
type=entry["package_type"].lower(),
)
vulnerability = Vulnerability(
id=entry["vuln"],
package=package,
)
vulnerabilities.add(vulnerability)
severity = entry["severity"]
if not INCLUDE_SEVERITY:
severity = NO_COMPARE_VALUE
metadata[package.type][package] = Metadata(version=entry["package_version"], severity=severity)
return vulnerabilities, metadata
def packages(self):
python_packages = self._python_packages()
os_packages = self._os_packages()
return python_packages | os_packages
def _python_packages(self):
packages = set()
for entry in self._enumerate_section(
report="content-python", section="content"
):
package = Package(name=entry["package"], type=entry["type"].lower(),)
packages.add(package)
return packages
def _os_packages(self):
packages = set()
for entry in self._enumerate_section(report="content-os", section="content"):
package = Package(name=entry["package"], type=entry["type"].lower())
packages.add(package)
return packages
class Grype:
report_tmpl = "{image}.json"
def __init__(self, image, report_dir="./"):
self.report_path = os.path.join(
report_dir, self.report_tmpl.format(image=clean(image))
)
def _enumerate_section(self, section):
with open(self.report_path) as json_file:
data = json.load(json_file)
for entry in data[section]:
yield entry
def vulnerabilities(self):
vulnerabilities = set()
metadata = collections.defaultdict(dict)
for entry in self._enumerate_section(section="matches"):
# normalize to inline
pkg_type = entry["artifact"]["type"].lower()
if pkg_type in ("wheel", "egg"):
pkg_type = "python"
elif pkg_type in ("deb",):
pkg_type = "dpkg"
elif pkg_type in ("java-archive",):
pkg_type = "java"
elif pkg_type in ("apk",):
pkg_type = "apkg"
package = Package(name=entry["artifact"]["name"], type=pkg_type,)
vulnerability = Vulnerability(
id=entry["vulnerability"]["id"],
package=package,
)
vulnerabilities.add(vulnerability)
severity = entry["vulnerability"]["severity"]
if not INCLUDE_SEVERITY:
severity = NO_COMPARE_VALUE
# engine doesn't capture epoch info, so we cannot use it during comparison
version = entry["artifact"]["version"]
if re.match(r'^\d+:', version):
version = ":".join(version.split(":")[1:])
metadata[package.type][package] = Metadata(version=version, severity=severity)
return vulnerabilities, metadata
def print_rows(rows):
if not rows:
return
widths = []
for col, _ in enumerate(rows[0]):
width = max(len(row[col]) for row in rows) + 2 # padding
widths.append(width)
for row in rows:
print("".join(word.ljust(widths[col_idx]) for col_idx, word in enumerate(row)))
def main(image):
print(colors.bold+"Image:", image, colors.reset)
if not INCLUDE_SEVERITY:
print(colors.bold + colors.fg.orange + "Warning: not comparing severity", colors.reset)
inline = InlineScan(image=image, report_dir="inline-reports")
inline_vulnerabilities, inline_metadata = inline.vulnerabilities()
grype = Grype(image=image, report_dir="grype-reports")
grype_vulnerabilities, grype_metadata = grype.vulnerabilities()
if len(inline.packages()) == 0:
# we don't want to accidentally pass the vulnerability check if there were no packages discovered.
# (we are purposefully selecting test images that are guaranteed to have packages, so this should never happen)
print(colors.bold + colors.fg.red + "inline found no packages!", colors.reset)
return 1
if len(inline_vulnerabilities) == 0:
if len(grype_vulnerabilities) == 0:
print(colors.bold+"nobody found any vulnerabilities", colors.reset)
return 0
print(colors.bold+"inline does not have any vulnerabilities to compare to", colors.reset)
return 0
same_vulnerabilities = grype_vulnerabilities & inline_vulnerabilities
if len(inline_vulnerabilities) == 0:
percent_overlap_vulnerabilities = 0
else:
percent_overlap_vulnerabilities = (
float(len(same_vulnerabilities)) / float(len(inline_vulnerabilities))
) * 100.0
bonus_vulnerabilities = grype_vulnerabilities - inline_vulnerabilities
missing_vulnerabilities = inline_vulnerabilities - grype_vulnerabilities
inline_metadata_set = set()
for vulnerability in inline_vulnerabilities:
metadata = inline_metadata[vulnerability.package.type][vulnerability.package]
inline_metadata_set.add((vulnerability.package, metadata))
grype_overlap_metadata_set = set()
for vulnerability in grype_vulnerabilities:
metadata = grype_metadata[vulnerability.package.type][vulnerability.package]
# we only want to really count mismatched metadata for packages that are at least found by inline
if vulnerability.package in inline_metadata[vulnerability.package.type]:
grype_overlap_metadata_set.add((vulnerability.package, metadata))
same_metadata = grype_overlap_metadata_set & inline_metadata_set
missing_metadata = inline_metadata_set - same_metadata
if len(inline_metadata_set) == 0:
percent_overlap_metadata = 0
else:
percent_overlap_metadata = (
float(len(same_metadata)) / float(len(inline_metadata_set))
) * 100.0
if len(bonus_vulnerabilities) > 0:
rows = []
print(colors.bold + "Grype found extra vulnerabilities:", colors.reset)
for vulnerability in sorted(list(bonus_vulnerabilities)):
metadata = grype_metadata[vulnerability.package.type][vulnerability.package]
rows.append([INDENT, repr(vulnerability), repr(metadata)])
print_rows(rows)
print()
if len(missing_vulnerabilities) > 0:
rows = []
print(colors.bold + "Grype missed vulnerabilities:", colors.reset)
for vulnerability in sorted(list(missing_vulnerabilities)):
metadata = inline_metadata[vulnerability.package.type][vulnerability.package]
rows.append([INDENT, repr(vulnerability), repr(metadata)])
print_rows(rows)
print()
if len(missing_metadata) > 0:
rows = []
print(colors.bold + "Grype mismatched metadata:", colors.reset)
for inline_metadata_pair in sorted(list(missing_metadata)):
pkg, metadata = inline_metadata_pair
if pkg in grype_metadata[pkg.type]:
grype_metadata_item = grype_metadata[pkg.type][pkg]
else:
grype_metadata_item = "--- MISSING ---"
rows.append([INDENT, "for:", repr(pkg), ":", repr(grype_metadata_item), "!=", repr(metadata)])
print_rows(rows)
print()
print(colors.bold+"Summary:", colors.reset)
print(" Image: %s" % image)
print(" Inline Vulnerabilities : %d" % len(inline_vulnerabilities))
print(" Grype Vulnerabilities : %d " % len(grype_vulnerabilities))
print(" (extra) : %d (note: this is ignored in the analysis!)" % len(bonus_vulnerabilities))
print(" (missing) : %d " % len(missing_vulnerabilities))
print(
" Baseline Vulnerabilities Matched : %2.1f %% (%d/%d vulnerability)"
% (percent_overlap_vulnerabilities, len(same_vulnerabilities), len(inline_vulnerabilities))
)
print(
" Baseline Metadata Matched : %2.1f %% (%d/%d metadata)"
% (percent_overlap_metadata, len(same_metadata), len(inline_metadata_set))
)
overall_score = (percent_overlap_vulnerabilities + percent_overlap_metadata) / 2.0
print(colors.bold + " Overall Score: %2.1f %%" % overall_score, colors.reset)
upper_gate_value = IMAGE_UPPER_THRESHOLD[image] * 100
lower_gate_value = IMAGE_QUALITY_GATE[image] * 100
if overall_score < lower_gate_value:
print(colors.bold + " Quality Gate: " + colors.fg.red + "FAILED (is not >= %d %%)\n" % lower_gate_value, colors.reset)
return 1
elif overall_score > upper_gate_value:
print(colors.bold + " Quality Gate: " + colors.fg.orange + "FAILED (lower threshold is artificially low and should be updated)\n", colors.reset)
return 1
else:
print(colors.bold + " Quality Gate: " + colors.fg.green + "pass (>= %d %%)\n" % lower_gate_value, colors.reset)
return 0
class colors:
reset='\033[0m'
bold='\033[01m'
disable='\033[02m'
underline='\033[04m'
reverse='\033[07m'
strikethrough='\033[09m'
invisible='\033[08m'
class fg:
black='\033[30m'
red='\033[31m'
green='\033[32m'
orange='\033[33m'
blue='\033[34m'
purple='\033[35m'
cyan='\033[36m'
lightgrey='\033[37m'
darkgrey='\033[90m'
lightred='\033[91m'
lightgreen='\033[92m'
yellow='\033[93m'
lightblue='\033[94m'
pink='\033[95m'
lightcyan='\033[96m'
class bg:
black='\033[40m'
red='\033[41m'
green='\033[42m'
orange='\033[43m'
blue='\033[44m'
purple='\033[45m'
cyan='\033[46m'
lightgrey='\033[47m'
if __name__ == "__main__":
if len(sys.argv) != 2:
sys.exit("provide an image")
rc = main(sys.argv[1])
sys.exit(rc)

View file

@ -1,5 +0,0 @@
FROM alpine:3.12.0
RUN wget http://dl-cdn.alpinelinux.org/alpine/v3.9/main/x86_64/libvncserver-0.9.11-r3.apk
RUN apk add libvncserver-0.9.11-r3.apk
# I know this is cheating a bit...
RUN sed -i 's/V:0.9.11-r3/V:0.9.9-r0/' /lib/apk/db/installed

View file

@ -1,2 +0,0 @@
FROM alpine:3.12.0
RUN wget https://repo1.maven.org/maven2/org/quartz-scheduler/quartz/2.3.1/quartz-2.3.1.jar

View file

@ -1,2 +0,0 @@
FROM python:3.8.5-alpine3.12
RUN pip install requests==2.10.0

View file

@ -0,0 +1 @@
**

1
test/install/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
cache/

View file

@ -0,0 +1,40 @@
. test_harness.sh
# search for an asset in a release checksums file
# verify search_for_asset against a release-style checksums fixture
test_search_for_asset_release() {
  fixture=./test-fixtures/grype_0.32.0_checksums.txt

  # search_for_asset [checksums-file-path] [name] [os] [arch] [format]

  # a matching os/arch/format must resolve to the published asset name
  found=$(search_for_asset "${fixture}" "grype" "linux" "amd64" "tar.gz")
  assertEquals "grype_0.32.0_linux_amd64.tar.gz" "${found}" "unable to find release asset"

  # os values are case-sensitive, so "Linux" must not match anything
  found=$(search_for_asset "${fixture}" "grype" "Linux" "amd64" "tar.gz")
  assertEquals "" "${found}" "found a release asset but did not expect to (os)"

  # the fixture publishes no darwin rpm
  found=$(search_for_asset "${fixture}" "grype" "darwin" "amd64" "rpm")
  assertEquals "" "${found}" "found a release asset but did not expect to (format)"
}

run_test_case test_search_for_asset_release
# search for an asset in a snapshot checksums file
# verify search_for_asset against a snapshot-style checksums fixture
# (snapshot asset names embed a -SNAPSHOT-<sha> suffix in the version)
test_search_for_asset_snapshot() {
  fixture=./test-fixtures/grype_0.32.0-SNAPSHOT-d461f63_checksums.txt

  # search_for_asset [checksums-file-path] [name] [os] [arch] [format]

  # a matching os/arch/format must resolve to the snapshot asset name
  found=$(search_for_asset "${fixture}" "grype" "linux" "amd64" "rpm")
  assertEquals "grype_0.32.0-SNAPSHOT-d461f63_linux_amd64.rpm" "${found}" "unable to find snapshot asset"

  # the snapshot fixture publishes no linux zip
  found=$(search_for_asset "${fixture}" "grype" "linux" "amd64" "zip")
  assertEquals "" "${found}" "found a snapshot asset but did not expect to (format)"
}

run_test_case test_search_for_asset_snapshot

View file

@ -0,0 +1,86 @@
. test_harness.sh
DOWNLOAD_SNAPSHOT_POSITIVE_CASES=0
# helper for asserting test_positive_snapshot_download_asset positive cases
# positive case: download_asset must fetch the snapshot asset for the given
# os/arch/format from the locally served snapshot dir and produce a file
# byte-identical to the original build artifact.
test_positive_snapshot_download_asset() {
os="$1"
arch="$2"
format="$3"
# for troubleshooting
# log_set_priority 10
name=${PROJECT_NAME}
# URL of the locally served snapshot directory and the snapshot version string
github_download=$(snapshot_download_url)
version=$(snapshot_version)
tmpdir=$(mktemp -d)
actual_filepath=$(download_asset "${github_download}" "${tmpdir}" "${name}" "${os}" "${arch}" "${version}" "${format}" )
assertFileExists "${actual_filepath}" "download_asset os=${os} arch=${arch} format=${format}"
# the download must match the source artifact exactly
assertFilesEqual \
"$(snapshot_dir)/${name}_${version}_${os}_${arch}.${format}" \
"${actual_filepath}" \
"unable to download os=${os} arch=${arch} format=${format}"
# tally successful cases so full asset coverage can be asserted later.
# NOTE(review): assumes run_test_case invokes this in the current shell —
# the counter would not persist across a subshell; confirm in test_harness.sh
((DOWNLOAD_SNAPSHOT_POSITIVE_CASES++))
rm -rf -- "$tmpdir"
}
# after all positive cases have run, the success counter must equal the number
# of assets produced by the snapshot build — guards against silently skipping
# a newly added os/arch/format variant.
test_download_snapshot_asset_exercised_all_assets() {
expected=$(snapshot_assets_count)
assertEquals "${expected}" "${DOWNLOAD_SNAPSHOT_POSITIVE_CASES}" "did not download all possible assets (missing an os/arch/format variant?)"
}
# helper for asserting download_asset negative cases
# negative case: download_asset must yield no filepath (empty output) when
# asked for an os/arch/format combination that does not exist in the snapshot.
test_negative_snapshot_download_asset() {
os="$1"
arch="$2"
format="$3"
# for troubleshooting
# log_set_priority 10
name=${PROJECT_NAME}
github_download=$(snapshot_download_url)
version=$(snapshot_version)
tmpdir=$(mktemp -d)
# an asset that does not exist should produce an empty path, not an error file
actual_filepath=$(download_asset "${github_download}" "${tmpdir}" "${name}" "${os}" "${arch}" "${version}" "${format}")
assertEquals "" "${actual_filepath}" "unable to download os=${os} arch=${arch} format=${format}"
rm -rf -- "$tmpdir"
}
# serve the snapshot directory over a local HTTP server for the duration of
# this test file; the trap guarantees teardown even if a test exits early
worker_pid=$(setup_snapshot_server)
trap 'teardown_snapshot_server ${worker_pid}' EXIT
# exercise all possible assets
run_test_case test_positive_snapshot_download_asset "linux" "amd64" "tar.gz"
run_test_case test_positive_snapshot_download_asset "linux" "amd64" "rpm"
run_test_case test_positive_snapshot_download_asset "linux" "amd64" "deb"
run_test_case test_positive_snapshot_download_asset "linux" "arm64" "tar.gz"
run_test_case test_positive_snapshot_download_asset "linux" "arm64" "rpm"
run_test_case test_positive_snapshot_download_asset "linux" "arm64" "deb"
run_test_case test_positive_snapshot_download_asset "darwin" "amd64" "tar.gz"
run_test_case test_positive_snapshot_download_asset "darwin" "arm64" "tar.gz"
run_test_case test_positive_snapshot_download_asset "windows" "amd64" "zip"
# note: the mac signing process produces a dmg which is not part of the snapshot process (thus is not exercised here)
# let's make certain we covered all assets that were expected
run_test_case test_download_snapshot_asset_exercised_all_assets
# make certain we handle missing assets alright
run_test_case test_negative_snapshot_download_asset "bogus" "amd64" "zip"
# normal completion: clear the trap and shut the server down explicitly
trap - EXIT
teardown_snapshot_server "${worker_pid}"

View file

@ -0,0 +1,41 @@
. test_harness.sh
# download a single asset from a real github release for the given
# os/arch/format and verify its mime type (requires network access).
test_download_release_asset() {
release="$1"
os="$2"
arch="$3"
format="$4"
expected_mime_type="$5"
# for troubleshooting
# log_set_priority 10
name=${PROJECT_NAME}
# fix: quote expansions so an unusual tag/path value cannot word-split or glob
version=$(tag_to_version "${release}")
github_download="https://github.com/${OWNER}/${REPO}/releases/download/${release}"
tmpdir=$(mktemp -d)
actual_filepath=$(download_asset "${github_download}" "${tmpdir}" "${name}" "${os}" "${arch}" "${version}" "${format}" )
assertFileExists "${actual_filepath}" "download_asset os=${os} arch=${arch} format=${format}"
# verify the payload is really the expected container format, not an error page
actual_mime_type=$(file -b --mime-type "${actual_filepath}")
assertEquals "${expected_mime_type}" "${actual_mime_type}" "unexpected mimetype for os=${os} arch=${arch} format=${format}"
rm -rf -- "$tmpdir"
}
# always test against the latest release
# NOTE(review): these cases hit real github.com release assets, so they need
# network access and exercise whatever the latest published release contains
release=$(get_release_tag "${OWNER}" "${REPO}" "latest" )
# exercise all possible assets against a real github release (based on asset listing from https://github.com/anchore/grype/releases/tag/v0.32.0)
run_test_case test_download_release_asset "${release}" "darwin" "amd64" "tar.gz" "application/gzip"
run_test_case test_download_release_asset "${release}" "darwin" "arm64" "tar.gz" "application/gzip"
run_test_case test_download_release_asset "${release}" "linux" "amd64" "tar.gz" "application/gzip"
run_test_case test_download_release_asset "${release}" "linux" "amd64" "rpm" "application/x-rpm"
run_test_case test_download_release_asset "${release}" "linux" "amd64" "deb" "application/vnd.debian.binary-package"
run_test_case test_download_release_asset "${release}" "linux" "arm64" "tar.gz" "application/gzip"
run_test_case test_download_release_asset "${release}" "linux" "arm64" "rpm" "application/x-rpm"
run_test_case test_download_release_asset "${release}" "linux" "arm64" "deb" "application/vnd.debian.binary-package"

View file

@ -0,0 +1,90 @@
. test_harness.sh
INSTALL_ARCHIVE_POSITIVE_CASES=0
# helper for asserting install_asset positive cases
# positive case: download_and_install_asset must fetch a snapshot archive,
# unpack it, and install a binary identical to the one in the snapshot dir.
test_positive_snapshot_install_asset() {
os="$1"
arch="$2"
format="$3"
# for troubleshooting
# log_set_priority 10
name=${PROJECT_NAME}
binary=$(get_binary_name "${os}" "${arch}" "${PROJECT_NAME}")
github_download=$(snapshot_download_url)
version=$(snapshot_version)
download_dir=$(mktemp -d)
install_dir=$(mktemp -d)
download_and_install_asset "${github_download}" "${download_dir}" "${install_dir}" "${name}" "${os}" "${arch}" "${version}" "${format}" "${binary}"
# $? is checked immediately after the call above — do not insert commands here
assertEquals "0" "$?" "download/install did not succeed"
expected_path="${install_dir}/${binary}"
assertFileExists "${expected_path}" "install_asset os=${os} arch=${arch} format=${format}"
# the installed binary must match the original build output exactly
assertFilesEqual \
"$(snapshot_dir)/${os}-build_${os}_${arch}/${binary}" \
"${expected_path}" \
"unable to verify installation of os=${os} arch=${arch} format=${format}"
# tally successful cases so full archive coverage can be asserted later.
# NOTE(review): assumes run_test_case invokes this in the current shell —
# the counter would not persist across a subshell; confirm in test_harness.sh
((INSTALL_ARCHIVE_POSITIVE_CASES++))
rm -rf -- "$download_dir"
rm -rf -- "$install_dir"
}
# helper for asserting install_asset negative cases
# negative case: download_and_install_asset must fail (non-zero status) when
# asked for an os/arch/format combination that does not exist in the snapshot.
test_negative_snapshot_install_asset() {
os="$1"
arch="$2"
format="$3"
# for troubleshooting
# log_set_priority 10
name=${PROJECT_NAME}
binary=$(get_binary_name "${os}" "${arch}" "${PROJECT_NAME}")
github_download=$(snapshot_download_url)
version=$(snapshot_version)
download_dir=$(mktemp -d)
install_dir=$(mktemp -d)
download_and_install_asset "${github_download}" "${download_dir}" "${install_dir}" "${name}" "${os}" "${arch}" "${version}" "${format}" "${binary}"
# $? is checked immediately after the call above — do not insert commands here
assertNotEquals "0" "$?" "download/install should have failed but did not"
rm -rf -- "$download_dir"
rm -rf -- "$install_dir"
}
# after all positive cases have run, the success counter must equal the number
# of archive-type assets in the snapshot build — guards against silently
# skipping a newly added os/arch/format variant.
test_install_asset_exercised_all_archive_assets() {
expected=$(snapshot_assets_archive_count)
assertEquals "${expected}" "${INSTALL_ARCHIVE_POSITIVE_CASES}" "did not download all possible archive assets (missing an os/arch/format variant?)"
}
# serve the snapshot directory over a local HTTP server for the duration of
# this test file; the trap guarantees teardown even if a test exits early
worker_pid=$(setup_snapshot_server)
trap 'teardown_snapshot_server ${worker_pid}' EXIT
# exercise all possible archive assets (not rpm/deb/dmg) against a snapshot build
run_test_case test_positive_snapshot_install_asset "linux" "amd64" "tar.gz"
run_test_case test_positive_snapshot_install_asset "linux" "arm64" "tar.gz"
run_test_case test_positive_snapshot_install_asset "darwin" "amd64" "tar.gz"
run_test_case test_positive_snapshot_install_asset "darwin" "arm64" "tar.gz"
run_test_case test_positive_snapshot_install_asset "windows" "amd64" "zip"
# let's make certain we covered all assets that were expected
run_test_case test_install_asset_exercised_all_archive_assets
# make certain we handle missing assets alright
run_test_case test_negative_snapshot_install_asset "bogus" "amd64" "zip"
# normal completion: clear the trap and shut the server down explicitly
trap - EXIT
teardown_snapshot_server "${worker_pid}"

103
test/install/Makefile Normal file
View file

@ -0,0 +1,103 @@
NAME=grype
IMAGE_NAME=$(NAME)-install.sh-env
UBUNTU_IMAGE=$(IMAGE_NAME):ubuntu-20.04
ALPINE_IMAGE=$(IMAGE_NAME):alpine-3.6
BUSYBOX_IMAGE=busybox:1.35
ENVS=./environments
DOCKER_RUN=docker run --rm -t -w /project/test/install -v $(shell pwd)/../../:/project
UNIT=make unit-local
# acceptance testing is running the current install.sh against the latest release. Note: this could be a problem down
# the line if there are breaking changes made that don't align with the latest release (but will be OK with the next
# release)
ACCEPTANCE_CMD=sh -c '../../install.sh -b /usr/local/bin && grype version'
# CI cache busting values; change these if you want CI to not use previous stored cache
INSTALL_TEST_CACHE_BUSTER=894d8ca
define title
@printf '\n≡≡≡[ $(1) ]≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡\n'
endef
.PHONY: test
test: unit acceptance
.PHONY: ci-test-mac
ci-test-mac: unit-local acceptance-local
# note: do not add acceptance-local to this list
acceptance: acceptance-ubuntu-20.04 acceptance-alpine-3.6 acceptance-busybox-1.35
unit: unit-ubuntu-20.04
unit-local:
$(call title,unit tests)
@for f in $(shell ls *_test.sh); do echo "Running unit test suite '$${f}'"; bash $${f} || exit 1; done
acceptance-local:
$(acceptance)
save: ubuntu-20.04 alpine-3.6 busybox-1.35
@mkdir cache || true
docker image save -o cache/ubuntu-env.tar $(UBUNTU_IMAGE)
docker image save -o cache/alpine-env.tar $(ALPINE_IMAGE)
docker image save -o cache/busybox-env.tar $(BUSYBOX_IMAGE)
load:
docker image load -i cache/ubuntu-env.tar
docker image load -i cache/alpine-env.tar
docker image load -i cache/busybox-env.tar
## UBUNTU #######################################################
acceptance-ubuntu-20.04: ubuntu-20.04
$(call title,ubuntu:20.04 - acceptance)
$(DOCKER_RUN) $(UBUNTU_IMAGE) \
$(ACCEPTANCE_CMD)
unit-ubuntu-20.04: ubuntu-20.04
$(call title,ubuntu:20.04 - unit)
$(DOCKER_RUN) $(UBUNTU_IMAGE) \
$(UNIT)
ubuntu-20.04:
$(call title,ubuntu:20.04 - build environment)
docker build -t $(UBUNTU_IMAGE) -f $(ENVS)/Dockerfile-ubuntu-20.04 .
## ALPINE #######################################################
# note: unit tests cannot be run with sh (alpine dosn't have bash by default)
acceptance-alpine-3.6: alpine-3.6
$(call title,alpine:3.6 - acceptance)
$(DOCKER_RUN) $(ALPINE_IMAGE) \
$(ACCEPTANCE_CMD)
alpine-3.6:
$(call title,alpine:3.6 - build environment)
docker build -t $(ALPINE_IMAGE) -f $(ENVS)/Dockerfile-alpine-3.6 .
## BUSYBOX #######################################################
# note: unit tests cannot be run with sh (busybox dosn't have bash by default)
# note: busybox by default will not have cacerts, so you will get TLS warnings (we want to test under these conditions)
acceptance-busybox-1.35: busybox-1.35
$(call title,busybox-1.35 - acceptance)
$(DOCKER_RUN) $(BUSYBOX_IMAGE) \
$(ACCEPTANCE_CMD)
@echo "\n*** test note: you should see grype spit out a 'x509: certificate signed by unknown authority' error --this is expected ***"
busybox-1.35:
$(call title,busybox-1.35 - build environment)
docker pull $(BUSYBOX_IMAGE)
## For CI ########################################################
.PHONY: cache.fingerprint
# produce a content fingerprint of the install-test environments for CI caching.
# fix: awk needs '$$1' inside a make recipe — make expands a bare '$1' to the
# (empty) make variable '1', so awk previously printed the entire md5sum line
# (hash + path) instead of just the hash, making the fingerprint depend on file
# paths rather than file content alone.
cache.fingerprint:
	$(call title,Install test fixture fingerprint)
	@find ./environments/* -type f -exec md5sum {} + | awk '{print $$1}' | sort | tee /dev/stderr | md5sum | tee cache.fingerprint && echo "$(INSTALL_TEST_CACHE_BUSTER)" >> cache.fingerprint

View file

@ -0,0 +1,2 @@
FROM alpine:3.6
RUN apk update && apk add python3 wget unzip make ca-certificates

View file

@ -0,0 +1,2 @@
FROM ubuntu:20.04
RUN apt update -y && apt install make python3 curl unzip -y

68
test/install/github_test.sh Executable file
View file

@ -0,0 +1,68 @@
. test_harness.sh
# check that we can extract single json values
# check that we can extract single json values from a captured API response
test_extract_json_value() {
fixture=./test-fixtures/github-api-grype-v0.32.0-release.json
content=$(cat ${fixture})
actual=$(extract_json_value "${content}" "tag_name")
assertEquals "v0.32.0" "${actual}" "unable to find tag_name"
actual=$(extract_json_value "${content}" "id")
# fix: the failure message previously said "tag_name" (copy/paste error)
assertEquals "57501596" "${actual}" "unable to find id"
}
run_test_case test_extract_json_value
# check that we can extract github release tag from github api json
# check that the release tag can be extracted from a github API json payload
test_github_release_tag() {
  fixture=./test-fixtures/github-api-grype-v0.32.0-release.json
  payload=$(cat "${fixture}")
  tag=$(github_release_tag "${payload}")
  assertEquals "v0.32.0" "${tag}" "unable to find release tag"
}
run_test_case test_github_release_tag
# download a known good github release checksums file and compare it against a
# test-fixture (requires network access to github.com)
test_download_github_release_checksums() {
tmpdir=$(mktemp -d)
tag=v0.32.0
github_download="https://github.com/anchore/grype/releases/download/${tag}"
name=${PROJECT_NAME}
version=$(tag_to_version "${tag}")
actual_filepath=$(download_github_release_checksums "${github_download}" "${name}" "${version}" "${tmpdir}")
# fix: failure message previously said "unable to find release tag" (copy/paste)
assertFilesEqual \
"./test-fixtures/grype_0.32.0_checksums.txt" \
"${actual_filepath}" \
"downloaded checksums file does not match the fixture"
rm -rf -- "$tmpdir"
}
run_test_case test_download_github_release_checksums
# download a checksums file from a locally served-up snapshot directory and
# compare it against the checksums file in the snapshot dir
test_download_github_release_checksums_snapshot() {
tmpdir=$(mktemp -d)
github_download=$(snapshot_download_url)
name=${PROJECT_NAME}
version=$(snapshot_version)
actual_filepath=$(download_github_release_checksums "${github_download}" "${name}" "${version}" "${tmpdir}")
# fix: failure message previously said "unable to find release tag" (copy/paste)
assertFilesEqual \
"$(snapshot_checksums_path)" \
"${actual_filepath}" \
"downloaded checksums file does not match the snapshot checksums"
rm -rf -- "$tmpdir"
}
run_test_case_with_snapshot_release test_download_github_release_checksums_snapshot

View file

@ -0,0 +1 @@
{"id":57501596,"tag_name":"v0.32.0","update_url":"/anchore/grype/releases/tag/v0.32.0","update_authenticity_token":"7XbNZgRHpbHegdv-xRlbe84Y983YgyXa3YKWwv_e0ocqTHagsHq5dxCTQUQnuX3vbsgdWQU3A3__hkVNhKGHSg","delete_url":"/anchore/grype/releases/tag/v0.32.0","delete_authenticity_token":"6tLaRtXKUc-zz4tHIwCbbD7CksxIHK5imZE1gnA39oVCe6fYux5a8cPD9J52kGUzM1Hs9JPBjceG7yyszBk_2A","edit_url":"/anchore/grype/releases/edit/v0.32.0"}

View file

@ -0,0 +1,9 @@
250dddf3338d34012b55b4167b72f8bc87944e61aee35879342206a115a0f64b grype_0.32.0-SNAPSHOT-d461f63_darwin_amd64.tar.gz
4b2973604085c14bc4c452f5354110384d371f0d5c3f93c0e3a44498f54283d7 grype_0.32.0-SNAPSHOT-d461f63_linux_amd64.rpm
569b040bde6d369b9e3b96fb3d9d7ee5aa11267f3aa91fad3d8f4095f1cee150 grype_0.32.0-SNAPSHOT-d461f63_darwin_arm64.tar.gz
5c666286bca9d8c84f7355d5afe720186b0a06bed23ac0518a35a79ff905de28 grype_0.32.0-SNAPSHOT-d461f63_linux_arm64.tar.gz
dd1d7492e7a7db9a765a02927b0d019d8f9facb1173ae7c245cd06fefedddfd0 grype_0.32.0-SNAPSHOT-d461f63_windows_amd64.zip
dd4e5857856b4655511a75911fd7b53a3ebb9d2f584ae3c7ff7f52ad0dd93745 grype_0.32.0-SNAPSHOT-d461f63_linux_amd64.tar.gz
dfe9d8212def2eb3685bacf3c77f664830680a475eb6356e67c96abe4af00e74 grype_0.32.0-SNAPSHOT-d461f63_linux_arm64.rpm
e1efed13fa93c207b773cbc2a9252b87049e1a826bacb77b756a20a13a29e465 grype_0.32.0-SNAPSHOT-d461f63_linux_arm64.deb
ef2725de0e154059fb59c6268e68fd0ba3a7ce5b23e604166140f284b54ef9b4 grype_0.32.0-SNAPSHOT-d461f63_linux_amd64.deb

View file

@ -0,0 +1,9 @@
250dddf3338d34012b55b4167b72f8bc87944e61aee35879342206a115a0f64b grype_0.32.0_darwin_amd64.tar.gz
4b2973604085c14bc4c452f5354110384d371f0d5c3f93c0e3a44498f54283d7 grype_0.32.0_linux_amd64.rpm
569b040bde6d369b9e3b96fb3d9d7ee5aa11267f3aa91fad3d8f4095f1cee150 grype_0.32.0_darwin_arm64.tar.gz
5c666286bca9d8c84f7355d5afe720186b0a06bed23ac0518a35a79ff905de28 grype_0.32.0_linux_arm64.tar.gz
dd1d7492e7a7db9a765a02927b0d019d8f9facb1173ae7c245cd06fefedddfd0 grype_0.32.0_windows_amd64.zip
dd4e5857856b4655511a75911fd7b53a3ebb9d2f584ae3c7ff7f52ad0dd93745 grype_0.32.0_linux_amd64.tar.gz
dfe9d8212def2eb3685bacf3c77f664830680a475eb6356e67c96abe4af00e74 grype_0.32.0_linux_arm64.rpm
e1efed13fa93c207b773cbc2a9252b87049e1a826bacb77b756a20a13a29e465 grype_0.32.0_linux_arm64.deb
ef2725de0e154059fb59c6268e68fd0ba3a7ce5b23e604166140f284b54ef9b4 grype_0.32.0_linux_amd64.deb

View file

@ -0,0 +1,163 @@
# disable using the install.sh entrypoint such that we can unit test
# script functions without invoking main()
TEST_INSTALL_SH=true
# bring the functions under test into scope
. ../../install.sh
# from here on, treat references to unset variables as errors
set -u
# assertTrue [condition] [message]
#
# fails the test run (exit 2) when the given condition evaluates to FALSE.
# Note: the original body was identical to assertFalse (condition inverted),
# so any passing condition aborted the run — negate the eval result instead.
assertTrue() {
  if ! eval "$1"; then
    echo "assertTrue failed: $2"
    exit 2
  fi
}
# assertFalse [condition] [message]
#
# fails the test run (exit 2) when the given condition evaluates to true.
assertFalse() {
  # guard clause: a false condition is the passing case
  if ! eval "$1"; then
    return 0
  fi
  echo "assertFalse failed: $2"
  exit 2
}
# assertEquals [want] [got] [message]
#
# fails the test run (exit 2) unless the two values are identical strings.
assertEquals() {
  want=$1
  got=$2
  msg=$3
  # guard clause: equal values are the passing case
  if [ "$want" = "$got" ]; then
    return 0
  fi
  echo "assertEquals failed: want='$want' got='$got' $msg"
  exit 2
}
# assertFilesDoesNotExist [path] [message]
#
# fails the test run (exit 2) when a regular file exists at the given path.
assertFilesDoesNotExist() {
  path="$1"
  msg=$2
  # guard clause: a missing file is the passing case
  if [ ! -f "${path}" ]; then
    return 0
  fi
  echo "assertFilesDoesNotExist failed: path exists '$path': $msg"
  exit 2
}
# assertFileExists [path] [message]
#
# fails the test run (exit 2) unless a regular file exists at the given path.
assertFileExists() {
  path="$1"
  msg=$2
  # guard clause: an existing file is the passing case
  if [ -f "${path}" ]; then
    return 0
  fi
  echo "assertFileExists failed: path does not exist '$path': $msg"
  exit 2
}
# assertFilesEqual [want-path] [got-path] [message]
#
# fails the test run (exit 2) when the two files differ; the diff output is
# printed so the mismatch is visible in the log.
# Fixes: use the named variables (the original assigned want/got but diffed
# "$1"/"$2"), and test the diff result directly instead of inspecting $? on a
# separate line (fragile, and would abort early under `set -e`).
assertFilesEqual() {
  want=$1
  got=$2
  msg=$3
  if ! diff -- "$want" "$got"; then
    echo "assertFilesEqual failed: $msg"
    exit 2
  fi
}
# assertNotEquals [want] [got] [message]
#
# fails the test run (exit 2) when the two values are identical strings.
assertNotEquals() {
  want=$1
  got=$2
  msg=$3
  # guard clause: differing values are the passing case
  if [ "$want" != "$got" ]; then
    return 0
  fi
  echo "assertNotEquals failed: want='$want' got='$got' $msg"
  exit 2
}
# log_test_case [name...]
#
# prints the name of the test case about to run.
# Fix: use "$*" rather than "$@" inside a quoted string (ShellCheck SC2145) —
# the visible output is the same, but mixing "$@" into a string produces
# surprising word boundaries.
log_test_case() {
  echo "  running $*"
}
# run_test_case_with_snapshot_release [test-function] [args...]
#
# runs a single test function while a local HTTP server is serving the
# snapshot directory, tearing the server down afterwards (or on early exit).
# Fix: quote the argument expansion ("$@" instead of unquoted ${@:1},
# ShellCheck SC2068) so arguments containing whitespace are not re-split.
run_test_case_with_snapshot_release() {
  log_test_case "$@"
  worker_pid=$(setup_snapshot_server)
  # ensure the server is torn down even if the test function exits the script
  trap "teardown_snapshot_server $worker_pid" EXIT
  # run test function with all arguments
  "$@"
  trap - EXIT
  teardown_snapshot_server "${worker_pid}"
}
# port that the local snapshot asset server listens on
serve_port=8000
# setup_snapshot_server
#
# serves the snapshot directory over HTTP in the background and echoes the
# server's PID (the only stdout, so callers can capture it with $(...)).
setup_snapshot_server() {
  # if you want to see proof in the logs, feel free to adjust the redirection
  python3 -m http.server --directory "$(snapshot_dir)" $serve_port &> /dev/null &
  worker_pid=$!
  # it takes some time for the server to be ready...
  # NOTE(review): a fixed sleep can be flaky on slow runners; polling the port
  # until it accepts connections would be more robust — confirm before changing
  sleep 3
  echo "$worker_pid"
}
# teardown_snapshot_server [pid]
#
# stops the background snapshot HTTP server started by setup_snapshot_server.
# Fix: quote the PID expansion (ShellCheck SC2086).
teardown_snapshot_server() {
  worker_pid="$1"
  kill "$worker_pid"
}
# snapshot_version
#
# derives the version string (e.g. "0.32.0-SNAPSHOT-d461f63") from the name
# of the *_checksums.txt file in the snapshot directory.
snapshot_version() {
  suffix="_checksums.txt"
  # isolate "_<version>_checksums.txt" from the full checksums file path
  fragment=$(ls ../../snapshot/*${suffix} | grep -o "_.*${suffix}")
  # strip the trailing suffix, then the leading underscore
  fragment="${fragment%${suffix}}"
  echo "${fragment#_}"
}
# snapshot_download_url
#
# prints the base URL of the local snapshot asset server.
snapshot_download_url() {
  printf '%s\n' "localhost:${serve_port}"
}
# snapshot_dir
#
# prints the path to the goreleaser snapshot output directory.
snapshot_dir() {
  printf '%s\n' "../../snapshot"
}
# snapshot_checksums_path
#
# prints the path to the *_checksums.txt file in the snapshot directory.
# Fix: drop the useless `echo "$(...)"` wrapper (ShellCheck SC2005) and quote
# the command substitution — the glob still expands outside the quotes.
snapshot_checksums_path() {
  ls "$(snapshot_dir)"/*_checksums.txt
}
# snapshot_assets_count
#
# prints the number of snapshot asset files (packages + archives, excluding
# the checksums file), with all whitespace stripped from the wc output.
snapshot_assets_count() {
  # example output before wc -l:
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_linux_arm64.deb
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_linux_arm64.tar.gz
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_linux_amd64.rpm
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_darwin_arm64.tar.gz
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_linux_amd64.deb
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_linux_arm64.rpm
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_darwin_amd64.zip
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_windows_amd64.zip
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_darwin_arm64.zip
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_linux_amd64.tar.gz
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_darwin_amd64.tar.gz
  # note: tr strips wc's padding (and the newline); echo re-adds a trailing newline
  echo "$(find ../../snapshot -maxdepth 1 -type f | grep 'grype_' | grep -v checksums | wc -l | tr -d '[:space:]')"
}
# snapshot_assets_archive_count
#
# prints the number of snapshot archive assets only (tar.gz / zip — the
# formats install.sh can install from), whitespace stripped from the count.
snapshot_assets_archive_count() {
  # example output before wc -l:
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_linux_arm64.tar.gz
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_darwin_arm64.tar.gz
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_darwin_amd64.zip
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_windows_amd64.zip
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_darwin_arm64.zip
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_linux_amd64.tar.gz
  # ../../snapshot/grype_0.32.0-SNAPSHOT-e5e847a_darwin_amd64.tar.gz
  # note: tr strips wc's padding (and the newline); echo re-adds a trailing newline
  echo "$(find ../../snapshot -maxdepth 1 -type f | grep 'grype_' | grep 'tar\|zip' | wc -l | tr -d '[:space:]')"
}
# run_test_case [test-function] [args...]
#
# logs and invokes a single test function (no snapshot server required).
# Fix: quote the argument expansion ("$@" instead of unquoted ${@:1},
# ShellCheck SC2068) so arguments containing whitespace survive intact.
run_test_case() {
  log_test_case "$@"
  "$@"
}