Mirror of https://github.com/anchore/syft
Synced 2024-11-10 22:34:22 +00:00

Normalize cataloger configuration patterns (#2365)

* normalize cataloger patterns
* remove central reference for maven configurable

Signed-off-by: Alex Goodman <wagoodman@users.noreply.github.com>

This commit is contained in:
parent 4d0da703bf
commit 1cfc4c7387

49 changed files with 335 additions and 288 deletions
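The change converges every cataloger on the same configuration shape: a CatalogerConfig-style struct, a Default*Config() constructor, and chainable With*() setters. A minimal sketch of the resulting call pattern, using only names that appear in the hunks below; the override values are illustrative, not taken from the commit:

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/cataloging"
	"github.com/anchore/syft/syft/pkg/cataloger/golang"
	"github.com/anchore/syft/syft/pkg/cataloger/java"
)

func main() {
	// Normalized pattern: Default*Config() constructor plus chainable With*() setters.
	goCfg := golang.DefaultCatalogerConfig().
		WithSearchRemoteLicenses(true).
		WithProxy("https://proxy.golang.org,direct")

	javaCfg := java.DefaultArchiveCatalogerConfig().
		WithMavenBaseURL("https://repo1.maven.org/maven2").
		WithArchiveTraversal(cataloging.ArchiveSearchConfig{
			IncludeIndexedArchives:   true,
			IncludeUnindexedArchives: false,
		}, 5)

	fmt.Println(goCfg.SearchRemoteLicenses, javaCfg.MaxParentRecursiveDepth)
}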
@@ -12,6 +12,7 @@ import (
"github.com/anchore/clio"
"github.com/anchore/fangs"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/cataloging"
"github.com/anchore/syft/syft/pkg/cataloger"
golangCataloger "github.com/anchore/syft/syft/pkg/cataloger/golang"
javaCataloger "github.com/anchore/syft/syft/pkg/cataloger/java"

@@ -126,19 +127,24 @@ func (cfg Catalog) ToCatalogerConfig() cataloger.Config {
},
Catalogers: cfg.Catalogers,
Parallelism: cfg.Parallelism,
Golang: golangCataloger.NewGoCatalogerOpts().
Golang: golangCataloger.DefaultCatalogerConfig().
WithSearchLocalModCacheLicenses(cfg.Golang.SearchLocalModCacheLicenses).
WithLocalModCacheDir(cfg.Golang.LocalModCacheDir).
WithSearchRemoteLicenses(cfg.Golang.SearchRemoteLicenses).
WithProxy(cfg.Golang.Proxy).
WithNoProxy(cfg.Golang.NoProxy),
LinuxKernel: kernel.LinuxCatalogerConfig{
LinuxKernel: kernel.LinuxKernelCatalogerConfig{
CatalogModules: cfg.LinuxKernel.CatalogModules,
},
Java: javaCataloger.DefaultCatalogerOpts().
Java: javaCataloger.DefaultArchiveCatalogerConfig().
WithUseNetwork(cfg.Java.UseNetwork).
WithMavenURL(cfg.Java.MavenURL).
WithMaxParentRecursiveDepth(cfg.Java.MaxParentRecursiveDepth),
WithMavenBaseURL(cfg.Java.MavenURL).
WithArchiveTraversal(
cataloging.ArchiveSearchConfig{
IncludeIndexedArchives: cfg.Package.SearchIndexedArchives,
IncludeUnindexedArchives: cfg.Package.SearchUnindexedArchives,
},
cfg.Java.MaxParentRecursiveDepth),
Python: pythonCataloger.CatalogerConfig{
GuessUnpinnedRequirements: cfg.Python.GuessUnpinnedRequirements,
},
syft/cataloging/config.go (new file, 6 lines changed)

@@ -0,0 +1,6 @@
package cataloging

type ArchiveSearchConfig struct {
IncludeIndexedArchives bool `yaml:"include-indexed-archives" json:"include-indexed-archives" mapstructure:"include-indexed-archives"`
IncludeUnindexedArchives bool `yaml:"include-unindexed-archives" json:"include-unindexed-archives" mapstructure:"include-unindexed-archives"`
}
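For context, a minimal sketch of how a cataloger-level config is expected to embed this shared struct, mirroring what java.ArchiveCatalogerConfig does later in this diff; the struct name here is hypothetical:

package example

import "github.com/anchore/syft/syft/cataloging"

// Hypothetical cataloger config embedding the shared archive-search options,
// following the same inline/squash tag pattern used by java.ArchiveCatalogerConfig.
type SomeArchiveCatalogerConfig struct {
	cataloging.ArchiveSearchConfig `yaml:",inline" mapstructure:",squash"`
	UseNetwork                     bool `yaml:"use-network" mapstructure:"use-network"`
}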
@@ -9,7 +9,7 @@ import (
)

// NewDBCataloger returns a new cataloger object initialized for Alpine package DB flat-file stores.
func NewDBCataloger() *generic.Cataloger {
func NewDBCataloger() pkg.Cataloger {
return generic.NewCataloger("apk-db-cataloger").
WithParserByGlobs(parseApkDB, pkg.ApkDBGlob)
}
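Many hunks in this commit narrow constructor return types from *generic.Cataloger to the pkg.Cataloger interface. For reference, a sketch of that interface as it can be inferred from the Name/Catalog signatures visible in the golang progressingCataloger hunks below; the canonical definition lives in syft/pkg and is not shown in this diff:

package sketch

import (
	"github.com/anchore/syft/syft/artifact"
	"github.com/anchore/syft/syft/file"
	"github.com/anchore/syft/syft/pkg"
)

// Inferred shape of pkg.Cataloger (reconstructed from this diff, not quoted from it).
type Cataloger interface {
	Name() string
	Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error)
}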
@@ -9,7 +9,7 @@ import (
)

// NewDBCataloger returns a new cataloger object initialized for arch linux pacman database flat-file stores.
func NewDBCataloger() *generic.Cataloger {
func NewDBCataloger() pkg.Cataloger {
return generic.NewCataloger("alpm-db-cataloger").
WithParserByGlobs(parseAlpmDB, pkg.AlpmDBGlob)
}
@@ -12,7 +12,7 @@ import (
const catalogerName = "binary-cataloger"

func NewCataloger() *Cataloger {
func NewCataloger() pkg.Cataloger {
return &Cataloger{}
}
@@ -1,6 +1,7 @@
package cataloger

import (
"github.com/anchore/syft/syft/cataloging"
"github.com/anchore/syft/syft/pkg/cataloger/golang"
"github.com/anchore/syft/syft/pkg/cataloger/java"
"github.com/anchore/syft/syft/pkg/cataloger/kernel"

@@ -10,10 +11,10 @@ import (
// TODO: these field naming vs helper function naming schemes are inconsistent.
type Config struct {
Search SearchConfig
Golang golang.GoCatalogerOpts
LinuxKernel kernel.LinuxCatalogerConfig
Golang golang.CatalogerConfig
LinuxKernel kernel.LinuxKernelCatalogerConfig
Python python.CatalogerConfig
Java java.CatalogerOpts
Java java.ArchiveCatalogerConfig
Catalogers []string
Parallelism int
ExcludeBinaryOverlapByOwnership bool

@@ -25,7 +26,7 @@ func DefaultConfig() Config {
Parallelism: 1,
LinuxKernel: kernel.DefaultLinuxCatalogerConfig(),
Python: python.DefaultCatalogerConfig(),
Java: java.DefaultCatalogerOpts(),
Java: java.DefaultArchiveCatalogerConfig(),
ExcludeBinaryOverlapByOwnership: true,
}
}

@@ -33,12 +34,14 @@ func DefaultConfig() Config {
// JavaConfig merges relevant config values from Config to return a java.Config struct.
// Values like IncludeUnindexedArchives and IncludeIndexedArchives are used across catalogers
// and are not specific to Java requiring this merge.
func (c Config) JavaConfig() java.Config {
return java.Config{
SearchUnindexedArchives: c.Search.IncludeUnindexedArchives,
SearchIndexedArchives: c.Search.IncludeIndexedArchives,
func (c Config) JavaConfig() java.ArchiveCatalogerConfig {
return java.ArchiveCatalogerConfig{
ArchiveSearchConfig: cataloging.ArchiveSearchConfig{
IncludeUnindexedArchives: c.Search.IncludeUnindexedArchives,
IncludeIndexedArchives: c.Search.IncludeIndexedArchives,
},
UseNetwork: c.Java.UseNetwork,
MavenBaseURL: c.Java.MavenURL,
MavenBaseURL: c.Java.MavenBaseURL,
MaxParentRecursiveDepth: c.Java.MaxParentRecursiveDepth,
}
}
@@ -4,18 +4,19 @@ Package cpp provides a concrete Cataloger implementations for the C/C++ language
package cpp

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewConanCataloger returns a new C/C++ conanfile.txt and conan.lock cataloger object.
func NewConanCataloger() *generic.Cataloger {
func NewConanCataloger() pkg.Cataloger {
return generic.NewCataloger("conan-cataloger").
WithParserByGlobs(parseConanfile, "**/conanfile.txt").
WithParserByGlobs(parseConanlock, "**/conan.lock")
}

// NewConanInfoCataloger returns a new C/C++ conaninfo.txt cataloger object.
func NewConanInfoCataloger() *generic.Cataloger {
func NewConanInfoCataloger() pkg.Cataloger {
return generic.NewCataloger("conan-info-cataloger").
WithParserByGlobs(parseConaninfo, "**/conaninfo.txt")
}
@@ -4,11 +4,12 @@ Package dart provides a concrete Cataloger implementations for the Dart language
package dart

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewPubspecLockCataloger returns a new Dartlang cataloger object base on pubspec lock files.
func NewPubspecLockCataloger() *generic.Cataloger {
func NewPubspecLockCataloger() pkg.Cataloger {
return generic.NewCataloger("dart-pubspec-lock-cataloger").
WithParserByGlobs(parsePubspecLock, "**/pubspec.lock")
}
@@ -4,11 +4,12 @@ Package debian provides a concrete Cataloger implementation relating to packages
package debian

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewDBCataloger returns a new Deb package cataloger capable of parsing DPKG status DB flat-file stores.
func NewDBCataloger() *generic.Cataloger {
func NewDBCataloger() pkg.Cataloger {
return generic.NewCataloger("dpkg-db-cataloger").
// note: these globs have been intentionally split up in order to improve search performance,
// please do NOT combine into: "**/var/lib/dpkg/{status,status.d/*}"
@@ -4,17 +4,18 @@ Package dotnet provides a concrete Cataloger implementation relating to packages
package dotnet

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewDotnetDepsCataloger returns a new Dotnet cataloger object base on deps json files.
func NewDotnetDepsCataloger() *generic.Cataloger {
func NewDotnetDepsCataloger() pkg.Cataloger {
return generic.NewCataloger("dotnet-deps-cataloger").
WithParserByGlobs(parseDotnetDeps, "**/*.deps.json")
}

// NewDotnetPortableExecutableCataloger returns a new Dotnet cataloger object base on portable executable files.
func NewDotnetPortableExecutableCataloger() *generic.Cataloger {
func NewDotnetPortableExecutableCataloger() pkg.Cataloger {
return generic.NewCataloger("dotnet-portable-executable-cataloger").
WithParserByGlobs(parseDotnetPortableExecutable, "**/*.dll", "**/*.exe")
}
@@ -3,7 +3,7 @@ package dotnet
import (
"testing"

"github.com/anchore/syft/syft/pkg/cataloger/generic"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
)

@@ -11,7 +11,7 @@ func TestCataloger_Globs(t *testing.T) {
tests := []struct {
name string
fixture string
cataloger *generic.Cataloger
cataloger pkg.Cataloger
expected []string
}{
{
@@ -4,11 +4,12 @@ Package elixir provides a concrete Cataloger implementation relating to packages
package elixir

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewMixLockCataloger returns a cataloger object for Elixir mix.lock files.
func NewMixLockCataloger() *generic.Cataloger {
func NewMixLockCataloger() pkg.Cataloger {
return generic.NewCataloger("elixir-mix-lock-cataloger").
WithParserByGlobs(parseMixLock, "**/mix.lock")
}
@@ -4,11 +4,12 @@ Package erlang provides a concrete Cataloger implementation relating to packages
package erlang

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewRebarLockCataloger returns a new cataloger instance for Erlang rebar.lock files.
func NewRebarLockCataloger() *generic.Cataloger {
func NewRebarLockCataloger() pkg.Cataloger {
return generic.NewCataloger("erlang-rebar-lock-cataloger").
WithParserByGlobs(parseRebarLock, "**/rebar.lock")
}
@@ -4,11 +4,12 @@ Package gentoo provides a concrete Cataloger implementation related to packages
package gentoo

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewPortageCataloger returns a new cataloger object initialized for Gentoo Portage package manager files (a flat-file store).
func NewPortageCataloger() *generic.Cataloger {
func NewPortageCataloger() pkg.Cataloger {
return generic.NewCataloger("portage-cataloger").
WithParserByGlobs(parsePortageContents, "**/var/db/pkg/*/*/CONTENTS")
}
@@ -3,17 +3,20 @@ Package githubactions provides a concrete Cataloger implementation for GitHub Ac
*/
package githubactions

import "github.com/anchore/syft/syft/pkg/cataloger/generic"
import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewActionUsageCataloger returns GitHub Actions used within workflows and composite actions.
func NewActionUsageCataloger() *generic.Cataloger {
func NewActionUsageCataloger() pkg.Cataloger {
return generic.NewCataloger("github-actions-usage-cataloger").
WithParserByGlobs(parseWorkflowForActionUsage, "**/.github/workflows/*.yaml", "**/.github/workflows/*.yml").
WithParserByGlobs(parseCompositeActionForActionUsage, "**/.github/actions/*/action.yml", "**/.github/actions/*/action.yaml")
}

// NewWorkflowUsageCataloger returns shared workflows used within workflows.
func NewWorkflowUsageCataloger() *generic.Cataloger {
func NewWorkflowUsageCataloger() pkg.Cataloger {
return generic.NewCataloger("github-action-workflow-usage-cataloger").
WithParserByGlobs(parseWorkflowForWorkflowUsage, "**/.github/workflows/*.yaml", "**/.github/workflows/*.yml")
}
@@ -3,7 +3,7 @@ package githubactions
import (
"testing"

"github.com/anchore/syft/syft/pkg/cataloger/generic"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
)

@@ -11,7 +11,7 @@ func TestCataloger_Globs(t *testing.T) {
tests := []struct {
name string
fixture string
cataloger *generic.Cataloger
cataloger pkg.Cataloger
expected []string
}{
{
@@ -11,7 +11,6 @@ import (
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/cpe"
"github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"

@@ -20,31 +19,28 @@ import (
var versionCandidateGroups = regexp.MustCompile(`(?P<version>\d+(\.\d+)?(\.\d+)?)(?P<candidate>\w*)`)

// NewGoModuleFileCataloger returns a new cataloger object that searches within go.mod files.
func NewGoModuleFileCataloger(opts GoCatalogerOpts) pkg.Cataloger {
func NewGoModuleFileCataloger(opts CatalogerConfig) pkg.Cataloger {
c := goModCataloger{
licenses: newGoLicenses(opts),
}
return &progressingCataloger{
progress: c.licenses.progress,
cataloger: generic.NewCataloger("go-module-file-cataloger").
WithParserByGlobs(c.parseGoModFile, "**/go.mod"),
}
}

// NewGoModuleBinaryCataloger returns a new cataloger object that searches within binaries built by the go compiler.
func NewGoModuleBinaryCataloger(opts GoCatalogerOpts) pkg.Cataloger {
func NewGoModuleBinaryCataloger(opts CatalogerConfig) pkg.Cataloger {
c := goBinaryCataloger{
licenses: newGoLicenses(opts),
}
return &progressingCataloger{
progress: c.licenses.progress,
cataloger: generic.NewCataloger("go-module-binary-cataloger").
WithParserByMimeTypes(c.parseGoBinary, internal.ExecutableMIMETypeSet.List()...),
}
}

type progressingCataloger struct {
progress *monitor.CatalogerTask
cataloger *generic.Cataloger
}

@@ -53,7 +49,6 @@ func (p *progressingCataloger) Name() string {
}

func (p *progressingCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, []artifact.Relationship, error) {
defer p.progress.SetCompleted()
pkgs, relationships, err := p.cataloger.Catalog(resolver)
goCompilerPkgs := []pkg.Package{}
totalLocations := file.NewLocationSet()

@@ -76,6 +71,7 @@ func (p *progressingCataloger) Catalog(resolver file.Resolver) ([]pkg.Package, [
pkgs = append(pkgs, goCompilerPkgs...)
return pkgs, relationships, err
}

func newGoStdLib(version string, location file.LocationSet) *pkg.Package {
stdlibCpe, err := generateStdlibCpe(version)
if err != nil {
@@ -30,7 +30,7 @@ func Test_Mod_Cataloger_Globs(t *testing.T) {
FromDirectory(t, test.fixture).
ExpectsResolverContentQueries(test.expected).
IgnoreUnfulfilledPathResponses("src/go.sum").
TestCataloger(t, NewGoModuleFileCataloger(GoCatalogerOpts{}))
TestCataloger(t, NewGoModuleFileCataloger(CatalogerConfig{}))
})
}
}

@@ -55,7 +55,7 @@ func Test_Binary_Cataloger_Globs(t *testing.T) {
pkgtest.NewCatalogTester().
FromDirectory(t, test.fixture).
ExpectsResolverContentQueries(test.expected).
TestCataloger(t, NewGoModuleBinaryCataloger(GoCatalogerOpts{}))
TestCataloger(t, NewGoModuleBinaryCataloger(CatalogerConfig{}))
})
}
}
syft/pkg/cataloger/golang/config.go (new file, 114 lines changed)

@@ -0,0 +1,114 @@
package golang

import (
"os"
"path"
"strings"

"github.com/mitchellh/go-homedir"

"github.com/anchore/syft/internal/log"
)

const (
defaultProxies = "https://proxy.golang.org,direct"
directProxyOnly = "direct"
)

var (
directProxiesOnly = []string{directProxyOnly}
)

type CatalogerConfig struct {
SearchLocalModCacheLicenses bool `yaml:"search-local-mod-cache-licenses" json:"search-local-mod-cache-licenses" mapstructure:"search-local-mod-cache-licenses"`
LocalModCacheDir string `yaml:"local-mod-cache-dir" json:"local-mod-cache-dir" mapstructure:"local-mod-cache-dir"`
SearchRemoteLicenses bool `yaml:"search-remote-licenses" json:"search-remote-licenses" mapstructure:"search-remote-licenses"`
Proxies []string `yaml:"proxies,omitempty" json:"proxies,omitempty" mapstructure:"proxies"`
NoProxy []string `yaml:"no-proxy,omitempty" json:"no-proxy,omitempty" mapstructure:"no-proxy"`
}

// DefaultCatalogerConfig create a CatalogerConfig with default options, which includes:
// - setting the default remote proxy if none is provided
// - setting the default no proxy if none is provided
// - setting the default local module cache dir if none is provided
func DefaultCatalogerConfig() CatalogerConfig {
g := CatalogerConfig{}

// first process the proxy settings
if len(g.Proxies) == 0 {
goProxy := os.Getenv("GOPROXY")
if goProxy == "" {
goProxy = defaultProxies
}
g = g.WithProxy(goProxy)
}

// next process the gonoproxy settings
if len(g.NoProxy) == 0 {
goPrivate := os.Getenv("GOPRIVATE")
goNoProxy := os.Getenv("GONOPROXY")
// we only use the env var if it was not set explicitly
if goPrivate != "" {
g.NoProxy = append(g.NoProxy, strings.Split(goPrivate, ",")...)
}

// next process the goprivate settings; we always add those
if goNoProxy != "" {
g.NoProxy = append(g.NoProxy, strings.Split(goNoProxy, ",")...)
}
}

if g.LocalModCacheDir == "" {
goPath := os.Getenv("GOPATH")

if goPath == "" {
homeDir, err := homedir.Dir()
if err != nil {
log.Debug("unable to determine user home dir: %v", err)
} else {
goPath = path.Join(homeDir, "go")
}
}
if goPath != "" {
g.LocalModCacheDir = path.Join(goPath, "pkg", "mod")
}
}
return g
}

func (g CatalogerConfig) WithSearchLocalModCacheLicenses(input bool) CatalogerConfig {
g.SearchLocalModCacheLicenses = input
return g
}

func (g CatalogerConfig) WithLocalModCacheDir(input string) CatalogerConfig {
if input == "" {
return g
}
g.LocalModCacheDir = input
return g
}

func (g CatalogerConfig) WithSearchRemoteLicenses(input bool) CatalogerConfig {
g.SearchRemoteLicenses = input
return g
}

func (g CatalogerConfig) WithProxy(input string) CatalogerConfig {
if input == "" {
return g
}
if input == "off" {
input = directProxyOnly
}
g.Proxies = strings.Split(input, ",")
return g
}

func (g CatalogerConfig) WithNoProxy(input string) CatalogerConfig {
if input == "" {
return g
}
g.NoProxy = strings.Split(input, ",")
return g
}
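A brief usage sketch of the new Go cataloger config: defaults are resolved from GOPROXY/GOPRIVATE/GONOPROXY/GOPATH, then selectively overridden with the With*() setters. Only names from the file above are used; the override values are illustrative:

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg/cataloger/golang"
)

func main() {
	cfg := golang.DefaultCatalogerConfig().
		WithSearchLocalModCacheLicenses(true).
		WithLocalModCacheDir("/go/pkg/mod"). // illustrative path, normally derived from GOPATH
		WithNoProxy("my.private,no.proxy")

	cataloger := golang.NewGoModuleBinaryCataloger(cfg)
	fmt.Println(cataloger.Name())
}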
@@ -34,7 +34,7 @@ func Test_Options(t *testing.T) {
name string
env map[string]string
opts opts
expected GoCatalogerOpts
expected CatalogerConfig
}{
{
name: "set via env defaults",

@@ -45,12 +45,12 @@
"GONOPROXY": "no.proxy",
},
opts: opts{},
expected: GoCatalogerOpts{
searchLocalModCacheLicenses: false,
localModCacheDir: "/go/pkg/mod",
searchRemoteLicenses: false,
proxies: []string{"https://my.proxy"},
noProxy: []string{"my.private", "no.proxy"},
expected: CatalogerConfig{
SearchLocalModCacheLicenses: false,
LocalModCacheDir: "/go/pkg/mod",
SearchRemoteLicenses: false,
Proxies: []string{"https://my.proxy"},
NoProxy: []string{"my.private", "no.proxy"},
},
},
{

@@ -68,12 +68,12 @@
proxy: "https://alt.proxy,direct",
noProxy: "alt.no.proxy",
},
expected: GoCatalogerOpts{
searchLocalModCacheLicenses: true,
localModCacheDir: "/go-cache",
searchRemoteLicenses: true,
proxies: []string{"https://alt.proxy", "direct"},
noProxy: []string{"alt.no.proxy"},
expected: CatalogerConfig{
SearchLocalModCacheLicenses: true,
LocalModCacheDir: "/go-cache",
SearchRemoteLicenses: true,
Proxies: []string{"https://alt.proxy", "direct"},
NoProxy: []string{"alt.no.proxy"},
},
},
}

@@ -86,7 +86,7 @@
for k, v := range test.env {
t.Setenv(k, v)
}
got := NewGoCatalogerOpts().
got := DefaultCatalogerConfig().
WithSearchLocalModCacheLicenses(test.opts.local).
WithLocalModCacheDir(test.opts.cacheDir).
WithSearchRemoteLicenses(test.opts.remote).
@@ -29,16 +29,16 @@ import (
)

type goLicenses struct {
opts GoCatalogerOpts
opts CatalogerConfig
localModCacheResolver file.WritableResolver
progress *monitor.CatalogerTask
lowerLicenseFileNames *strset.Set
}

func newGoLicenses(opts GoCatalogerOpts) goLicenses {
func newGoLicenses(opts CatalogerConfig) goLicenses {
return goLicenses{
opts: opts,
localModCacheResolver: modCacheResolver(opts.localModCacheDir),
localModCacheResolver: modCacheResolver(opts.LocalModCacheDir),
progress: &monitor.CatalogerTask{
SubStatus: true,
RemoveOnCompletion: true,

@@ -107,7 +107,7 @@ func (c *goLicenses) getLicenses(resolver file.Resolver, moduleName, moduleVersi
}

func (c *goLicenses) getLicensesFromLocal(moduleName, moduleVersion string) ([]pkg.License, error) {
if !c.opts.searchLocalModCacheLicenses {
if !c.opts.SearchLocalModCacheLicenses {
return nil, nil
}

@@ -117,11 +117,11 @@ func (c *goLicenses) getLicensesFromLocal(moduleName, moduleVersion string) ([]p
}

func (c *goLicenses) getLicensesFromRemote(moduleName, moduleVersion string) ([]pkg.License, error) {
if !c.opts.searchRemoteLicenses {
if !c.opts.SearchRemoteLicenses {
return nil, nil
}

proxies := remotesForModule(c.opts.proxies, c.opts.noProxy, moduleName)
proxies := remotesForModule(c.opts.Proxies, c.opts.NoProxy, moduleName)

fsys, err := getModule(c.progress, proxies, moduleName, moduleVersion)
if err != nil {

@@ -231,15 +231,18 @@ func getModule(progress *monitor.CatalogerTask, proxies []string, moduleName, mo
func getModuleProxy(progress *monitor.CatalogerTask, proxy string, moduleName string, moduleVersion string) (out fs.FS, _ error) {
u := fmt.Sprintf("%s/%s/@v/%s.zip", proxy, moduleName, moduleVersion)
progress.SetValue(u)

// get the module zip
resp, err := http.Get(u) //nolint:gosec
if err != nil {
return nil, err
}
defer func() { _ = resp.Body.Close() }()

if resp.StatusCode != http.StatusOK {
u = fmt.Sprintf("%s/%s/@v/%s.zip", proxy, strings.ToLower(moduleName), moduleVersion)
progress.SetValue(u)

// try lowercasing it; some packages have mixed casing that really messes up the proxy
resp, err = http.Get(u) //nolint:gosec
if err != nil {

@@ -250,19 +253,23 @@
return nil, fmt.Errorf("failed to get module zip: %s", resp.Status)
}
}

// read the zip
b, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}

out, err = zip.NewReader(bytes.NewReader(b), resp.ContentLength)
versionPath := findVersionPath(out, ".")
out = getSubFS(out, versionPath)

return out, err
}

func findVersionPath(f fs.FS, dir string) string {
list, _ := fs.ReadDir(f, dir)

for _, entry := range list {
name := entry.Name()
if strings.Contains(name, "@") {

@@ -273,6 +280,7 @@ func findVersionPath(f fs.FS, dir string) string {
return path.Join(name, found)
}
}

return ""
}

@@ -282,7 +290,9 @@ func getModuleRepository(progress *monitor.CatalogerTask, moduleName string, mod
if len(parts) > 2 {
repoName = fmt.Sprintf("%s/%s/%s", parts[0], parts[1], parts[2])
}

progress.SetValue(fmt.Sprintf("git: %s", repoName))

f := memfs.New()
buf := &bytes.Buffer{}
_, err := git.Clone(memory.NewStorage(), f, &git.CloneOptions{

@@ -292,6 +302,7 @@
Depth: 1,
Progress: buf,
})

if err != nil {
return nil, fmt.Errorf("%w -- %s", err, buf.String())
}
@@ -67,9 +67,9 @@ func Test_LocalLicenseSearch(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
l := newGoLicenses(
GoCatalogerOpts{
searchLocalModCacheLicenses: true,
localModCacheDir: path.Join(wd, "test-fixtures", "licenses", "pkg", "mod"),
CatalogerConfig{
SearchLocalModCacheLicenses: true,
LocalModCacheDir: path.Join(wd, "test-fixtures", "licenses", "pkg", "mod"),
},
)
licenses, err := l.getLicenses(fileresolver.Empty{}, test.name, test.version)

@@ -154,10 +154,10 @@ func Test_RemoteProxyLicenseSearch(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
l := newGoLicenses(GoCatalogerOpts{
searchRemoteLicenses: true,
proxies: []string{server.URL},
localModCacheDir: modDir,
l := newGoLicenses(CatalogerConfig{
SearchRemoteLicenses: true,
Proxies: []string{server.URL},
LocalModCacheDir: modDir,
})

licenses, err := l.getLicenses(fileresolver.Empty{}, test.name, test.version)
@@ -1,114 +0,0 @@
package golang

import (
"os"
"path"
"strings"

"github.com/mitchellh/go-homedir"

"github.com/anchore/syft/internal/log"
)

const (
defaultProxies = "https://proxy.golang.org,direct"
directProxyOnly = "direct"
)

var (
directProxiesOnly = []string{directProxyOnly}
)

type GoCatalogerOpts struct {
searchLocalModCacheLicenses bool
localModCacheDir string
searchRemoteLicenses bool
proxies []string
noProxy []string
}

func (g GoCatalogerOpts) WithSearchLocalModCacheLicenses(input bool) GoCatalogerOpts {
g.searchLocalModCacheLicenses = input
return g
}

func (g GoCatalogerOpts) WithLocalModCacheDir(input string) GoCatalogerOpts {
if input == "" {
return g
}
g.localModCacheDir = input
return g
}

func (g GoCatalogerOpts) WithSearchRemoteLicenses(input bool) GoCatalogerOpts {
g.searchRemoteLicenses = input
return g
}

func (g GoCatalogerOpts) WithProxy(input string) GoCatalogerOpts {
if input == "" {
return g
}
if input == "off" {
input = directProxyOnly
}
g.proxies = strings.Split(input, ",")
return g
}

func (g GoCatalogerOpts) WithNoProxy(input string) GoCatalogerOpts {
if input == "" {
return g
}
g.noProxy = strings.Split(input, ",")
return g
}

// NewGoCatalogerOpts create a GoCatalogerOpts with default options, which includes:
// - setting the default remote proxy if none is provided
// - setting the default no proxy if none is provided
// - setting the default local module cache dir if none is provided
func NewGoCatalogerOpts() GoCatalogerOpts {
g := GoCatalogerOpts{}

// first process the proxy settings
if len(g.proxies) == 0 {
goProxy := os.Getenv("GOPROXY")
if goProxy == "" {
goProxy = defaultProxies
}
g = g.WithProxy(goProxy)
}

// next process the gonoproxy settings
if len(g.noProxy) == 0 {
goPrivate := os.Getenv("GOPRIVATE")
goNoProxy := os.Getenv("GONOPROXY")
// we only use the env var if it was not set explicitly
if goPrivate != "" {
g.noProxy = append(g.noProxy, strings.Split(goPrivate, ",")...)
}

// next process the goprivate settings; we always add those
if goNoProxy != "" {
g.noProxy = append(g.noProxy, strings.Split(goNoProxy, ",")...)
}
}

if g.localModCacheDir == "" {
goPath := os.Getenv("GOPATH")

if goPath == "" {
homeDir, err := homedir.Dir()
if err != nil {
log.Debug("unable to determine user home dir: %v", err)
} else {
goPath = path.Join(homeDir, "go")
}
}
if goPath != "" {
g.localModCacheDir = path.Join(goPath, "pkg", "mod")
}
}
return g
}
@@ -142,7 +142,7 @@ func Test_GoSumHashes(t *testing.T) {
pkgtest.NewCatalogTester().
FromDirectory(t, test.fixture).
Expects(test.expected, nil).
TestCataloger(t, NewGoModuleFileCataloger(GoCatalogerOpts{}))
TestCataloger(t, NewGoModuleFileCataloger(CatalogerConfig{}))
})
}
}
@@ -4,6 +4,7 @@ Package haskell provides a concrete Cataloger implementation relating to package
package haskell

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

@@ -12,7 +13,7 @@ import (
// This hints at splitting these into multiple catalogers, but for now we'll keep them together.

// NewHackageCataloger returns a new Haskell cataloger object.
func NewHackageCataloger() *generic.Cataloger {
func NewHackageCataloger() pkg.Cataloger {
return generic.NewCataloger("haskell-cataloger").
WithParserByGlobs(parseStackYaml, "**/stack.yaml").
WithParserByGlobs(parseStackLock, "**/stack.yaml.lock").
@@ -53,14 +53,14 @@ type archiveParser struct {
contentPath string
fileInfo archiveFilename
detectNested bool
cfg Config
cfg ArchiveCatalogerConfig
}

type genericArchiveParserAdapter struct {
cfg Config
cfg ArchiveCatalogerConfig
}

func newGenericArchiveParserAdapter(cfg Config) genericArchiveParserAdapter {
func newGenericArchiveParserAdapter(cfg ArchiveCatalogerConfig) genericArchiveParserAdapter {
return genericArchiveParserAdapter{cfg: cfg}
}

@@ -85,7 +85,7 @@ func uniquePkgKey(groupID string, p *pkg.Package) string {
// newJavaArchiveParser returns a new java archive parser object for the given archive. Can be configured to discover
// and parse nested archives or ignore them.
func newJavaArchiveParser(reader file.LocationReadCloser, detectNested bool, cfg Config) (*archiveParser, func(), error) {
func newJavaArchiveParser(reader file.LocationReadCloser, detectNested bool, cfg ArchiveCatalogerConfig) (*archiveParser, func(), error) {
// fetch the last element of the virtual path
virtualElements := strings.Split(reader.Path(), ":")
currentFilepath := virtualElements[len(virtualElements)-1]

@@ -338,7 +338,7 @@ func artifactIDMatchesFilename(artifactID, fileName string) bool {
return strings.HasPrefix(artifactID, fileName) || strings.HasSuffix(fileName, artifactID)
}

func findPomLicenses(pomProjectObject *parsedPomProject, cfg Config) {
func findPomLicenses(pomProjectObject *parsedPomProject, cfg ArchiveCatalogerConfig) {
// If we don't have any licenses until now, and if we have a parent Pom, then we'll check the parent pom in maven central for licenses.
if pomProjectObject != nil && pomProjectObject.Parent != nil && len(pomProjectObject.Licenses) == 0 {
parentLicenses, err := recursivelyFindLicensesFromParentPom(

@@ -373,11 +373,11 @@ func formatMavenPomURL(groupID, artifactID, version, mavenBaseURL string) (reque
return requestURL, err
}

func recursivelyFindLicensesFromParentPom(groupID, artifactID, version string, cfg Config) ([]string, error) {
func recursivelyFindLicensesFromParentPom(groupID, artifactID, version string, cfg ArchiveCatalogerConfig) ([]string, error) {
var licenses []string
// As there can be nested parent poms, we'll recursively check for licenses until we reach the max depth
for i := 0; i < cfg.MaxParentRecursiveDepth; i++ {
parentPom, err := getPomFromMavenCentral(groupID, artifactID, version, cfg.MavenBaseURL)
parentPom, err := getPomFromMavenRepo(groupID, artifactID, version, cfg.MavenBaseURL)
if err != nil {
return nil, err
}

@@ -395,7 +395,7 @@ func recursivelyFindLicensesFromParentPom(groupID, artifactID, version string, c
return licenses, nil
}

func getPomFromMavenCentral(groupID, artifactID, version, mavenBaseURL string) (*gopom.Project, error) {
func getPomFromMavenRepo(groupID, artifactID, version, mavenBaseURL string) (*gopom.Project, error) {
requestURL, err := formatMavenPomURL(groupID, artifactID, version, mavenBaseURL)
if err != nil {
return nil, err

@@ -542,7 +542,7 @@ func (j *archiveParser) discoverPkgsFromNestedArchives(parentPkg *pkg.Package) (
// discoverPkgsFromZip finds Java archives within Java archives, returning all listed Java packages found and
// associating each discovered package to the given parent package.
func discoverPkgsFromZip(location file.Location, archivePath, contentPath string, fileManifest intFile.ZipFileManifest, parentPkg *pkg.Package, cfg Config) ([]pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromZip(location file.Location, archivePath, contentPath string, fileManifest intFile.ZipFileManifest, parentPkg *pkg.Package, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
// search and parse pom.properties files & fetch the contents
openers, err := intFile.ExtractFromZipToUniqueTempFile(archivePath, contentPath, fileManifest.GlobMatch(false, archiveFormatGlobs...)...)
if err != nil {

@@ -553,7 +553,7 @@ func discoverPkgsFromZip(location file.Location, archivePath, contentPath string
}

// discoverPkgsFromOpeners finds Java archives within the given files and associates them with the given parent package.
func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.Opener, parentPkg *pkg.Package, cfg Config) ([]pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.Opener, parentPkg *pkg.Package, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
var pkgs []pkg.Package
var relationships []artifact.Relationship

@@ -582,7 +582,7 @@ func discoverPkgsFromOpeners(location file.Location, openers map[string]intFile.
}

// discoverPkgsFromOpener finds Java archives within the given file.
func discoverPkgsFromOpener(location file.Location, pathWithinArchive string, archiveOpener intFile.Opener, cfg Config) ([]pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromOpener(location file.Location, pathWithinArchive string, archiveOpener intFile.Opener, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
archiveReadCloser, err := archiveOpener.Open()
if err != nil {
return nil, nil, fmt.Errorf("unable to open archived file from tempdir: %w", err)

@@ -669,7 +669,7 @@ func pomProjectByParentPath(archivePath string, location file.Location, extractP
// newPackageFromMavenData processes a single Maven POM properties for a given parent package, returning all listed Java packages found and
// associating each discovered package to the given parent package. Note the pom.xml is optional, the pom.properties is not.
func newPackageFromMavenData(pomProperties pkg.JavaPomProperties, parsedPomProject *parsedPomProject, parentPkg *pkg.Package, location file.Location, cfg Config) *pkg.Package {
func newPackageFromMavenData(pomProperties pkg.JavaPomProperties, parsedPomProject *parsedPomProject, parentPkg *pkg.Package, location file.Location, cfg ArchiveCatalogerConfig) *pkg.Package {
// keep the artifact name within the virtual path if this package does not match the parent package
vPathSuffix := ""
groupID := ""
@@ -78,7 +78,7 @@ func TestSearchMavenForLicenses(t *testing.T) {
name string
fixture string
detectNested bool
config Config
config ArchiveCatalogerConfig
requestPath string
requestHandlers []handlerPath
expectedLicenses []pkg.License

@@ -87,7 +87,7 @@ func TestSearchMavenForLicenses(t *testing.T) {
name: "searchMavenForLicenses returns the expected licenses when search is set to true",
fixture: "opensaml-core-3.4.6",
detectNested: false,
config: Config{
config: ArchiveCatalogerConfig{
UseNetwork: true,
MavenBaseURL: url,
MaxParentRecursiveDepth: 2,

@@ -161,7 +161,7 @@ func TestFormatMavenURL(t *testing.T) {
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
requestURL, err := formatMavenPomURL(tc.groupID, tc.artifactID, tc.version, MavenBaseURL)
requestURL, err := formatMavenPomURL(tc.groupID, tc.artifactID, tc.version, mavenBaseURL)
assert.NoError(t, err, "expected no err; got %w", err)
assert.Equal(t, tc.expected, requestURL)
})

@@ -401,7 +401,7 @@ func TestParseJar(t *testing.T) {
parser, cleanupFn, err := newJavaArchiveParser(file.LocationReadCloser{
Location: file.NewLocation(fixture.Name()),
ReadCloser: fixture,
}, false, Config{UseNetwork: false})
}, false, ArchiveCatalogerConfig{UseNetwork: false})
defer cleanupFn()
require.NoError(t, err)

@@ -667,7 +667,7 @@ func TestParseNestedJar(t *testing.T) {
fixture, err := os.Open(test.fixture)
require.NoError(t, err)
gap := newGenericArchiveParserAdapter(Config{})
gap := newGenericArchiveParserAdapter(ArchiveCatalogerConfig{})

actual, _, err := gap.parseJavaArchive(nil, nil, file.LocationReadCloser{
Location: file.NewLocation(fixture.Name()),

@@ -1089,7 +1089,7 @@ func Test_newPackageFromMavenData(t *testing.T) {
}
test.expectedParent.Locations = locations

actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, file.NewLocation(virtualPath), Config{})
actualPackage := newPackageFromMavenData(test.props, test.project, test.parent, file.NewLocation(virtualPath), DefaultArchiveCatalogerConfig())
if test.expectedPackage == nil {
require.Nil(t, actualPackage)
} else {

@@ -1309,7 +1309,7 @@ func Test_parseJavaArchive_regressions(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gap := newGenericArchiveParserAdapter(Config{})
gap := newGenericArchiveParserAdapter(ArchiveCatalogerConfig{})
if tt.assignParent {
assignParent(&tt.expectedPkgs[0], tt.expectedPkgs[1:]...)
}
@@ -4,23 +4,24 @@ Package java provides a concrete Cataloger implementation for packages relating
package java

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewArchiveCataloger returns a new Java archive cataloger object for detecting packages with archives (jar, war, ear, par, sar, jpi, hpi, and native-image formats)
func NewArchiveCataloger(cfg Config) *generic.Cataloger {
func NewArchiveCataloger(cfg ArchiveCatalogerConfig) *generic.Cataloger {
gap := newGenericArchiveParserAdapter(cfg)

c := generic.NewCataloger("java-archive-cataloger").
WithParserByGlobs(gap.parseJavaArchive, archiveFormatGlobs...)

if cfg.SearchIndexedArchives {
if cfg.IncludeIndexedArchives {
// java archives wrapped within zip files
gzp := newGenericZipWrappedJavaArchiveParser(cfg)
c.WithParserByGlobs(gzp.parseZipWrappedJavaArchive, genericZipGlobs...)
}

if cfg.SearchUnindexedArchives {
if cfg.IncludeUnindexedArchives {
// java archives wrapped within tar files
gtp := newGenericTarWrappedJavaArchiveParser(cfg)
c.WithParserByGlobs(gtp.parseTarWrappedJavaArchive, genericTarGlobs...)

@@ -30,14 +31,14 @@ func NewArchiveCataloger(cfg Config) *generic.Cataloger {
// NewPomCataloger returns a cataloger capable of parsing dependencies from a pom.xml file.
// Pom files list dependencies that maybe not be locally installed yet.
func NewPomCataloger() *generic.Cataloger {
func NewPomCataloger() pkg.Cataloger {
return generic.NewCataloger("java-pom-cataloger").
WithParserByGlobs(parserPomXML, "**/pom.xml")
}

// NewGradleLockfileCataloger returns a cataloger capable of parsing dependencies from a gradle.lockfile file.
// Note: Older versions of lockfiles aren't supported yet
func NewGradleLockfileCataloger() *generic.Cataloger {
func NewGradleLockfileCataloger() pkg.Cataloger {
return generic.NewCataloger("java-gradle-lockfile-cataloger").
WithParserByGlobs(parseGradleLockfile, gradleLockfileGlob)
}
@@ -3,6 +3,7 @@ package java
import (
"testing"

"github.com/anchore/syft/syft/cataloging"
"github.com/anchore/syft/syft/pkg/cataloger/internal/pkgtest"
)

@@ -54,10 +55,16 @@ func Test_ArchiveCataloger_Globs(t *testing.T) {
pkgtest.NewCatalogTester().
FromDirectory(t, test.fixture).
ExpectsResolverContentQueries(test.expected).
TestCataloger(t, NewArchiveCataloger(Config{
SearchUnindexedArchives: true,
SearchIndexedArchives: true,
}))
TestCataloger(t,
NewArchiveCataloger(
ArchiveCatalogerConfig{
ArchiveSearchConfig: cataloging.ArchiveSearchConfig{
IncludeIndexedArchives: true,
IncludeUnindexedArchives: true,
},
},
),
)
})
}
}
@@ -1,9 +1,44 @@
package java

type Config struct {
SearchUnindexedArchives bool
SearchIndexedArchives bool
UseNetwork bool
MavenBaseURL string
MaxParentRecursiveDepth int
import "github.com/anchore/syft/syft/cataloging"

const mavenBaseURL = "https://repo1.maven.org/maven2"

type ArchiveCatalogerConfig struct {
cataloging.ArchiveSearchConfig `yaml:",inline" json:"" mapstructure:",squash"`
UseNetwork bool `yaml:"use-network" json:"use-network" mapstructure:"use-network"`
MavenBaseURL string `yaml:"maven-base-url" json:"maven-base-url" mapstructure:"maven-base-url"`
MaxParentRecursiveDepth int `yaml:"max-parent-recursive-depth" json:"max-parent-recursive-depth" mapstructure:"max-parent-recursive-depth"`
}

func DefaultArchiveCatalogerConfig() ArchiveCatalogerConfig {
return ArchiveCatalogerConfig{
ArchiveSearchConfig: cataloging.ArchiveSearchConfig{
IncludeIndexedArchives: true,
IncludeUnindexedArchives: false,
},
UseNetwork: false,
MavenBaseURL: mavenBaseURL,
MaxParentRecursiveDepth: 5,
}
}

func (j ArchiveCatalogerConfig) WithUseNetwork(input bool) ArchiveCatalogerConfig {
j.UseNetwork = input
return j
}

func (j ArchiveCatalogerConfig) WithMavenBaseURL(input string) ArchiveCatalogerConfig {
if input != "" {
j.MavenBaseURL = input
}
return j
}

func (j ArchiveCatalogerConfig) WithArchiveTraversal(search cataloging.ArchiveSearchConfig, maxDepth int) ArchiveCatalogerConfig {
if maxDepth > 0 {
j.MaxParentRecursiveDepth = maxDepth
}
j.ArchiveSearchConfig = search
return j
}
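A brief usage sketch of the reshaped Java archive cataloger config: start from the defaults (indexed archives on, network off, Maven Central as the base URL) and override only what differs. Only names from the hunks above are used; the override values are illustrative:

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/cataloging"
	"github.com/anchore/syft/syft/pkg/cataloger/java"
)

func main() {
	cfg := java.DefaultArchiveCatalogerConfig().
		WithUseNetwork(true).
		WithMavenBaseURL("https://repo1.maven.org/maven2").
		WithArchiveTraversal(cataloging.ArchiveSearchConfig{
			IncludeIndexedArchives:   true,
			IncludeUnindexedArchives: true,
		}, 2)

	cataloger := java.NewArchiveCataloger(cfg)
	fmt.Println(cataloger.Name())
}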
@@ -102,7 +102,7 @@ const nativeImageInvalidIndexError = "parsing the executable file generated an i
const nativeImageMissingExportedDataDirectoryError = "exported data directory is missing"

// newNativeImageCataloger returns a new Native Image cataloger object.
func NewNativeImageCataloger() *NativeImageCataloger {
func NewNativeImageCataloger() pkg.Cataloger {
return &NativeImageCataloger{}
}
@@ -1,36 +0,0 @@
package java

const MavenBaseURL = "https://repo1.maven.org/maven2"

type CatalogerOpts struct {
UseNetwork bool
MavenURL string
MaxParentRecursiveDepth int
}

func (j CatalogerOpts) WithUseNetwork(input bool) CatalogerOpts {
j.UseNetwork = input
return j
}

func (j CatalogerOpts) WithMavenURL(input string) CatalogerOpts {
if input != "" {
j.MavenURL = input
}
return j
}

func (j CatalogerOpts) WithMaxParentRecursiveDepth(input int) CatalogerOpts {
if input > 0 {
j.MaxParentRecursiveDepth = input
}
return j
}

func DefaultCatalogerOpts() CatalogerOpts {
return CatalogerOpts{
UseNetwork: false,
MavenURL: MavenBaseURL,
MaxParentRecursiveDepth: 5,
}
}
@@ -47,10 +47,10 @@ var genericTarGlobs = []string{
// a file listing within the archive you must decompress the entire archive and seek through all of the entries.

type genericTarWrappedJavaArchiveParser struct {
cfg Config
cfg ArchiveCatalogerConfig
}

func newGenericTarWrappedJavaArchiveParser(cfg Config) genericTarWrappedJavaArchiveParser {
func newGenericTarWrappedJavaArchiveParser(cfg ArchiveCatalogerConfig) genericTarWrappedJavaArchiveParser {
return genericTarWrappedJavaArchiveParser{
cfg: cfg,
}

@@ -68,7 +68,7 @@ func (gtp genericTarWrappedJavaArchiveParser) parseTarWrappedJavaArchive(_ file.
return discoverPkgsFromTar(reader.Location, archivePath, contentPath, gtp.cfg)
}

func discoverPkgsFromTar(location file.Location, archivePath, contentPath string, cfg Config) ([]pkg.Package, []artifact.Relationship, error) {
func discoverPkgsFromTar(location file.Location, archivePath, contentPath string, cfg ArchiveCatalogerConfig) ([]pkg.Package, []artifact.Relationship, error) {
openers, err := intFile.ExtractGlobsFromTarToUniqueTempFile(archivePath, contentPath, archiveFormatGlobs...)
if err != nil {
return nil, nil, fmt.Errorf("unable to extract files from tar: %w", err)
@@ -40,7 +40,7 @@ func Test_parseTarWrappedJavaArchive(t *testing.T) {
t.Fatalf("failed to open fixture: %+v", err)
}

gtp := newGenericTarWrappedJavaArchiveParser(Config{})
gtp := newGenericTarWrappedJavaArchiveParser(ArchiveCatalogerConfig{})
actualPkgs, _, err := gtp.parseTarWrappedJavaArchive(nil, nil, file.LocationReadCloser{
Location: file.NewLocation(test.fixture),
ReadCloser: fixture,
@@ -19,10 +19,10 @@ var genericZipGlobs = []string{
// parseZipWrappedJavaArchive is a parser function for java archive contents contained within arbitrary zip files.

type genericZipWrappedJavaArchiveParser struct {
cfg Config
cfg ArchiveCatalogerConfig
}

func newGenericZipWrappedJavaArchiveParser(cfg Config) genericZipWrappedJavaArchiveParser {
func newGenericZipWrappedJavaArchiveParser(cfg ArchiveCatalogerConfig) genericZipWrappedJavaArchiveParser {
return genericZipWrappedJavaArchiveParser{
cfg: cfg,
}
@@ -33,7 +33,7 @@ func Test_parseZipWrappedJavaArchive(t *testing.T) {
t.Fatalf("failed to open fixture: %+v", err)
}

gzp := newGenericZipWrappedJavaArchiveParser(Config{})
gzp := newGenericZipWrappedJavaArchiveParser(ArchiveCatalogerConfig{})

actualPkgs, _, err := gzp.parseZipWrappedJavaArchive(nil, nil, file.LocationReadCloser{
Location: file.NewLocation(test.fixture),
@@ -4,17 +4,18 @@ Package javascript provides a concrete Cataloger implementation for packages rel
package javascript

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewPackageCataloger returns a new cataloger object for NPM.
func NewPackageCataloger() *generic.Cataloger {
func NewPackageCataloger() pkg.Cataloger {
return generic.NewCataloger("javascript-package-cataloger").
WithParserByGlobs(parsePackageJSON, "**/package.json")
}

// NewLockCataloger returns a new cataloger object for NPM (and NPM-adjacent, such as yarn) lock files.
func NewLockCataloger() *generic.Cataloger {
func NewLockCataloger() pkg.Cataloger {
return generic.NewCataloger("javascript-lock-cataloger").
WithParserByGlobs(parsePackageLock, "**/package-lock.json").
WithParserByGlobs(parseYarnLock, "**/yarn.lock").
@@ -15,16 +15,16 @@ import (
var _ pkg.Cataloger = (*LinuxKernelCataloger)(nil)

type LinuxCatalogerConfig struct {
CatalogModules bool
type LinuxKernelCatalogerConfig struct {
CatalogModules bool `yaml:"catalog-modules" json:"catalog-modules" mapstructure:"catalog-modules"`
}

type LinuxKernelCataloger struct {
cfg LinuxCatalogerConfig
cfg LinuxKernelCatalogerConfig
}

func DefaultLinuxCatalogerConfig() LinuxCatalogerConfig {
return LinuxCatalogerConfig{
func DefaultLinuxCatalogerConfig() LinuxKernelCatalogerConfig {
return LinuxKernelCatalogerConfig{
CatalogModules: true,
}
}
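A minimal sketch combining the renamed kernel config with the constructor shown in the next hunk; the override below is illustrative:

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg/cataloger/kernel"
)

func main() {
	// The config type is renamed, but construction is unchanged: start from the
	// default (module cataloging enabled) and adjust fields directly.
	cfg := kernel.DefaultLinuxCatalogerConfig()
	cfg.CatalogModules = false // illustrative override

	cataloger := kernel.NewLinuxKernelCataloger(cfg)
	fmt.Println(cataloger.Name())
}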
@@ -43,7 +43,7 @@ var kernelModuleGlobs = []string{
}

// NewLinuxKernelCataloger returns a new kernel files cataloger object.
func NewLinuxKernelCataloger(cfg LinuxCatalogerConfig) *LinuxKernelCataloger {
func NewLinuxKernelCataloger(cfg LinuxKernelCatalogerConfig) *LinuxKernelCataloger {
return &LinuxKernelCataloger{
cfg: cfg,
}
@@ -87,7 +87,7 @@ func Test_KernelCataloger(t *testing.T) {
Expects(expectedPkgs, expectedRelationships).
TestCataloger(t,
NewLinuxKernelCataloger(
LinuxCatalogerConfig{
LinuxKernelCatalogerConfig{
CatalogModules: true,
},
),
@@ -19,7 +19,7 @@ const catalogerName = "nix-store-cataloger"
// StoreCataloger finds package outputs installed in the Nix store location (/nix/store/*).
type StoreCataloger struct{}

func NewStoreCataloger() *StoreCataloger {
func NewStoreCataloger() pkg.Cataloger {
return &StoreCataloger{}
}
@@ -4,6 +4,7 @@ Package php provides a concrete Cataloger implementation relating to packages wi
package php

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

@@ -11,13 +12,13 @@ import (
// semantic meanings. The lock file represents what should be installed, whereas the installed file represents what is installed.

// NewComposerInstalledCataloger returns a new cataloger for PHP installed.json files.
func NewComposerInstalledCataloger() *generic.Cataloger {
func NewComposerInstalledCataloger() pkg.Cataloger {
return generic.NewCataloger("php-composer-installed-cataloger").
WithParserByGlobs(parseInstalledJSON, "**/installed.json")
}

// NewComposerLockCataloger returns a new cataloger for PHP composer.lock files.
func NewComposerLockCataloger() *generic.Cataloger {
func NewComposerLockCataloger() pkg.Cataloger {
return generic.NewCataloger("php-composer-lock-cataloger").
WithParserByGlobs(parseComposerLock, "**/composer.lock")
}
@@ -4,13 +4,14 @@ Package python provides a concrete Cataloger implementation relating to packages
package python

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

const eggInfoGlob = "**/*.egg-info"

type CatalogerConfig struct {
GuessUnpinnedRequirements bool
GuessUnpinnedRequirements bool `yaml:"guess-unpinned-requirements" json:"guess-unpinned-requirements" mapstructure:"guess-unpinned-requirements"`
}

func DefaultCatalogerConfig() CatalogerConfig {

@@ -30,7 +31,7 @@ func NewPackageCataloger(cfg CatalogerConfig) *generic.Cataloger {
}

// NewInstalledPackageCataloger returns a new cataloger for python packages within egg or wheel installation directories.
func NewInstalledPackageCataloger() *generic.Cataloger {
func NewInstalledPackageCataloger() pkg.Cataloger {
return generic.NewCataloger("python-installed-package-cataloger").
WithParserByGlobs(
parseWheelOrEgg,
@@ -4,11 +4,12 @@ Package r provides a concrete Cataloger implementation relating to packages with
package r

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewPackageCataloger returns a new R cataloger object based on detection of R package DESCRIPTION files.
func NewPackageCataloger() *generic.Cataloger {
func NewPackageCataloger() pkg.Cataloger {
return generic.NewCataloger("r-package-cataloger").
WithParserByGlobs(parseDescriptionFile, "**/DESCRIPTION")
}
@@ -12,7 +12,7 @@ import (
)

// NewDBCataloger returns a new RPM DB cataloger object.
func NewDBCataloger() *generic.Cataloger {
func NewDBCataloger() pkg.Cataloger {
// check if a sqlite driver is available
if !isSqliteDriverAvailable() {
log.Warnf("sqlite driver is not available, newer RPM databases might not be cataloged")

@@ -24,7 +24,7 @@ func NewDBCataloger() *generic.Cataloger {
}

// NewArchiveCataloger returns a new RPM file cataloger object.
func NewArchiveCataloger() *generic.Cataloger {
func NewArchiveCataloger() pkg.Cataloger {
return generic.NewCataloger("rpm-archive-cataloger").
WithParserByGlobs(parseRpmArchive, "**/*.rpm")
}
@@ -4,23 +4,24 @@ Package ruby provides a concrete Cataloger implementation relating to packages w
package ruby

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewGemFileLockCataloger returns a new Bundler cataloger object tailored for parsing index-oriented files (e.g. Gemfile.lock).
func NewGemFileLockCataloger() *generic.Cataloger {
func NewGemFileLockCataloger() pkg.Cataloger {
return generic.NewCataloger("ruby-gemfile-cataloger").
WithParserByGlobs(parseGemFileLockEntries, "**/Gemfile.lock")
}

// NewInstalledGemSpecCataloger returns a new Bundler cataloger object tailored for detecting installations of gems (e.g. Gemspec).
func NewInstalledGemSpecCataloger() *generic.Cataloger {
func NewInstalledGemSpecCataloger() pkg.Cataloger {
return generic.NewCataloger("ruby-installed-gemspec-cataloger").
WithParserByGlobs(parseGemSpecEntries, "**/specifications/**/*.gemspec")
}

// NewGemSpecCataloger looks for gems without the additional requirement of the gem being installed.
func NewGemSpecCataloger() *generic.Cataloger {
func NewGemSpecCataloger() pkg.Cataloger {
return generic.NewCataloger("ruby-gemspec-cataloger").
WithParserByGlobs(parseGemSpecEntries, "**/*.gemspec")
}
@@ -5,18 +5,19 @@ package rust
import (
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

// NewCargoLockCataloger returns a new Rust Cargo lock file cataloger object.
func NewCargoLockCataloger() *generic.Cataloger {
func NewCargoLockCataloger() pkg.Cataloger {
return generic.NewCataloger("rust-cargo-lock-cataloger").
WithParserByGlobs(parseCargoLock, "**/Cargo.lock")
}

// NewAuditBinaryCataloger returns a new Rust auditable binary cataloger object that can detect dependencies
// in binaries produced with https://github.com/Shnatsel/rust-audit
func NewAuditBinaryCataloger() *generic.Cataloger {
func NewAuditBinaryCataloger() pkg.Cataloger {
return generic.NewCataloger("cargo-auditable-binary-cataloger").
WithParserByMimeTypes(parseAuditBinary, internal.ExecutableMIMETypeSet.List()...)
}
@@ -19,7 +19,7 @@ import (
const catalogerName = "sbom-cataloger"

// NewCataloger returns a new SBOM cataloger object loaded from saved SBOM JSON.
func NewCataloger() *generic.Cataloger {
func NewCataloger() pkg.Cataloger {
return generic.NewCataloger(catalogerName).
WithParserByGlobs(parseSBOM,
"**/*.syft.json",
@@ -4,16 +4,17 @@ Package swift provides a concrete Cataloger implementation relating to packages
package swift

import (
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/pkg/cataloger/generic"
)

func NewSwiftPackageManagerCataloger() *generic.Cataloger {
func NewSwiftPackageManagerCataloger() pkg.Cataloger {
return generic.NewCataloger("swift-package-manager-cataloger").
WithParserByGlobs(parsePackageResolved, "**/Package.resolved", "**/.package.resolved")
}

// NewCocoapodsCataloger returns a new Swift Cocoapods lock file cataloger object.
func NewCocoapodsCataloger() *generic.Cataloger {
func NewCocoapodsCataloger() pkg.Cataloger {
return generic.NewCataloger("cocoapods-cataloger").
WithParserByGlobs(parsePodfileLock, "**/Podfile.lock")
}