Merge pull request #203 from anchore/syft_bundler_wip

Add gemspec support
Toure Dunnon 2020-10-08 11:03:30 -04:00 committed by GitHub
commit 1be5f5756a
40 changed files with 672 additions and 545 deletions
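
In short: cataloger selection now depends on the source scheme, and a new Ruby gemspec cataloger covers installed gems for image scans while the Gemfile.lock cataloger covers directory scans. A minimal sketch of the dispatch (the helper and the bare Scope literal are illustrative only; the called functions and types all appear in the diff below):

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/cataloger"
	"github.com/anchore/syft/syft/scope"
)

// selectCatalogers mirrors the new CatalogFromScope logic in syft/lib.go
// (this helper itself is not part of the change set).
func selectCatalogers(s scope.Scope) ([]cataloger.Cataloger, error) {
	switch s.Scheme {
	case scope.ImageScheme:
		// includes ruby.NewGemSpecCataloger() for installed gems (*.gemspec)
		return cataloger.ImageCatalogers(), nil
	case scope.DirectoryScheme:
		// includes ruby.NewGemFileLockCataloger() for Gemfile.lock entries
		return cataloger.DirectoryCatalogers(), nil
	default:
		return nil, fmt.Errorf("unable to determine cataloger set from scheme=%+v", s.Scheme)
	}
}

func main() {
	// a bare Scope literal is enough to demonstrate the dispatch (no resolver needed here)
	s := scope.Scope{Scheme: scope.DirectoryScheme}
	catalogers, err := selectCatalogers(s)
	if err != nil {
		panic(err)
	}
	fmt.Printf("directory scans run %d catalogers\n", len(catalogers))
}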

View file

@@ -130,7 +130,7 @@ unit: fixtures ## Run unit tests (with coverage)
.PHONY: integration
integration: ## Run integration tests
$(call title,Running integration tests)
-go test -v -tags=integration ./test/integration
+go test -tags=integration ./test/integration
# note: this is used by CI to determine if the integration test fixture cache (docker image tars) should be busted
integration-fingerprint:

View file

@@ -40,6 +40,9 @@
    "architecture": {
      "type": "string"
    },
+   "authors": {
+     "type": "null"
+   },
    "description": {
      "type": "string"
    },
@@ -48,31 +51,38 @@
    },
    "files": {
      "items": {
-       "properties": {
-         "checksum": {
-           "type": "string"
-         },
-         "ownerGid": {
-           "type": "string"
-         },
-         "ownerUid": {
-           "type": "string"
-         },
-         "path": {
-           "type": "string"
-         },
-         "permissions": {
-           "type": "string"
-         }
-       },
-       "required": [
-         "checksum",
-         "ownerGid",
-         "ownerUid",
-         "path",
-         "permissions"
-       ],
-       "type": "object"
+       "anyOf": [
+         {
+           "type": "string"
+         },
+         {
+           "properties": {
+             "checksum": {
+               "type": "string"
+             },
+             "ownerGid": {
+               "type": "string"
+             },
+             "ownerUid": {
+               "type": "string"
+             },
+             "path": {
+               "type": "string"
+             },
+             "permissions": {
+               "type": "string"
+             }
+           },
+           "required": [
+             "checksum",
+             "ownerGid",
+             "ownerUid",
+             "path",
+             "permissions"
+           ],
+           "type": "object"
+         }
+       ]
      },
      "type": "array"
    },
@@ -85,6 +95,12 @@
    "license": {
      "type": "string"
    },
+   "licenses": {
+     "items": {
+       "type": "string"
+     },
+     "type": "array"
+   },
    "maintainer": {
      "type": "string"
    },

View file

@@ -4,39 +4,14 @@ Package apkdb provides a concrete Cataloger implementation for Alpine DB files.
package apkdb

import (
-"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/common"
-"github.com/anchore/syft/syft/pkg"
-"github.com/anchore/syft/syft/scope"
)

-// Cataloger catalogs pkg.ApkPkg Package Types defined in Alpine DB files.
-type Cataloger struct {
-cataloger common.GenericCataloger
-}
-
-// New returns a new Alpine DB cataloger object.
-func New() *Cataloger {
+// NewApkdbCataloger returns a new Alpine DB cataloger object.
+func NewApkdbCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
"**/lib/apk/db/installed": parseApkDB,
}

-return &Cataloger{
-cataloger: common.NewGenericCataloger(nil, globParsers),
-}
-}
-
-// Name returns a string that uniquely describes this cataloger.
-func (a *Cataloger) Name() string {
-return "apkdb-cataloger"
-}
-
-// SelectFiles returns a set of discovered Alpine DB files from the user content source.
-func (a *Cataloger) SelectFiles(resolver scope.FileResolver) []file.Reference {
-return a.cataloger.SelectFiles(resolver)
-}
-
-// Catalog returns the Packages indexed from all Alpine DB files discovered.
-func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
-return a.cataloger.Catalog(contents, a.Name())
+return common.NewGenericCataloger(nil, globParsers, "apkdb-cataloger")
}

View file

@@ -1,42 +0,0 @@
/*
Package bundler provides a concrete Cataloger implementation for Ruby Gemfile.lock bundler files.
*/
package bundler
import (
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/common"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/scope"
)
// Cataloger catalogs pkg.GemPkg Package Types defined in Bundler Gemfile.lock files.
type Cataloger struct {
cataloger common.GenericCataloger
}
// New returns a new Bundler cataloger object.
func New() *Cataloger {
globParsers := map[string]common.ParserFn{
"**/Gemfile.lock": parseGemfileLockEntries,
}
return &Cataloger{
cataloger: common.NewGenericCataloger(nil, globParsers),
}
}
// Name returns a string that uniquely describes this cataloger.
func (a *Cataloger) Name() string {
return "bundler-cataloger"
}
// SelectFiles returns a set of discovered Gemfile.lock files from the user content source.
func (a *Cataloger) SelectFiles(resolver scope.FileResolver) []file.Reference {
return a.cataloger.SelectFiles(resolver)
}
// Catalog returns the Packages indexed from all Gemfile.lock files discovered.
func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
return a.cataloger.Catalog(contents, a.Name())
}

View file

@@ -1,100 +0,0 @@
package bundler
import (
"os"
"testing"
"github.com/anchore/syft/syft/pkg"
)
var expected = map[string]string{
"actionmailer": "4.1.1",
"actionpack": "4.1.1",
"actionview": "4.1.1",
"activemodel": "4.1.1",
"activerecord": "4.1.1",
"activesupport": "4.1.1",
"arel": "5.0.1.20140414130214",
"bootstrap-sass": "3.1.1.1",
"builder": "3.2.2",
"coffee-rails": "4.0.1",
"coffee-script": "2.2.0",
"coffee-script-source": "1.7.0",
"erubis": "2.7.0",
"execjs": "2.0.2",
"hike": "1.2.3",
"i18n": "0.6.9",
"jbuilder": "2.0.7",
"jquery-rails": "3.1.0",
"json": "1.8.1",
"kgio": "2.9.2",
"libv8": "3.16.14.3",
"mail": "2.5.4",
"mime-types": "1.25.1",
"minitest": "5.3.4",
"multi_json": "1.10.1",
"mysql2": "0.3.16",
"polyglot": "0.3.4",
"rack": "1.5.2",
"rack-test": "0.6.2",
"rails": "4.1.1",
"railties": "4.1.1",
"raindrops": "0.13.0",
"rake": "10.3.2",
"rdoc": "4.1.1",
"ref": "1.0.5",
"sass": "3.2.19",
"sass-rails": "4.0.3",
"sdoc": "0.4.0",
"spring": "1.1.3",
"sprockets": "2.11.0",
"sprockets-rails": "2.1.3",
"sqlite3": "1.3.9",
"therubyracer": "0.12.1",
"thor": "0.19.1",
"thread_safe": "0.3.3",
"tilt": "1.4.1",
"treetop": "1.4.15",
"turbolinks": "2.2.2",
"tzinfo": "1.2.0",
"uglifier": "2.5.0",
"unicorn": "4.8.3",
}
func TestParseGemfileLockEntries(t *testing.T) {
fixture, err := os.Open("test-fixtures/Gemfile.lock")
if err != nil {
t.Fatalf("failed to open fixture: %+v", err)
}
actual, err := parseGemfileLockEntries(fixture.Name(), fixture)
if err != nil {
t.Fatalf("failed to parse gemfile lock: %+v", err)
}
if len(actual) != len(expected) {
for _, a := range actual {
t.Log(" ", a)
}
t.Fatalf("unexpected package count: %d!=%d", len(actual), len(expected))
}
for _, a := range actual {
expectedVersion, ok := expected[a.Name]
if !ok {
t.Errorf("unexpected package found: %s", a.Name)
}
if expectedVersion != a.Version {
t.Errorf("unexpected package version (pkg=%s): %s", a.Name, a.Version)
}
if a.Language != pkg.Ruby {
t.Errorf("bad language (pkg=%+v): %+v", a.Name, a.Language)
}
if a.Type != pkg.BundlerPkg {
t.Errorf("bad package type (pkg=%+v): %+v", a.Name, a.Type)
}
}
}

View file

@@ -8,13 +8,13 @@ package cataloger
import (
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/apkdb"
-"github.com/anchore/syft/syft/cataloger/bundler"
-"github.com/anchore/syft/syft/cataloger/dpkg"
+"github.com/anchore/syft/syft/cataloger/deb"
"github.com/anchore/syft/syft/cataloger/golang"
"github.com/anchore/syft/syft/cataloger/java"
"github.com/anchore/syft/syft/cataloger/javascript"
"github.com/anchore/syft/syft/cataloger/python"
"github.com/anchore/syft/syft/cataloger/rpmdb"
+"github.com/anchore/syft/syft/cataloger/ruby"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/scope"
)
@@ -33,16 +33,30 @@ type Cataloger interface {
// TODO: we should consider refactoring to return a set of io.Readers instead of the full contents themselves (allow for optional buffering).
}

-// All returns a slice of all locally defined catalogers (defined in child packages).
-func All() []Cataloger {
+// ImageCatalogers returns a slice of locally implemented catalogers that are fit for detecting installations of packages.
+func ImageCatalogers() []Cataloger {
return []Cataloger{
-dpkg.New(),
-bundler.New(),
-python.New(),
-rpmdb.New(),
-java.New(),
-apkdb.New(),
-golang.New(),
-javascript.New(),
+ruby.NewGemSpecCataloger(),
+python.NewPythonCataloger(), // TODO: split and replace me
+javascript.NewJavascriptCataloger(), // TODO: split and replace me
+deb.NewDpkgdbCataloger(),
+rpmdb.NewRpmdbCataloger(),
+java.NewJavaCataloger(),
+apkdb.NewApkdbCataloger(),
+golang.NewGoModCataloger(),
+}
+}
+
+// DirectoryCatalogers returns a slice of locally implemented catalogers that are fit for detecting packages from index files (and select installations)
+func DirectoryCatalogers() []Cataloger {
+return []Cataloger{
+ruby.NewGemFileLockCataloger(),
+python.NewPythonCataloger(), // TODO: split and replace me
+javascript.NewJavascriptCataloger(), // TODO: split and replace me
+deb.NewDpkgdbCataloger(),
+rpmdb.NewRpmdbCataloger(),
+java.NewJavaCataloger(),
+apkdb.NewApkdbCataloger(),
+golang.NewGoModCataloger(),
}
}

View file

@@ -15,22 +15,29 @@ import (
// GenericCataloger implements the Catalog interface and is responsible for dispatching the proper parser function for
// a given path or glob pattern. This is intended to be reusable across many package cataloger types.
type GenericCataloger struct {
globParsers map[string]ParserFn
pathParsers map[string]ParserFn
selectedFiles []file.Reference
parsers map[file.Reference]ParserFn
+upstreamCataloger string
}

// NewGenericCataloger if provided path-to-parser-function and glob-to-parser-function lookups creates a GenericCataloger
-func NewGenericCataloger(pathParsers map[string]ParserFn, globParsers map[string]ParserFn) GenericCataloger {
-return GenericCataloger{
+func NewGenericCataloger(pathParsers map[string]ParserFn, globParsers map[string]ParserFn, upstreamCataloger string) *GenericCataloger {
+return &GenericCataloger{
globParsers: globParsers,
pathParsers: pathParsers,
selectedFiles: make([]file.Reference, 0),
parsers: make(map[file.Reference]ParserFn),
+upstreamCataloger: upstreamCataloger,
}
}

+// Name returns a string that uniquely describes the upstream cataloger that this Generic Cataloger represents.
+func (a *GenericCataloger) Name() string {
+return a.upstreamCataloger
+}
+
// register pairs a set of file references with a parser function for future cataloging (when the file contents are resolved)
func (a *GenericCataloger) register(files []file.Reference, parser ParserFn) {
a.selectedFiles = append(a.selectedFiles, files...)
@@ -73,7 +80,7 @@ func (a *GenericCataloger) SelectFiles(resolver scope.FileResolver) []file.Refer
}

// Catalog takes a set of file contents and uses any configured parser functions to resolve and return discovered packages
-func (a *GenericCataloger) Catalog(contents map[file.Reference]string, upstreamMatcher string) ([]pkg.Package, error) {
+func (a *GenericCataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
defer a.clear()

packages := make([]pkg.Package, 0)
@@ -81,19 +88,19 @@ func (a *GenericCataloger) Catalog(contents map[file.Reference]string, upstreamM
for reference, parser := range a.parsers {
content, ok := contents[reference]
if !ok {
-log.Errorf("cataloger '%s' missing file content: %+v", upstreamMatcher, reference)
+log.Errorf("cataloger '%s' missing file content: %+v", a.upstreamCataloger, reference)
continue
}

entries, err := parser(string(reference.Path), strings.NewReader(content))
if err != nil {
// TODO: should we fail? or only log?
-log.Errorf("cataloger '%s' failed to parse entries (reference=%+v): %+v", upstreamMatcher, reference, err)
+log.Errorf("cataloger '%s' failed to parse entries (reference=%+v): %+v", a.upstreamCataloger, reference, err)
continue
}

for _, entry := range entries {
-entry.FoundBy = upstreamMatcher
+entry.FoundBy = a.upstreamCataloger
entry.Source = []file.Reference{reference}

packages = append(packages, entry)

View file

@@ -60,9 +60,9 @@ func TestGenericCataloger(t *testing.T) {
"/another-path.txt": parser,
"/last/path.txt": parser,
}
+upstream := "some-other-cataloger"

resolver := newTestResolver()
-cataloger := NewGenericCataloger(pathParsers, globParsers)
+cataloger := NewGenericCataloger(pathParsers, globParsers, upstream)

selected := cataloger.SelectFiles(resolver)
@@ -79,7 +79,6 @@ func TestGenericCataloger(t *testing.T) {
selectionByPath[string(s.Path)] = s
}

-upstream := "some-other-cataloger"
expectedPkgs := make(map[file.Reference]pkg.Package)
for path, ref := range selectionByPath {
expectedPkgs[ref] = pkg.Package{
@@ -89,7 +88,7 @@ func TestGenericCataloger(t *testing.T) {
}
}

-actualPkgs, err := cataloger.Catalog(resolver.contents, upstream)
+actualPkgs, err := cataloger.Catalog(resolver.contents)
if err != nil {
t.Fatalf("cataloger catalog action failed: %+v", err)
}

View file

@@ -0,0 +1,17 @@
/*
Package dpkg provides a concrete Cataloger implementation for Debian package DB status files.
*/
package deb
import (
"github.com/anchore/syft/syft/cataloger/common"
)
// NewDpkgdbCataloger returns a new Deb package cataloger object.
func NewDpkgdbCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
"**/var/lib/dpkg/status": parseDpkgStatus,
}
return common.NewGenericCataloger(nil, globParsers, "dpkgdb-cataloger")
}

View file

@@ -1,4 +1,4 @@
-package dpkg
+package deb

import (
"bufio"

View file

@@ -1,4 +1,4 @@
-package dpkg
+package deb

import (
"bufio"

View file

@@ -1,42 +0,0 @@
/*
Package dpkg provides a concrete Cataloger implementation for Debian package DB status files.
*/
package dpkg
import (
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/common"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/scope"
)
// Cataloger catalogs pkg.DebPkg Package Types defined in DPKG status files.
type Cataloger struct {
cataloger common.GenericCataloger
}
// New returns a new Deb package cataloger object.
func New() *Cataloger {
globParsers := map[string]common.ParserFn{
"**/var/lib/dpkg/status": parseDpkgStatus,
}
return &Cataloger{
cataloger: common.NewGenericCataloger(nil, globParsers),
}
}
// Name returns a string that uniquely describes this cataloger.
func (a *Cataloger) Name() string {
return "dpkg-cataloger"
}
// SelectFiles returns a set of discovered DPKG status files from the user content source.
func (a *Cataloger) SelectFiles(resolver scope.FileResolver) []file.Reference {
return a.cataloger.SelectFiles(resolver)
}
// Catalog returns the Packages indexed from all DPKG status files discovered.
func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
return a.cataloger.Catalog(contents, a.Name())
}

View file

@@ -4,39 +4,14 @@ Package golang provides a concrete Cataloger implementation for go.mod files.
package golang

import (
-"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/common"
-"github.com/anchore/syft/syft/pkg"
-"github.com/anchore/syft/syft/scope"
)

-// Cataloger catalogs pkg.GoModulePkg Package Types defined in go.mod files.
-type Cataloger struct {
-cataloger common.GenericCataloger
-}
-
-// New returns a new Go module cataloger object.
-func New() *Cataloger {
+// NewGoModCataloger returns a new Go module cataloger object.
+func NewGoModCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
"**/go.mod": parseGoMod,
}

-return &Cataloger{
-cataloger: common.NewGenericCataloger(nil, globParsers),
-}
-}
-
-// Name returns a string that uniquely describes this cataloger.
-func (a *Cataloger) Name() string {
-return "go-cataloger"
-}
-
-// SelectFiles returns a set of discovered go.mod files from the user content source.
-func (a *Cataloger) SelectFiles(resolver scope.FileResolver) []file.Reference {
-return a.cataloger.SelectFiles(resolver)
-}
-
-// Catalog returns the Packages indexed from all go.mod files discovered.
-func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
-return a.cataloger.Catalog(contents, a.Name())
+return common.NewGenericCataloger(nil, globParsers, "go-cataloger")
}

View file

@@ -4,40 +4,15 @@ Package java provides a concrete Cataloger implementation for Java archives (jar
package java

import (
-"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/common"
-"github.com/anchore/syft/syft/pkg"
-"github.com/anchore/syft/syft/scope"
)

-// Cataloger catalogs pkg.JavaPkg and pkg.JenkinsPluginPkg Package Types defined in java archive files.
-type Cataloger struct {
-cataloger common.GenericCataloger
-}
-
-// New returns a new Java archive cataloger object.
-func New() *Cataloger {
+// NewJavaCataloger returns a new Java archive cataloger object.
+func NewJavaCataloger() *common.GenericCataloger {
globParsers := make(map[string]common.ParserFn)
for _, pattern := range archiveFormatGlobs {
globParsers[pattern] = parseJavaArchive
}

-return &Cataloger{
-cataloger: common.NewGenericCataloger(nil, globParsers),
-}
-}
-
-// Name returns a string that uniquely describes this cataloger.
-func (a *Cataloger) Name() string {
-return "java-cataloger"
-}
-
-// SelectFiles returns a set of discovered Java archive files from the user content source.
-func (a *Cataloger) SelectFiles(resolver scope.FileResolver) []file.Reference {
-return a.cataloger.SelectFiles(resolver)
-}
-
-// Catalog returns the Packages indexed from all Java archive files discovered.
-func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
-return a.cataloger.Catalog(contents, a.Name())
+return common.NewGenericCataloger(nil, globParsers, "java-cataloger")
}

View file

@@ -4,40 +4,15 @@ Package javascript provides a concrete Cataloger implementation for JavaScript e
package javascript

import (
-"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/common"
-"github.com/anchore/syft/syft/pkg"
-"github.com/anchore/syft/syft/scope"
)

-// Cataloger catalogs pkg.YarnPkg and pkg.NpmPkg Package Types defined in package-lock.json and yarn.lock files.
-type Cataloger struct {
-cataloger common.GenericCataloger
-}
-
-// New returns a new JavaScript cataloger object.
-func New() *Cataloger {
+// NewJavascriptCataloger returns a new JavaScript cataloger object.
+func NewJavascriptCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
"**/package-lock.json": parsePackageLock,
"**/yarn.lock": parseYarnLock,
}

-return &Cataloger{
-cataloger: common.NewGenericCataloger(nil, globParsers),
-}
-}
-
-// Name returns a string that uniquely describes this cataloger.
-func (a *Cataloger) Name() string {
-return "javascript-cataloger"
-}
-
-// SelectFiles returns a set of discovered Javascript ecosystem files from the user content source.
-func (a *Cataloger) SelectFiles(resolver scope.FileResolver) []file.Reference {
-return a.cataloger.SelectFiles(resolver)
-}
-
-// Catalog returns the Packages indexed from all Javascript ecosystem files discovered.
-func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
-return a.cataloger.Catalog(contents, a.Name())
+return common.NewGenericCataloger(nil, globParsers, "javascript-cataloger")
}

View file

@@ -4,19 +4,11 @@ Package python provides a concrete Cataloger implementation for Python ecosystem
package python

import (
-"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/common"
-"github.com/anchore/syft/syft/pkg"
-"github.com/anchore/syft/syft/scope"
)

-// Cataloger catalogs pkg.WheelPkg, pkg.EggPkg, and pkg.PythonRequirementsPkg Package Types defined in Python ecosystem files.
-type Cataloger struct {
-cataloger common.GenericCataloger
-}
-
-// New returns a new Python cataloger object.
-func New() *Cataloger {
+// NewPythonCataloger returns a new Python cataloger object.
+func NewPythonCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
"**/*egg-info/PKG-INFO": parseEggMetadata,
"**/*dist-info/METADATA": parseWheelMetadata,
@@ -25,22 +17,5 @@ func New() *Cataloger {
"**/setup.py": parseSetup,
}

-return &Cataloger{
-cataloger: common.NewGenericCataloger(nil, globParsers),
-}
-}
-
-// Name returns a string that uniquely describes this cataloger.
-func (a *Cataloger) Name() string {
-return "python-cataloger"
-}
-
-// SelectFiles returns a set of discovered Python ecosystem files from the user content source.
-func (a *Cataloger) SelectFiles(resolver scope.FileResolver) []file.Reference {
-return a.cataloger.SelectFiles(resolver)
-}
-
-// Catalog returns the Packages indexed from all Python ecosystem files discovered.
-func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
-return a.cataloger.Catalog(contents, a.Name())
+return common.NewGenericCataloger(nil, globParsers, "python-cataloger")
}

View file

@@ -4,39 +4,13 @@ Package rpmdb provides a concrete Cataloger implementation for RPM "Package" DB
package rpmdb

import (
-"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/cataloger/common"
-"github.com/anchore/syft/syft/pkg"
-"github.com/anchore/syft/syft/scope"
)

-// Cataloger catalogs pkg.RpmPkg Package Types defined in RPM DB files.
-type Cataloger struct {
-cataloger common.GenericCataloger
-}
-
-// New returns a new RPM DB cataloger object.
-func New() *Cataloger {
+// NewRpmdbCataloger returns a new RPM DB cataloger object.
+func NewRpmdbCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
"**/var/lib/rpm/Packages": parseRpmDB,
}

-return &Cataloger{
-cataloger: common.NewGenericCataloger(nil, globParsers),
-}
-}
-
-// Name returns a string that uniquely describes this cataloger.
-func (a *Cataloger) Name() string {
-return "rpmdb-cataloger"
-}
-
-// SelectFiles returns a set of discovered RPM DB files from the user content source.
-func (a *Cataloger) SelectFiles(resolver scope.FileResolver) []file.Reference {
-return a.cataloger.SelectFiles(resolver)
-}
-
-// Catalog returns the Packages indexed from all RPM DB files discovered.
-func (a *Cataloger) Catalog(contents map[file.Reference]string) ([]pkg.Package, error) {
-return a.cataloger.Catalog(contents, a.Name())
+return common.NewGenericCataloger(nil, globParsers, "rpmdb-cataloger")
}

View file

@@ -0,0 +1,26 @@
/*
Package bundler provides a concrete Cataloger implementation for Ruby Gemfile.lock bundler files.
*/
package ruby
import (
"github.com/anchore/syft/syft/cataloger/common"
)
// NewGemFileLockCataloger returns a new Bundler cataloger object tailored for parsing index-oriented files (e.g. Gemfile.lock).
func NewGemFileLockCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
"**/Gemfile.lock": parseGemFileLockEntries,
}
return common.NewGenericCataloger(nil, globParsers, "ruby-gemfile-cataloger")
}
// NewGemSpecCataloger returns a new Bundler cataloger object tailored for detecting installations of gems (e.g. Gemspec).
func NewGemSpecCataloger() *common.GenericCataloger {
globParsers := map[string]common.ParserFn{
"**/specification/*.gemspec": parseGemSpecEntries,
}
return common.NewGenericCataloger(nil, globParsers, "ruby-gemspec-cataloger")
}

View file

@@ -1,4 +1,4 @@
-package bundler
+package ruby

import (
"bufio"
@@ -11,12 +11,12 @@ import (
)

// integrity check
-var _ common.ParserFn = parseGemfileLockEntries
+var _ common.ParserFn = parseGemFileLockEntries

var sectionsOfInterest = internal.NewStringSetFromSlice([]string{"GEM"})

-// parseGemfileLockEntries is a parser function for Gemfile.lock contents, returning all Gems discovered.
-func parseGemfileLockEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
+// parseGemFileLockEntries is a parser function for Gemfile.lock contents, returning all Gems discovered.
+func parseGemFileLockEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
pkgs := make([]pkg.Package, 0)
scanner := bufio.NewScanner(reader)
@@ -44,7 +44,7 @@ func parseGemfileLockEntries(_ string, reader io.Reader) ([]pkg.Package, error)
Name: candidate[0],
Version: strings.Trim(candidate[1], "()"),
Language: pkg.Ruby,
-Type: pkg.BundlerPkg,
+Type: pkg.GemPkg,
})
}
}

View file

@@ -0,0 +1,101 @@
package ruby
import (
"os"
"testing"
"github.com/anchore/syft/syft/pkg"
)
func TestParseGemfileLockEntries(t *testing.T) {
var expectedGems = map[string]string{
"actionmailer": "4.1.1",
"actionpack": "4.1.1",
"actionview": "4.1.1",
"activemodel": "4.1.1",
"activerecord": "4.1.1",
"activesupport": "4.1.1",
"arel": "5.0.1.20140414130214",
"bootstrap-sass": "3.1.1.1",
"builder": "3.2.2",
"coffee-rails": "4.0.1",
"coffee-script": "2.2.0",
"coffee-script-source": "1.7.0",
"erubis": "2.7.0",
"execjs": "2.0.2",
"hike": "1.2.3",
"i18n": "0.6.9",
"jbuilder": "2.0.7",
"jquery-rails": "3.1.0",
"json": "1.8.1",
"kgio": "2.9.2",
"libv8": "3.16.14.3",
"mail": "2.5.4",
"mime-types": "1.25.1",
"minitest": "5.3.4",
"multi_json": "1.10.1",
"mysql2": "0.3.16",
"polyglot": "0.3.4",
"rack": "1.5.2",
"rack-test": "0.6.2",
"rails": "4.1.1",
"railties": "4.1.1",
"raindrops": "0.13.0",
"rake": "10.3.2",
"rdoc": "4.1.1",
"ref": "1.0.5",
"sass": "3.2.19",
"sass-rails": "4.0.3",
"sdoc": "0.4.0",
"spring": "1.1.3",
"sprockets": "2.11.0",
"sprockets-rails": "2.1.3",
"sqlite3": "1.3.9",
"therubyracer": "0.12.1",
"thor": "0.19.1",
"thread_safe": "0.3.3",
"tilt": "1.4.1",
"treetop": "1.4.15",
"turbolinks": "2.2.2",
"tzinfo": "1.2.0",
"uglifier": "2.5.0",
"unicorn": "4.8.3",
}
fixture, err := os.Open("test-fixtures/Gemfile.lock")
if err != nil {
t.Fatalf("failed to open fixture: %+v", err)
}
actual, err := parseGemFileLockEntries(fixture.Name(), fixture)
if err != nil {
t.Fatalf("failed to parse gemfile lock: %+v", err)
}
if len(actual) != len(expectedGems) {
for _, a := range actual {
t.Log(" ", a)
}
t.Fatalf("unexpected package count: %d!=%d", len(actual), len(expectedGems))
}
for _, a := range actual {
expectedVersion, ok := expectedGems[a.Name]
if !ok {
t.Errorf("unexpected package found: %s", a.Name)
}
if expectedVersion != a.Version {
t.Errorf("unexpected package version (pkg=%s): %s", a.Name, a.Version)
}
if a.Language != pkg.Ruby {
t.Errorf("bad language (pkg=%+v): %+v", a.Name, a.Language)
}
if a.Type != pkg.GemPkg {
t.Errorf("bad package type (pkg=%+v): %+v", a.Name, a.Type)
}
}
}

View file

@@ -0,0 +1,135 @@
package ruby
import (
"bufio"
"encoding/json"
"fmt"
"io"
"regexp"
"strings"
"github.com/mitchellh/mapstructure"
"github.com/anchore/syft/syft/cataloger/common"
"github.com/anchore/syft/syft/pkg"
)
// integrity check
var _ common.ParserFn = parseGemFileLockEntries
type postProcessor func(string) []string
// match example: Al\u003Ex ---> 003E
var unicodePattern = regexp.MustCompile(`\\u(?P<unicode>[0-9A-F]{4})`)
var patterns = map[string]*regexp.Regexp{
// match example: name = "railties".freeze ---> railties
"name": regexp.MustCompile(`.*\.name\s*=\s*["']{1}(?P<name>.*)["']{1} *`),
// match example: version = "1.0.4".freeze ---> 1.0.4
"version": regexp.MustCompile(`.*\.version\s*=\s*["']{1}(?P<version>.*)["']{1} *`),
// match example:
// homepage = "https://github.com/anchore/syft".freeze ---> https://github.com/anchore/syft
"homepage": regexp.MustCompile(`.*\.homepage\s*=\s*["']{1}(?P<homepage>.*)["']{1} *`),
// match example: files = ["exe/bundle".freeze, "exe/bundler".freeze] ---> "exe/bundle".freeze, "exe/bundler".freeze
"files": regexp.MustCompile(`.*\.files\s*=\s*\[(?P<files>.*)\] *`),
// match example: authors = ["Andr\u00E9 Arko".freeze, "Samuel Giddins".freeze, "Colby Swandale".freeze,
// "Hiroshi Shibata".freeze, "David Rodr\u00EDguez".freeze, "Grey Baker".freeze...]
"authors": regexp.MustCompile(`.*\.authors\s*=\s*\[(?P<authors>.*)\] *`),
// match example: licenses = ["MIT".freeze] ----> "MIT".freeze
"licenses": regexp.MustCompile(`.*\.licenses\s*=\s*\[(?P<licenses>.*)\] *`),
}
var postProcessors = map[string]postProcessor{
"files": processList,
"authors": processList,
"licenses": processList,
}
func processList(s string) []string {
// nolint:prealloc
var results []string
for _, item := range strings.Split(s, ",") {
results = append(results, strings.Trim(item, "\" "))
}
return results
}
func parseGemSpecEntries(_ string, reader io.Reader) ([]pkg.Package, error) {
var pkgs []pkg.Package
var fields = make(map[string]interface{})
scanner := bufio.NewScanner(reader)
for scanner.Scan() {
line := scanner.Text()
sanitizedLine := strings.TrimSpace(line)
sanitizedLine = strings.ReplaceAll(sanitizedLine, ".freeze", "")
sanitizedLine = renderUtf8(sanitizedLine)
if sanitizedLine == "" {
continue
}
for field, pattern := range patterns {
matchMap := matchCaptureGroups(pattern, sanitizedLine)
if value := matchMap[field]; value != "" {
if postProcessor := postProcessors[field]; postProcessor != nil {
fields[field] = postProcessor(value)
} else {
fields[field] = value
}
// TODO: know that a line could actually match on multiple patterns, this is unlikely though
break
}
}
}
if fields["name"] != "" && fields["version"] != "" {
var metadata pkg.GemMetadata
if err := mapstructure.Decode(fields, &metadata); err != nil {
return nil, fmt.Errorf("unable to decode gem metadata: %w", err)
}
pkgs = append(pkgs, pkg.Package{
Name: metadata.Name,
Version: metadata.Version,
Licenses: metadata.Licenses,
Language: pkg.Ruby,
Type: pkg.GemPkg,
Metadata: metadata,
})
}
return pkgs, nil
}
// renderUtf8 takes any string escaped string sub-sections from the ruby string and replaces those sections with the UTF8 runes.
func renderUtf8(s string) string {
fullReplacement := unicodePattern.ReplaceAllStringFunc(s, func(unicodeSection string) string {
var replacement string
// note: the json parser already has support for interpreting hex-representations of unicode escaped strings as unicode runes.
// we can do this ourselves with strconv.Atoi, or leverage the existing json package.
if err := json.Unmarshal([]byte(`"`+unicodeSection+`"`), &replacement); err != nil {
return unicodeSection
}
return replacement
})
return fullReplacement
}
// matchCaptureGroups takes a regular expression and string and returns all of the named capture group results in a map.
func matchCaptureGroups(regEx *regexp.Regexp, str string) map[string]string {
match := regEx.FindStringSubmatch(str)
results := make(map[string]string)
for i, name := range regEx.SubexpNames() {
if i > 0 && i <= len(match) {
results[name] = match[i]
}
}
return results
}
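
The parser works line by line: it trims whitespace, strips `.freeze`, renders `\uXXXX` escapes, runs each field regex against the sanitized line, and finally decodes the collected fields into pkg.GemMetadata. A test-style sketch (not part of this PR; the inline gemspec content and test name are made up) shows the round trip without a fixture file:

package ruby

import (
	"strings"
	"testing"
)

// TestParseGemSpecEntriesInline feeds parseGemSpecEntries an in-memory gemspec
// instead of a test fixture; the spec below is illustrative only.
func TestParseGemSpecEntriesInline(t *testing.T) {
	spec := `Gem::Specification.new do |s|
  s.name = "rake".freeze
  s.version = "13.0.1"
  s.licenses = ["MIT".freeze]
end`

	pkgs, err := parseGemSpecEntries("inline.gemspec", strings.NewReader(spec))
	if err != nil {
		t.Fatalf("failed to parse inline gemspec: %+v", err)
	}
	if len(pkgs) != 1 || pkgs[0].Name != "rake" || pkgs[0].Version != "13.0.1" {
		t.Fatalf("unexpected result: %+v", pkgs)
	}
}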

View file

@@ -0,0 +1,47 @@
package ruby
import (
"os"
"testing"
"github.com/anchore/syft/syft/pkg"
"github.com/go-test/deep"
)
func TestParseGemspec(t *testing.T) {
var expectedPkg = pkg.Package{
Name: "bundler",
Version: "2.1.4",
Type: pkg.GemPkg,
Licenses: []string{"MIT"},
Language: pkg.Ruby,
Metadata: pkg.GemMetadata{
Name: "bundler",
Version: "2.1.4",
Files: []string{"exe/bundle", "exe/bundler"},
Authors: []string{"André Arko", "Samuel Giddins", "Colby Swandale", "Hiroshi Shibata", "David Rodríguez", "Grey Baker", "Stephanie Morillo", "Chris Morris", "James Wen", "Tim Moore", "André Medeiros", "Jessica Lynn Suttles", "Terence Lee", "Carl Lerche", "Yehuda Katz"},
Licenses: []string{"MIT"},
},
}
fixture, err := os.Open("test-fixtures/bundler.gemspec")
if err != nil {
t.Fatalf("failed to open fixture: %+v", err)
}
actual, err := parseGemSpecEntries(fixture.Name(), fixture)
if err != nil {
t.Fatalf("failed to parse gemspec: %+v", err)
}
if len(actual) != 1 {
for _, a := range actual {
t.Log(" ", a)
}
t.Fatalf("unexpected package count: %d!=1", len(actual))
}
for _, d := range deep.Equal(actual[0], expectedPkg) {
t.Errorf("diff: %+v", d)
}
}

View file

@@ -0,0 +1,26 @@
# frozen_string_literal: true
# -*- encoding: utf-8 -*-
# stub: bundler 2.1.4 ruby lib
Gem::Specification.new do |s|
s.name = "bundler".freeze
s.version = "2.1.4"
s.required_rubygems_version = Gem::Requirement.new(">= 2.5.2".freeze) if s.respond_to? :required_rubygems_version=
s.metadata = { "bug_tracker_uri" => "https://github.com/bundler/bundler/issues", "changelog_uri" => "https://github.com/bundler/bundler/blob/master/CHANGELOG.md", "homepage_uri" => "https://bundler.io/", "source_code_uri" => "https://github.com/bundler/bundler/" } if s.respond_to? :metadata=
s.require_paths = ["lib".freeze]
s.authors = ["Andr\u00E9 Arko".freeze, "Samuel Giddins".freeze, "Colby Swandale".freeze, "Hiroshi Shibata".freeze, "David Rodr\u00EDguez".freeze, "Grey Baker".freeze, "Stephanie Morillo".freeze, "Chris Morris".freeze, "James Wen".freeze, "Tim Moore".freeze, "Andr\u00E9 Medeiros".freeze, "Jessica Lynn Suttles".freeze, "Terence Lee".freeze, "Carl Lerche".freeze, "Yehuda Katz".freeze]
s.bindir = "exe".freeze
s.date = "2020-01-05"
s.description = "Bundler manages an application's dependencies through its entire life, across many machines, systematically and repeatably".freeze
s.email = ["team@bundler.io".freeze]
s.executables = ["bundle".freeze, "bundler".freeze]
s.files = ["exe/bundle".freeze, "exe/bundler".freeze]
s.homepage = "https://bundler.io".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.3.0".freeze)
s.rubygems_version = "3.1.2".freeze
s.summary = "The best way to manage your application's dependencies".freeze
s.installed_by_version = "3.1.2" if s.respond_to? :installed_by_version
end

View file

@@ -17,6 +17,8 @@ Similar to the cataloging process, Linux distribution identification is also per
package syft

import (
+"fmt"
+
"github.com/anchore/syft/internal/bus"
"github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/cataloger"
@@ -62,7 +64,19 @@ func IdentifyDistro(s scope.Scope) distro.Distro {
// Catalog the given scope, which may represent a container image or filesystem. Returns the discovered set of packages.
func CatalogFromScope(s scope.Scope) (*pkg.Catalog, error) {
log.Info("building the catalog")
-return cataloger.Catalog(s.Resolver, cataloger.All()...)
+
+// conditionally have two sets of catalogers
+var catalogers []cataloger.Cataloger
+switch s.Scheme {
+case scope.ImageScheme:
+catalogers = cataloger.ImageCatalogers()
+case scope.DirectoryScheme:
+catalogers = cataloger.DirectoryCatalogers()
+default:
+return nil, fmt.Errorf("unable to determine cataloger set from scheme=%+v", s.Scheme)
+}
+
+return cataloger.Catalog(s.Resolver, catalogers...)
}

// SetLogger sets the logger object used for all syft logging calls.

syft/pkg/gem_metadata.go Normal file
View file

@@ -0,0 +1,9 @@
package pkg
type GemMetadata struct {
Name string `mapstructure:"name" json:"name"`
Version string `mapstructure:"version" json:"version"`
Files []string `mapstructure:"files" json:"files"`
Authors []string `mapstructure:"authors" json:"authors"`
Licenses []string `mapstructure:"licenses" json:"licenses"`
}
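
GemMetadata does double duty: the mapstructure tags receive the loosely-typed fields captured by the gemspec regexes, and the json tags drive what the JSON presenter (and the updated schema above, with its new authors and licenses properties) will emit. A standalone sketch with illustrative values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/mitchellh/mapstructure"

	"github.com/anchore/syft/syft/pkg"
)

func main() {
	// the gemspec parser collects loosely-typed values keyed by capture-group name...
	fields := map[string]interface{}{
		"name":     "bundler",
		"version":  "2.1.4",
		"licenses": []string{"MIT"},
	}

	// ...and mapstructure maps them onto the struct via the `mapstructure` tags
	var metadata pkg.GemMetadata
	if err := mapstructure.Decode(fields, &metadata); err != nil {
		panic(err)
	}

	// the `json` tags are what the JSON output sees
	out, _ := json.Marshal(metadata)
	fmt.Println(string(out)) // {"name":"bundler","version":"2.1.4","files":null,"authors":null,"licenses":["MIT"]}
}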

View file

@@ -1,9 +1,10 @@
package pkg

import (
+"testing"
+
"github.com/anchore/syft/syft/distro"
"github.com/sergi/go-diff/diffmatchpatch"
-"testing"
)

func TestPackage_pURL(t *testing.T) {
@@ -56,7 +57,7 @@ func TestPackage_pURL(t *testing.T) {
pkg: Package{
Name: "name",
Version: "v0.1.0",
-Type: BundlerPkg,
+Type: GemPkg,
},
expected: "pkg:gem/name@v0.1.0",
},

View file

@@ -8,7 +8,7 @@ type Type string
const (
UnknownPkg Type = "UnknownPackage"
ApkPkg Type = "apk"
-BundlerPkg Type = "bundle"
+GemPkg Type = "gem"
DebPkg Type = "deb"
EggPkg Type = "egg"
// PacmanPkg Type = "pacman"
@@ -26,7 +26,7 @@ const (
var AllPkgs = []Type{
ApkPkg,
-BundlerPkg,
+GemPkg,
DebPkg,
EggPkg,
// PacmanPkg,
@@ -45,7 +45,7 @@ func (t Type) PackageURLType() string {
switch t {
case ApkPkg:
return "alpine"
-case BundlerPkg:
+case GemPkg:
return packageurl.TypeGem
case DebPkg:
return "deb"

View file

@@ -34,9 +34,7 @@ func NewPresenter(catalog *pkg.Catalog, s scope.Scope, d distro.Distro) *Present
func (pres *Presenter) Present(output io.Writer) error {
bom := NewDocumentFromCatalog(pres.catalog, pres.distro)

-srcObj := pres.scope.Source()
-switch src := srcObj.(type) {
+switch src := pres.scope.Source.(type) {
case scope.DirSource:
bom.BomDescriptor.Component = &BdComponent{
Component: Component{

View file

@@ -15,8 +15,7 @@ type ImageLocation struct {
}

func NewLocations(p *pkg.Package, s scope.Scope) (Locations, error) {
-srcObj := s.Source()
-switch src := srcObj.(type) {
+switch src := s.Source.(type) {
case scope.ImageSource:
locations := make([]ImageLocation, len(p.Source))
for idx := range p.Source {

View file

@@ -12,8 +12,7 @@ type Source struct {
}

func NewSource(s scope.Scope) (Source, error) {
-srcObj := s.Source()
-switch src := srcObj.(type) {
+switch src := s.Source.(type) {
case scope.ImageSource:
return Source{
Type: "image",
@@ -22,7 +21,7 @@ func NewSource(s scope.Scope) (Source, error) {
case scope.DirSource:
return Source{
Type: "directory",
-Target: s.DirSrc.Path,
+Target: src.Path,
}, nil
default:
return Source{}, fmt.Errorf("unsupported source: %T", src)

View file

@@ -27,9 +27,8 @@ func (pres *Presenter) Present(output io.Writer) error {
// init the tabular writer
w := new(tabwriter.Writer)
w.Init(output, 0, 8, 0, '\t', tabwriter.AlignRight)
-srcObj := pres.scope.Source()
-switch src := srcObj.(type) {
+switch src := pres.scope.Source.(type) {
case scope.DirSource:
fmt.Fprintln(w, fmt.Sprintf("[Path: %s]", src.Path))
case scope.ImageSource:

View file

@@ -20,12 +20,12 @@ import (
)

const (
-unknownScheme scheme = "unknown-scheme"
-directoryScheme scheme = "directory-scheme"
-imageScheme scheme = "image-scheme"
+UnknownScheme Scheme = "unknown-scheme"
+DirectoryScheme Scheme = "directory-scheme"
+ImageScheme Scheme = "image-scheme"
)

-type scheme string
+type Scheme string

// ImageSource represents a data source that is a container image
type ImageSource struct {
@@ -42,8 +42,8 @@ type DirSource struct {
type Scope struct {
Option Option // specific perspective to catalog
Resolver Resolver // a Resolver object to use in file path/glob resolution and file contents resolution
-ImgSrc ImageSource // the specific image to be cataloged
-DirSrc DirSource // the specific directory to be cataloged
+Source interface{} // the specific source object to be cataloged
+Scheme Scheme // the source data scheme type (directory or image)
}

// NewScope produces a Scope based on userInput like dir: or image:tag
@@ -55,7 +55,7 @@ func NewScope(userInput string, o Option) (Scope, func(), error) {
}

switch parsedScheme {
-case directoryScheme:
+case DirectoryScheme:
fileMeta, err := fs.Stat(location)
if err != nil {
return Scope{}, func() {}, fmt.Errorf("unable to stat dir=%q: %w", location, err)
@@ -71,7 +71,7 @@ func NewScope(userInput string, o Option) (Scope, func(), error) {
}
return s, func() {}, nil

-case imageScheme:
+case ImageScheme:
img, err := stereoscope.GetImage(location)
cleanup := func() {
stereoscope.Cleanup()
@@ -97,9 +97,10 @@ func NewScopeFromDir(path string) (Scope, error) {
Resolver: &resolvers.DirectoryResolver{
Path: path,
},
-DirSrc: DirSource{
+Source: DirSource{
Path: path,
},
+Scheme: DirectoryScheme,
}, nil
}
@@ -118,59 +119,48 @@ func NewScopeFromImage(img *image.Image, option Option) (Scope, error) {
return Scope{
Option: option,
Resolver: resolver,
-ImgSrc: ImageSource{
+Source: ImageSource{
Img: img,
},
+Scheme: ImageScheme,
}, nil
}

-// Source returns the configured data source (either a dir source or container image source)
-func (s Scope) Source() interface{} {
-if s.ImgSrc != (ImageSource{}) {
-return s.ImgSrc
-}
-if s.DirSrc != (DirSource{}) {
-return s.DirSrc
-}
-return nil
-}
-
type sourceDetector func(string) (image.Source, string, error)

-func detectScheme(fs afero.Fs, imageDetector sourceDetector, userInput string) (scheme, string, error) {
+func detectScheme(fs afero.Fs, imageDetector sourceDetector, userInput string) (Scheme, string, error) {
if strings.HasPrefix(userInput, "dir:") {
// blindly trust the user's scheme
dirLocation, err := homedir.Expand(strings.TrimPrefix(userInput, "dir:"))
if err != nil {
-return unknownScheme, "", fmt.Errorf("unable to expand directory path: %w", err)
+return UnknownScheme, "", fmt.Errorf("unable to expand directory path: %w", err)
}
-return directoryScheme, dirLocation, nil
+return DirectoryScheme, dirLocation, nil
}

// we should attempt to let stereoscope determine what the source is first --just because the source is a valid directory
// doesn't mean we yet know if it is an OCI layout directory (to be treated as an image) or if it is a generic filesystem directory.
source, imageSpec, err := imageDetector(userInput)
if err != nil {
-return unknownScheme, "", fmt.Errorf("unable to detect the scheme from %q: %w", userInput, err)
+return UnknownScheme, "", fmt.Errorf("unable to detect the scheme from %q: %w", userInput, err)
}

if source == image.UnknownSource {
dirLocation, err := homedir.Expand(userInput)
if err != nil {
-return unknownScheme, "", fmt.Errorf("unable to expand potential directory path: %w", err)
+return UnknownScheme, "", fmt.Errorf("unable to expand potential directory path: %w", err)
}

fileMeta, err := fs.Stat(dirLocation)
if err != nil {
-return unknownScheme, "", nil
+return UnknownScheme, "", nil
}

if fileMeta.IsDir() {
-return directoryScheme, dirLocation, nil
+return DirectoryScheme, dirLocation, nil
}
-return unknownScheme, "", nil
+return UnknownScheme, "", nil
}

-return imageScheme, imageSpec, nil
+return ImageScheme, imageSpec, nil
}

View file

@@ -1,13 +1,13 @@
package scope

import (
-"github.com/mitchellh/go-homedir"
-"github.com/spf13/afero"
"os"
"testing"

"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/image"
+"github.com/mitchellh/go-homedir"
+"github.com/spf13/afero"
)

func TestNewScopeFromImageFails(t *testing.T) {
@@ -78,8 +78,8 @@ func TestDirectoryScope(t *testing.T) {
if err != nil {
t.Errorf("could not create NewDirScope: %w", err)
}
-if p.DirSrc.Path != test.input {
-t.Errorf("mismatched stringer: '%s' != '%s'", p.DirSrc.Path, test.input)
+if p.Source.(DirSource).Path != test.input {
+t.Errorf("mismatched stringer: '%s' != '%s'", p.Source.(DirSource).Path, test.input)
}

refs, err := p.Resolver.FilesByPath(test.inputPaths...)
@@ -229,7 +229,7 @@ func TestDetectScheme(t *testing.T) {
userInput string
dirs []string
detection detectorResult
-expectedScheme scheme
+expectedScheme Scheme
expectedLocation string
}{
{
@@ -239,7 +239,7 @@ func TestDetectScheme(t *testing.T) {
src: image.DockerDaemonSource,
ref: "wagoodman/dive:latest",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
expectedLocation: "wagoodman/dive:latest",
},
{
@@ -249,7 +249,7 @@ func TestDetectScheme(t *testing.T) {
src: image.DockerDaemonSource,
ref: "wagoodman/dive",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
expectedLocation: "wagoodman/dive",
},
{
@@ -259,7 +259,7 @@ func TestDetectScheme(t *testing.T) {
src: image.DockerDaemonSource,
ref: "wagoodman/dive:latest",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
expectedLocation: "wagoodman/dive:latest",
},
{
@@ -269,7 +269,7 @@ func TestDetectScheme(t *testing.T) {
src: image.DockerDaemonSource,
ref: "wagoodman/dive",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
expectedLocation: "wagoodman/dive",
},
{
@@ -279,7 +279,7 @@ func TestDetectScheme(t *testing.T) {
src: image.DockerDaemonSource,
ref: "latest",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
// we want to be able to handle this case better, however, I don't see a way to do this
// the user will need to provide more explicit input (docker:docker:latest)
expectedLocation: "latest",
@@ -291,7 +291,7 @@ func TestDetectScheme(t *testing.T) {
src: image.DockerDaemonSource,
ref: "docker:latest",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
// we want to be able to handle this case better, however, I don't see a way to do this
// the user will need to provide more explicit input (docker:docker:latest)
expectedLocation: "docker:latest",
@@ -303,7 +303,7 @@ func TestDetectScheme(t *testing.T) {
src: image.OciTarballSource,
ref: "some/path-to-file",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
expectedLocation: "some/path-to-file",
},
{
@@ -314,7 +314,7 @@ func TestDetectScheme(t *testing.T) {
ref: "some/path-to-dir",
},
dirs: []string{"some/path-to-dir"},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
expectedLocation: "some/path-to-dir",
},
{
@@ -325,7 +325,7 @@ func TestDetectScheme(t *testing.T) {
ref: "",
},
dirs: []string{"some/path-to-dir"},
-expectedScheme: directoryScheme,
+expectedScheme: DirectoryScheme,
expectedLocation: "some/path-to-dir",
},
{
@@ -335,7 +335,7 @@ func TestDetectScheme(t *testing.T) {
src: image.DockerDaemonSource,
ref: "some/path-to-dir",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
expectedLocation: "some/path-to-dir",
},
{
@@ -346,7 +346,7 @@ func TestDetectScheme(t *testing.T) {
ref: "",
},
dirs: []string{"some/path-to-dir"},
-expectedScheme: directoryScheme,
+expectedScheme: DirectoryScheme,
expectedLocation: "some/path-to-dir",
},
{
@@ -356,7 +356,7 @@ func TestDetectScheme(t *testing.T) {
src: image.UnknownSource,
ref: "",
},
-expectedScheme: directoryScheme,
+expectedScheme: DirectoryScheme,
expectedLocation: ".",
},
{
@@ -366,7 +366,7 @@ func TestDetectScheme(t *testing.T) {
src: image.UnknownSource,
ref: "",
},
-expectedScheme: directoryScheme,
+expectedScheme: DirectoryScheme,
expectedLocation: ".",
},
// we should support tilde expansion
@@ -377,7 +377,7 @@ func TestDetectScheme(t *testing.T) {
src: image.OciDirectorySource,
ref: "~/some-path",
},
-expectedScheme: imageScheme,
+expectedScheme: ImageScheme,
expectedLocation: "~/some-path",
},
{
@@ -388,26 +388,26 @@ func TestDetectScheme(t *testing.T) {
ref: "",
},
dirs: []string{"~/some-path"},
-expectedScheme: directoryScheme,
+expectedScheme: DirectoryScheme,
expectedLocation: "~/some-path",
},
{
name: "tilde-expansion-dir-explicit-exists",
userInput: "dir:~/some-path",
dirs: []string{"~/some-path"},
-expectedScheme: directoryScheme,
+expectedScheme: DirectoryScheme,
expectedLocation: "~/some-path",
},
{
name: "tilde-expansion-dir-explicit-dne",
userInput: "dir:~/some-path",
-expectedScheme: directoryScheme,
+expectedScheme: DirectoryScheme,
expectedLocation: "~/some-path",
},
{
name: "tilde-expansion-dir-implicit-dne",
userInput: "~/some-path",
-expectedScheme: unknownScheme,
+expectedScheme: UnknownScheme,
expectedLocation: "",
},
}

View file

@@ -108,6 +108,10 @@ func TestJsonSchemaImg(t *testing.T) {
t.Fatalf("failed to catalog image: %+v", err)
}

+var cases []testCase
+cases = append(cases, commonTestCases...)
+cases = append(cases, imageOnlyTestCases...)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
testJsonSchema(t, catalog, theScope, "img")
@@ -121,6 +125,10 @@ func TestJsonSchemaDirs(t *testing.T) {
t.Errorf("unable to create scope from dir: %+v", err)
}

+var cases []testCase
+cases = append(cases, commonTestCases...)
+cases = append(cases, dirOnlyTestCases...)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
testJsonSchema(t, catalog, theScope, "dir")

View file

@@ -4,12 +4,86 @@ package integration
import "github.com/anchore/syft/syft/pkg"

-var cases = []struct {
+type testCase struct {
name string
pkgType pkg.Type
pkgLanguage pkg.Language
pkgInfo map[string]string
-}{
+}
var imageOnlyTestCases = []testCase{
{
name: "find gemspec packages",
pkgType: pkg.GemPkg,
pkgLanguage: pkg.Ruby,
pkgInfo: map[string]string{
"bundler": "2.1.4",
},
},
}
var dirOnlyTestCases = []testCase{
{
name: "find gemfile packages",
pkgType: pkg.GemPkg,
pkgLanguage: pkg.Ruby,
pkgInfo: map[string]string{
"actionmailer": "4.1.1",
"actionpack": "4.1.1",
"actionview": "4.1.1",
"activemodel": "4.1.1",
"activerecord": "4.1.1",
"activesupport": "4.1.1",
"arel": "5.0.1.20140414130214",
"bootstrap-sass": "3.1.1.1",
"builder": "3.2.2",
"coffee-rails": "4.0.1",
"coffee-script": "2.2.0",
"coffee-script-source": "1.7.0",
"erubis": "2.7.0",
"execjs": "2.0.2",
"hike": "1.2.3",
"i18n": "0.6.9",
"jbuilder": "2.0.7",
"jquery-rails": "3.1.0",
"json": "1.8.1",
"kgio": "2.9.2",
"libv8": "3.16.14.3",
"mail": "2.5.4",
"mime-types": "1.25.1",
"minitest": "5.3.4",
"multi_json": "1.10.1",
"mysql2": "0.3.16",
"polyglot": "0.3.4",
"rack": "1.5.2",
"rack-test": "0.6.2",
"rails": "4.1.1",
"railties": "4.1.1",
"raindrops": "0.13.0",
"rake": "10.3.2",
"rdoc": "4.1.1",
"ref": "1.0.5",
"sass": "3.2.19",
"sass-rails": "4.0.3",
"sdoc": "0.4.0",
"spring": "1.1.3",
"sprockets": "2.11.0",
"sprockets-rails": "2.1.3",
"sqlite3": "1.3.9",
"therubyracer": "0.12.1",
"thor": "0.19.1",
"thread_safe": "0.3.3",
"tilt": "1.4.1",
"treetop": "1.4.15",
"turbolinks": "2.2.2",
"tzinfo": "1.2.0",
"uglifier": "2.5.0",
"unicorn": "4.8.3",
},
},
}
var commonTestCases = []testCase{
{
name: "find rpmdb packages",
pkgType: pkg.RpmPkg,
@@ -98,64 +172,6 @@ var cases = []struct {
"mypy": "v0.770",
},
},
{
name: "find bundler packages",
pkgType: pkg.BundlerPkg,
pkgLanguage: pkg.Ruby,
pkgInfo: map[string]string{
"actionmailer": "4.1.1",
"actionpack": "4.1.1",
"actionview": "4.1.1",
"activemodel": "4.1.1",
"activerecord": "4.1.1",
"activesupport": "4.1.1",
"arel": "5.0.1.20140414130214",
"bootstrap-sass": "3.1.1.1",
"builder": "3.2.2",
"coffee-rails": "4.0.1",
"coffee-script": "2.2.0",
"coffee-script-source": "1.7.0",
"erubis": "2.7.0",
"execjs": "2.0.2",
"hike": "1.2.3",
"i18n": "0.6.9",
"jbuilder": "2.0.7",
"jquery-rails": "3.1.0",
"json": "1.8.1",
"kgio": "2.9.2",
"libv8": "3.16.14.3",
"mail": "2.5.4",
"mime-types": "1.25.1",
"minitest": "5.3.4",
"multi_json": "1.10.1",
"mysql2": "0.3.16",
"polyglot": "0.3.4",
"rack": "1.5.2",
"rack-test": "0.6.2",
"rails": "4.1.1",
"railties": "4.1.1",
"raindrops": "0.13.0",
"rake": "10.3.2",
"rdoc": "4.1.1",
"ref": "1.0.5",
"sass": "3.2.19",
"sass-rails": "4.0.3",
"sdoc": "0.4.0",
"spring": "1.1.3",
"sprockets": "2.11.0",
"sprockets-rails": "2.1.3",
"sqlite3": "1.3.9",
"therubyracer": "0.12.1",
"thor": "0.19.1",
"thread_safe": "0.3.3",
"tilt": "1.4.1",
"treetop": "1.4.15",
"turbolinks": "2.2.2",
"tzinfo": "1.2.0",
"uglifier": "2.5.0",
"unicorn": "4.8.3",
},
},
{
name: "find apkdb packages",

View file

@@ -3,9 +3,11 @@
package integration

import (
-"github.com/anchore/stereoscope/pkg/imagetest"
"testing"

+"github.com/anchore/stereoscope/pkg/imagetest"
+"github.com/go-test/deep"
"github.com/anchore/syft/internal"
"github.com/anchore/syft/syft"
"github.com/anchore/syft/syft/pkg"
@@ -35,6 +37,10 @@ func TestPkgCoverageImage(t *testing.T) {
definedPkgs.Add(string(p))
}

+var cases []testCase
+cases = append(cases, commonTestCases...)
+cases = append(cases, imageOnlyTestCases...)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
pkgCount := 0
@@ -81,10 +87,16 @@ func TestPkgCoverageImage(t *testing.T) {
// ensure that integration test cases stay in sync with the available catalogers
if len(observedLanguages) < len(definedLanguages) {
t.Errorf("language coverage incomplete (languages=%d, coverage=%d)", len(definedLanguages), len(observedLanguages))
+for _, d := range deep.Equal(observedLanguages, definedLanguages) {
+t.Errorf("diff: %+v", d)
+}
}

if len(observedPkgs) < len(definedPkgs) {
t.Errorf("package coverage incomplete (packages=%d, coverage=%d)", len(definedPkgs), len(observedPkgs))
+for _, d := range deep.Equal(observedPkgs, definedPkgs) {
+t.Errorf("diff: %+v", d)
+}
}
}
@@ -107,6 +119,10 @@ func TestPkgCoverageDirectory(t *testing.T) {
definedPkgs.Add(string(p))
}

+var cases []testCase
+cases = append(cases, commonTestCases...)
+cases = append(cases, dirOnlyTestCases...)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
pkgCount := 0
@@ -150,7 +166,7 @@ func TestPkgCoverageDirectory(t *testing.T) {
observedPkgs.Remove(string(pkg.UnknownPkg))
definedPkgs.Remove(string(pkg.UnknownPkg))

-// ensure that integration test cases stay in sync with the available catalogers
+// ensure that integration test commonTestCases stay in sync with the available catalogers
if len(observedLanguages) < len(definedLanguages) {
t.Errorf("language coverage incomplete (languages=%d, coverage=%d)", len(definedLanguages), len(observedLanguages))
}

View file

@@ -0,0 +1,25 @@
# frozen_string_literal: true
# -*- encoding: utf-8 -*-
# stub: bundler 2.1.4 ruby lib
Gem::Specification.new do |s|
s.name = "bundler".freeze
s.version = "2.1.4"
s.required_rubygems_version = Gem::Requirement.new(">= 2.5.2".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Andr\u00E9 Arko".freeze, "Samuel Giddins".freeze, "Colby Swandale".freeze, "Hiroshi Shibata".freeze, "David Rodr\u00EDguez".freeze, "Grey Baker".freeze, "Stephanie Morillo".freeze, "Chris Morris".freeze, "James Wen".freeze, "Tim Moore".freeze, "Andr\u00E9 Medeiros".freeze, "Jessica Lynn Suttles".freeze, "Terence Lee".freeze, "Carl Lerche".freeze, "Yehuda Katz".freeze]
s.bindir = "exe".freeze
s.date = "2020-01-05"
s.description = "Bundler manages an application's dependencies through its entire life, across many machines, systematically and repeatably".freeze
s.email = ["team@bundler.io".freeze]
s.executables = ["bundle".freeze, "bundler".freeze]
s.files = ["exe/bundle".freeze, "exe/bundler".freeze]
s.homepage = "https://bundler.io".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.3.0".freeze)
s.rubygems_version = "3.1.2".freeze
s.summary = "The best way to manage your application's dependencies".freeze
s.installed_by_version = "3.1.2" if s.respond_to? :installed_by_version
end