fix tests to use location instead of file.Reference

Signed-off-by: Alex Goodman <alex.goodman@anchore.com>
Alex Goodman 2020-11-13 13:17:12 -05:00
parent b694dacb21
commit aa0d444fd4
40 changed files with 327 additions and 382 deletions
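
This commit migrates catalogers, resolvers, and tests from stereoscope's file.Reference to a new source.Location type. The hunks below only show the call sites, so here is a minimal sketch of the shapes they imply — the field set, any layer-related fields, and the exact Resolver/FileResolver split are assumptions reconstructed from the diff, not the verbatim syft API:

package source

// Location identifies a file that evidence was found at. Path is used as a
// plain string throughout the diff; the JSON schema change also records
// layerID/layerIndex, so the real type presumably carries layer identity as
// well (omitted here as an assumption).
type Location struct {
	Path string
}

// NewLocation is used by the test mocks below in place of file.NewFileReference.
func NewLocation(path string) Location {
	return Location{Path: path}
}

// FileResolver answers "which files exist" style questions.
type FileResolver interface {
	FilesByPath(paths ...string) ([]Location, error)
	FilesByGlob(patterns ...string) ([]Location, error)
	// RelativeFileByPath no longer returns an error; nil means "not found".
	RelativeFileByPath(reference Location, path string) *Location
}

// Resolver additionally exposes file contents, keyed by Location.
type Resolver interface {
	FileResolver
	FileContentsByLocation(Location) (string, error)
	MultipleFileContentsByLocation([]Location) (map[Location]string, error)
}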

go.sum
View file

@ -124,16 +124,12 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/anchore/go-rpmdb v0.0.0-20200811175839-cbc751c28e8e h1:kty6r0R2JeaNPeWKSYDC+HW3hkqwFh4PP5TQ8pUPYFw=
github.com/anchore/go-rpmdb v0.0.0-20200811175839-cbc751c28e8e/go.mod h1:iYuIG0Nai8dR0ri3LhZQKUyO1loxUWAGvoWhXDmjy1A=
github.com/anchore/go-rpmdb v0.0.0-20201106153645-0043963c2e12 h1:xbeIbn5F52JVx3RUIajxCj8b0y+9lywspql4sFhcxWQ= github.com/anchore/go-rpmdb v0.0.0-20201106153645-0043963c2e12 h1:xbeIbn5F52JVx3RUIajxCj8b0y+9lywspql4sFhcxWQ=
github.com/anchore/go-rpmdb v0.0.0-20201106153645-0043963c2e12/go.mod h1:juoyWXIj7sJ1IDl4E/KIfyLtovbs5XQVSIdaQifFQT8= github.com/anchore/go-rpmdb v0.0.0-20201106153645-0043963c2e12/go.mod h1:juoyWXIj7sJ1IDl4E/KIfyLtovbs5XQVSIdaQifFQT8=
github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04 h1:VzprUTpc0vW0nnNKJfJieyH/TZ9UYAnTZs5/gHTdAe8= github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04 h1:VzprUTpc0vW0nnNKJfJieyH/TZ9UYAnTZs5/gHTdAe8=
github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04/go.mod h1:6dK64g27Qi1qGQZ67gFmBFvEHScy0/C8qhQhNe5B5pQ= github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04/go.mod h1:6dK64g27Qi1qGQZ67gFmBFvEHScy0/C8qhQhNe5B5pQ=
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b h1:e1bmaoJfZVsCYMrIZBpFxwV26CbsuoEh5muXD5I1Ods= github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b h1:e1bmaoJfZVsCYMrIZBpFxwV26CbsuoEh5muXD5I1Ods=
github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E= github.com/anchore/go-version v1.2.2-0.20200701162849-18adb9c92b9b/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E=
github.com/anchore/stereoscope v0.0.0-20200925184903-c82da54e98fe h1:m4NSyTo2fVUoUHAV/ZVqE/PFMr/y8oz9HRrhWLk9It0=
github.com/anchore/stereoscope v0.0.0-20200925184903-c82da54e98fe/go.mod h1:2Jja/4l0zYggW52og+nn0rut4i+OYjCf9vTyrM8RT4E=
github.com/anchore/stereoscope v0.0.0-20201106140100-12e75c48f409 h1:xKSpDRjmYrEFrdMeDh4AuSUAFc99pdro6YFBKxy2um0= github.com/anchore/stereoscope v0.0.0-20201106140100-12e75c48f409 h1:xKSpDRjmYrEFrdMeDh4AuSUAFc99pdro6YFBKxy2um0=
github.com/anchore/stereoscope v0.0.0-20201106140100-12e75c48f409/go.mod h1:2Jja/4l0zYggW52og+nn0rut4i+OYjCf9vTyrM8RT4E= github.com/anchore/stereoscope v0.0.0-20201106140100-12e75c48f409/go.mod h1:2Jja/4l0zYggW52og+nn0rut4i+OYjCf9vTyrM8RT4E=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
@ -164,8 +160,6 @@ github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJm
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb h1:m935MPodAbYS46DG4pJSv7WO+VECIWUQ7OJYSoTrMh4= github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb h1:m935MPodAbYS46DG4pJSv7WO+VECIWUQ7OJYSoTrMh4=
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb/go.mod h1:PkYb9DJNAwrSvRx5DYA+gUcOIgTGVMNkfSCbZM8cWpI= github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb/go.mod h1:PkYb9DJNAwrSvRx5DYA+gUcOIgTGVMNkfSCbZM8cWpI=
github.com/blang/semver v3.5.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/blang/semver v3.5.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
github.com/bmatcuk/doublestar v1.3.1 h1:rT8rxDPsavp9G+4ZULzqhhUSaI/OPsTZNG88Z3i0xvY=
github.com/bmatcuk/doublestar v1.3.1/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/bmatcuk/doublestar v1.3.3 h1:pVP1d49CcQQaNOl+PI6sPybIrIOD/6sux31PFdmhTH0= github.com/bmatcuk/doublestar v1.3.3 h1:pVP1d49CcQQaNOl+PI6sPybIrIOD/6sux31PFdmhTH0=
github.com/bmatcuk/doublestar v1.3.3/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= github.com/bmatcuk/doublestar v1.3.3/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/bombsimon/wsl/v2 v2.0.0/go.mod h1:mf25kr/SqFEPhhcxW1+7pxzGlW+hIl/hYTKY95VwV8U= github.com/bombsimon/wsl/v2 v2.0.0/go.mod h1:mf25kr/SqFEPhhcxW1+7pxzGlW+hIl/hYTKY95VwV8U=
@ -301,8 +295,6 @@ github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-test/deep v1.0.6 h1:UHSEyLZUwX9Qoi99vVwvewiMC8mM2bf7XEM2nqvzEn8=
github.com/go-test/deep v1.0.6/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8=
github.com/go-test/deep v1.0.7 h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M= github.com/go-test/deep v1.0.7 h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M=
github.com/go-test/deep v1.0.7/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8= github.com/go-test/deep v1.0.7/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8=
github.com/go-toolsmith/astcast v1.0.0 h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g= github.com/go-toolsmith/astcast v1.0.0 h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g=

View file

@ -80,7 +80,7 @@ func (cfg *Application) Build() error {
cfg.PresenterOpt = presenterOption cfg.PresenterOpt = presenterOption
// set the source // set the source
scopeOption := source.ParseOption(cfg.Scope) scopeOption := source.ParseScope(cfg.Scope)
if scopeOption == source.UnknownScope { if scopeOption == source.UnknownScope {
return fmt.Errorf("bad --scope value '%s'", cfg.Scope) return fmt.Errorf("bad --scope value '%s'", cfg.Scope)
} }

View file

@ -1,6 +1,6 @@
package file package file
// Source: https://research.swtch.com/glob.go // Locations: https://research.swtch.com/glob.go
func GlobMatch(pattern, name string) bool { func GlobMatch(pattern, name string) bool {
px := 0 px := 0
nx := 0 nx := 0

View file

@ -10,6 +10,9 @@
}, },
"type": "array" "type": "array"
}, },
"language": {
"type": "string"
},
"licenses": { "licenses": {
"anyOf": [ "anyOf": [
{ {
@ -24,13 +27,16 @@
] ]
}, },
"locations": { "locations": {
"items": { "anyOf": [
"anyOf": [ {
{ "type": "null"
"type": "string" },
}, {
{ "items": {
"properties": { "properties": {
"layerID": {
"type": "string"
},
"layerIndex": { "layerIndex": {
"type": "integer" "type": "integer"
}, },
@ -39,14 +45,15 @@
} }
}, },
"required": [ "required": [
"layerID",
"layerIndex", "layerIndex",
"path" "path"
], ],
"type": "object" "type": "object"
} },
] "type": "array"
}, }
"type": "array" ]
}, },
"metadata": { "metadata": {
"properties": { "properties": {
@ -315,6 +322,9 @@
}, },
"type": "object" "type": "object"
}, },
"metadataType": {
"type": "string"
},
"name": { "name": {
"type": "string" "type": "string"
}, },
@ -327,8 +337,10 @@
}, },
"required": [ "required": [
"foundBy", "foundBy",
"language",
"licenses", "licenses",
"locations", "locations",
"metadataType",
"name", "name",
"type", "type",
"version" "version"
@ -337,6 +349,44 @@
}, },
"type": "array" "type": "array"
}, },
"descriptor": {
"properties": {
"name": {
"type": "string"
},
"reportTimestamp": {
"type": "string"
},
"version": {
"type": "string"
}
},
"required": [
"name",
"reportTimestamp",
"version"
],
"type": "object"
},
"distro": {
"properties": {
"idLike": {
"type": "string"
},
"name": {
"type": "string"
},
"version": {
"type": "string"
}
},
"required": [
"idLike",
"name",
"version"
],
"type": "object"
},
"source": { "source": {
"properties": { "properties": {
"target": { "target": {
@ -408,6 +458,8 @@
}, },
"required": [ "required": [
"artifacts", "artifacts",
"descriptor",
"distro",
"source" "source"
], ],
"type": "object" "type": "object"

View file

@ -6,7 +6,6 @@ package common
import ( import (
"strings" "strings"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
@ -17,8 +16,8 @@ import (
type GenericCataloger struct { type GenericCataloger struct {
globParsers map[string]ParserFn globParsers map[string]ParserFn
pathParsers map[string]ParserFn pathParsers map[string]ParserFn
selectedFiles []file.Reference selectedFiles []source.Location
parsers map[file.Reference]ParserFn parsers map[source.Location]ParserFn
upstreamCataloger string upstreamCataloger string
} }
@ -27,8 +26,8 @@ func NewGenericCataloger(pathParsers map[string]ParserFn, globParsers map[string
return &GenericCataloger{ return &GenericCataloger{
globParsers: globParsers, globParsers: globParsers,
pathParsers: pathParsers, pathParsers: pathParsers,
selectedFiles: make([]file.Reference, 0), selectedFiles: make([]source.Location, 0),
parsers: make(map[file.Reference]ParserFn), parsers: make(map[source.Location]ParserFn),
upstreamCataloger: upstreamCataloger, upstreamCataloger: upstreamCataloger,
} }
} }
@ -39,7 +38,7 @@ func (c *GenericCataloger) Name() string {
} }
// register pairs a set of file references with a parser function for future cataloging (when the file contents are resolved) // register pairs a set of file references with a parser function for future cataloging (when the file contents are resolved)
func (c *GenericCataloger) register(files []file.Reference, parser ParserFn) { func (c *GenericCataloger) register(files []source.Location, parser ParserFn) {
c.selectedFiles = append(c.selectedFiles, files...) c.selectedFiles = append(c.selectedFiles, files...)
for _, f := range files { for _, f := range files {
c.parsers[f] = parser c.parsers[f] = parser
@ -48,14 +47,14 @@ func (c *GenericCataloger) register(files []file.Reference, parser ParserFn) {
// clear deletes all registered file-reference-to-parser-function pairings from former SelectFiles() and register() calls // clear deletes all registered file-reference-to-parser-function pairings from former SelectFiles() and register() calls
func (c *GenericCataloger) clear() { func (c *GenericCataloger) clear() {
c.selectedFiles = make([]file.Reference, 0) c.selectedFiles = make([]source.Location, 0)
c.parsers = make(map[file.Reference]ParserFn) c.parsers = make(map[source.Location]ParserFn)
} }
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source. // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing the catalog source.
func (c *GenericCataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) { func (c *GenericCataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) {
fileSelection := c.selectFiles(resolver) fileSelection := c.selectFiles(resolver)
contents, err := resolver.MultipleFileContentsByRef(fileSelection...) contents, err := resolver.MultipleFileContentsByLocation(fileSelection)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -63,10 +62,10 @@ func (c *GenericCataloger) Catalog(resolver source.Resolver) ([]pkg.Package, err
} }
// SelectFiles takes a set of file trees and resolves and file references of interest for future cataloging // SelectFiles takes a set of file trees and resolves and file references of interest for future cataloging
func (c *GenericCataloger) selectFiles(resolver source.FileResolver) []file.Reference { func (c *GenericCataloger) selectFiles(resolver source.FileResolver) []source.Location {
// select by exact path // select by exact path
for path, parser := range c.pathParsers { for path, parser := range c.pathParsers {
files, err := resolver.FilesByPath(file.Path(path)) files, err := resolver.FilesByPath(path)
if err != nil { if err != nil {
log.Warnf("cataloger failed to select files by path: %+v", err) log.Warnf("cataloger failed to select files by path: %+v", err)
} }
@ -90,28 +89,28 @@ func (c *GenericCataloger) selectFiles(resolver source.FileResolver) []file.Refe
} }
// catalog takes a set of file contents and uses any configured parser functions to resolve and return discovered packages // catalog takes a set of file contents and uses any configured parser functions to resolve and return discovered packages
func (c *GenericCataloger) catalog(contents map[file.Reference]string) ([]pkg.Package, error) { func (c *GenericCataloger) catalog(contents map[source.Location]string) ([]pkg.Package, error) {
defer c.clear() defer c.clear()
packages := make([]pkg.Package, 0) packages := make([]pkg.Package, 0)
for reference, parser := range c.parsers { for location, parser := range c.parsers {
content, ok := contents[reference] content, ok := contents[location]
if !ok { if !ok {
log.Warnf("cataloger '%s' missing file content: %+v", c.upstreamCataloger, reference) log.Warnf("cataloger '%s' missing file content: %+v", c.upstreamCataloger, location)
continue continue
} }
entries, err := parser(string(reference.Path), strings.NewReader(content)) entries, err := parser(location.Path, strings.NewReader(content))
if err != nil { if err != nil {
// TODO: should we fail? or only log? // TODO: should we fail? or only log?
log.Warnf("cataloger '%s' failed to parse entries (reference=%+v): %+v", c.upstreamCataloger, reference, err) log.Warnf("cataloger '%s' failed to parse entries (location=%+v): %+v", c.upstreamCataloger, location, err)
continue continue
} }
for _, entry := range entries { for _, entry := range entries {
entry.FoundBy = c.upstreamCataloger entry.FoundBy = c.upstreamCataloger
entry.Source = []file.Reference{reference} entry.Locations = []source.Location{location}
packages = append(packages, entry) packages = append(packages, entry)
} }

View file

@ -6,48 +6,50 @@ import (
"io/ioutil" "io/ioutil"
"testing" "testing"
"github.com/anchore/stereoscope/pkg/file" "github.com/anchore/syft/syft/source"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
) )
type testResolverMock struct { type testResolverMock struct {
contents map[file.Reference]string contents map[source.Location]string
} }
func newTestResolver() *testResolverMock { func newTestResolver() *testResolverMock {
return &testResolverMock{ return &testResolverMock{
contents: make(map[file.Reference]string), contents: make(map[source.Location]string),
} }
} }
func (r *testResolverMock) FileContentsByRef(_ file.Reference) (string, error) { func (r *testResolverMock) FileContentsByLocation(_ source.Location) (string, error) {
return "", fmt.Errorf("not implemented") return "", fmt.Errorf("not implemented")
} }
func (r *testResolverMock) MultipleFileContentsByRef(_ ...file.Reference) (map[file.Reference]string, error) { func (r *testResolverMock) MultipleFileContentsByLocation([]source.Location) (map[source.Location]string, error) {
return r.contents, nil return r.contents, nil
} }
func (r *testResolverMock) FilesByPath(paths ...file.Path) ([]file.Reference, error) { func (r *testResolverMock) FilesByPath(paths ...string) ([]source.Location, error) {
results := make([]file.Reference, len(paths)) results := make([]source.Location, len(paths))
for idx, p := range paths { for idx, p := range paths {
results[idx] = file.NewFileReference(p) results[idx] = source.NewLocation(p)
r.contents[results[idx]] = fmt.Sprintf("%s file contents!", p) r.contents[results[idx]] = fmt.Sprintf("%s file contents!", p)
} }
return results, nil return results, nil
} }
func (r *testResolverMock) FilesByGlob(_ ...string) ([]file.Reference, error) { func (r *testResolverMock) FilesByGlob(_ ...string) ([]source.Location, error) {
path := "/a-path.txt" path := "/a-path.txt"
ref := file.NewFileReference(file.Path(path)) location := source.NewLocation(path)
r.contents[ref] = fmt.Sprintf("%s file contents!", path) r.contents[location] = fmt.Sprintf("%s file contents!", path)
return []file.Reference{ref}, nil return []source.Location{location}, nil
} }
func (r *testResolverMock) RelativeFileByPath(_ file.Reference, _ string) (*file.Reference, error) { func (r *testResolverMock) RelativeFileByPath(_ source.Location, _ string) *source.Location {
return nil, fmt.Errorf("not implemented") panic(fmt.Errorf("not implemented"))
return nil
} }
func parser(_ string, reader io.Reader) ([]pkg.Package, error) { func parser(_ string, reader io.Reader) ([]pkg.Package, error) {
@ -94,8 +96,8 @@ func TestGenericCataloger(t *testing.T) {
} }
for _, p := range actualPkgs { for _, p := range actualPkgs {
ref := p.Source[0] ref := p.Locations[0]
exP, ok := expectedPkgs[string(ref.Path)] exP, ok := expectedPkgs[ref.Path]
if !ok { if !ok {
t.Errorf("missing expected pkg: ref=%+v", ref) t.Errorf("missing expected pkg: ref=%+v", ref)
continue continue
@ -106,7 +108,7 @@ func TestGenericCataloger(t *testing.T) {
} }
if exP.Name != p.Name { if exP.Name != p.Name {
t.Errorf("bad contents mapping: %+v", p.Source) t.Errorf("bad contents mapping: %+v", p.Locations)
} }
} }
} }

View file

@ -7,9 +7,9 @@ import (
"fmt" "fmt"
"io" "io"
"path" "path"
"path/filepath"
"strings" "strings"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
@ -40,23 +40,23 @@ func (c *Cataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) {
} }
var pkgs []pkg.Package var pkgs []pkg.Package
for _, dbRef := range dbFileMatches { for _, dbLocation := range dbFileMatches {
dbContents, err := resolver.FileContentsByRef(dbRef) dbContents, err := resolver.FileContentsByLocation(dbLocation)
if err != nil { if err != nil {
return nil, err return nil, err
} }
pkgs, err = parseDpkgStatus(strings.NewReader(dbContents)) pkgs, err = parseDpkgStatus(strings.NewReader(dbContents))
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to catalog dpkg package=%+v: %w", dbRef.Path, err) return nil, fmt.Errorf("unable to catalog dpkg package=%+v: %w", dbLocation.Path, err)
} }
md5ContentsByName, md5RefsByName, err := fetchMd5Contents(resolver, dbRef, pkgs) md5ContentsByName, md5RefsByName, err := fetchMd5Contents(resolver, dbLocation, pkgs)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to find dpkg md5 contents: %w", err) return nil, fmt.Errorf("unable to find dpkg md5 contents: %w", err)
} }
copyrightContentsByName, copyrightRefsByName, err := fetchCopyrightContents(resolver, dbRef, pkgs) copyrightContentsByName, copyrightRefsByName, err := fetchCopyrightContents(resolver, dbLocation, pkgs)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to find dpkg copyright contents: %w", err) return nil, fmt.Errorf("unable to find dpkg copyright contents: %w", err)
} }
@ -64,7 +64,7 @@ func (c *Cataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) {
for i := range pkgs { for i := range pkgs {
p := &pkgs[i] p := &pkgs[i]
p.FoundBy = c.Name() p.FoundBy = c.Name()
p.Source = []file.Reference{dbRef} p.Locations = []source.Location{dbLocation}
if md5Reader, ok := md5ContentsByName[md5Key(*p)]; ok { if md5Reader, ok := md5ContentsByName[md5Key(*p)]; ok {
// attach the file list // attach the file list
@ -74,7 +74,7 @@ func (c *Cataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) {
// keep a record of the file where this was discovered // keep a record of the file where this was discovered
if ref, ok := md5RefsByName[md5Key(*p)]; ok { if ref, ok := md5RefsByName[md5Key(*p)]; ok {
p.Source = append(p.Source, ref) p.Locations = append(p.Locations, ref)
} }
} }
@ -85,7 +85,7 @@ func (c *Cataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) {
// keep a record of the file where this was discovered // keep a record of the file where this was discovered
if ref, ok := copyrightRefsByName[p.Name]; ok { if ref, ok := copyrightRefsByName[p.Name]; ok {
p.Source = append(p.Source, ref) p.Locations = append(p.Locations, ref)
} }
} }
} }
@ -93,93 +93,82 @@ func (c *Cataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) {
return pkgs, nil return pkgs, nil
} }
func fetchMd5Contents(resolver source.Resolver, dbRef file.Reference, pkgs []pkg.Package) (map[string]io.Reader, map[string]file.Reference, error) { func fetchMd5Contents(resolver source.Resolver, dbLocation source.Location, pkgs []pkg.Package) (map[string]io.Reader, map[string]source.Location, error) {
// fetch all MD5 file contents. This approach is more efficient than fetching each MD5 file one at a time // fetch all MD5 file contents. This approach is more efficient than fetching each MD5 file one at a time
var md5FileMatches []file.Reference var md5FileMatches []source.Location
var nameByRef = make(map[file.Reference]string) var nameByRef = make(map[source.Location]string)
parentPath, err := dbRef.Path.ParentPath() parentPath := filepath.Dir(dbLocation.Path)
if err != nil {
return nil, nil, fmt.Errorf("unable to find parent of path=%+v: %w", dbRef.Path, err)
}
for _, p := range pkgs { for _, p := range pkgs {
// look for /var/lib/dpkg/info/NAME:ARCH.md5sums // look for /var/lib/dpkg/info/NAME:ARCH.md5sums
name := md5Key(p) name := md5Key(p)
md5sumPath := path.Join(string(parentPath), "info", name+md5sumsExt) md5sumPath := path.Join(parentPath, "info", name+md5sumsExt)
md5SumRef, err := resolver.RelativeFileByPath(dbRef, md5sumPath) md5SumLocation := resolver.RelativeFileByPath(dbLocation, md5sumPath)
if err != nil {
return nil, nil, fmt.Errorf("unable to find relative md5sum from path=%+v: %w", dbRef.Path, err)
}
if md5SumRef == nil { if md5SumLocation == nil {
// the most specific key did not work, fallback to just the name // the most specific key did not work, fallback to just the name
// look for /var/lib/dpkg/info/NAME.md5sums // look for /var/lib/dpkg/info/NAME.md5sums
name := p.Name name := p.Name
md5sumPath := path.Join(string(parentPath), "info", name+md5sumsExt) md5sumPath := path.Join(parentPath, "info", name+md5sumsExt)
md5SumRef, err = resolver.RelativeFileByPath(dbRef, md5sumPath) md5SumLocation = resolver.RelativeFileByPath(dbLocation, md5sumPath)
if err != nil {
return nil, nil, fmt.Errorf("unable to find relative md5sum from path=%+v: %w", dbRef.Path, err)
}
} }
// we should have at least one reference // we should have at least one reference
if md5SumRef != nil { if md5SumLocation != nil {
md5FileMatches = append(md5FileMatches, *md5SumRef) md5FileMatches = append(md5FileMatches, *md5SumLocation)
nameByRef[*md5SumRef] = name nameByRef[*md5SumLocation] = name
} }
} }
// fetch the md5 contents // fetch the md5 contents
md5ContentsByRef, err := resolver.MultipleFileContentsByRef(md5FileMatches...) md5ContentsByLocation, err := resolver.MultipleFileContentsByLocation(md5FileMatches)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
// organize content results and refs by a combination of name and architecture // organize content results and refs by a combination of name and architecture
var contentsByName = make(map[string]io.Reader) var contentsByName = make(map[string]io.Reader)
var refsByName = make(map[string]file.Reference) var refsByName = make(map[string]source.Location)
for ref, contents := range md5ContentsByRef { for location, contents := range md5ContentsByLocation {
name := nameByRef[ref] name := nameByRef[location]
contentsByName[name] = strings.NewReader(contents) contentsByName[name] = strings.NewReader(contents)
refsByName[name] = ref refsByName[name] = location
} }
return contentsByName, refsByName, nil return contentsByName, refsByName, nil
} }
func fetchCopyrightContents(resolver source.Resolver, dbRef file.Reference, pkgs []pkg.Package) (map[string]io.Reader, map[string]file.Reference, error) { func fetchCopyrightContents(resolver source.Resolver, dbLocation source.Location, pkgs []pkg.Package) (map[string]io.Reader, map[string]source.Location, error) {
// fetch all copyright file contents. This approach is more efficient than fetching each copyright file one at a time // fetch all copyright file contents. This approach is more efficient than fetching each copyright file one at a time
var copyrightFileMatches []file.Reference var copyrightFileMatches []source.Location
var nameByRef = make(map[file.Reference]string) var nameByLocation = make(map[source.Location]string)
for _, p := range pkgs { for _, p := range pkgs {
// look for /usr/share/docs/NAME/copyright files // look for /usr/share/docs/NAME/copyright files
name := p.Name name := p.Name
copyrightPath := path.Join(docsPath, name, "copyright") copyrightPath := path.Join(docsPath, name, "copyright")
copyrightRef, err := resolver.RelativeFileByPath(dbRef, copyrightPath) copyrightLocation := resolver.RelativeFileByPath(dbLocation, copyrightPath)
if err != nil {
return nil, nil, fmt.Errorf("unable to find relative copyright from path=%+v: %w", dbRef.Path, err)
}
// we may not have a copyright file for each package, ignore missing files // we may not have a copyright file for each package, ignore missing files
if copyrightRef != nil { if copyrightLocation != nil {
copyrightFileMatches = append(copyrightFileMatches, *copyrightRef) copyrightFileMatches = append(copyrightFileMatches, *copyrightLocation)
nameByRef[*copyrightRef] = name nameByLocation[*copyrightLocation] = name
} }
} }
// fetch the copyright contents // fetch the copyright contents
copyrightContentsByRef, err := resolver.MultipleFileContentsByRef(copyrightFileMatches...) copyrightContentsByLocation, err := resolver.MultipleFileContentsByLocation(copyrightFileMatches)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
// organize content results and refs by package name // organize content results and refs by package name
var contentsByName = make(map[string]io.Reader) var contentsByName = make(map[string]io.Reader)
var refsByName = make(map[string]file.Reference) var refsByName = make(map[string]source.Location)
for ref, contents := range copyrightContentsByRef { for location, contents := range copyrightContentsByLocation {
name := nameByRef[ref] name := nameByLocation[location]
contentsByName[name] = strings.NewReader(contents) contentsByName[name] = strings.NewReader(contents)
refsByName[name] = ref refsByName[name] = location
} }
return contentsByName, refsByName, nil return contentsByName, refsByName, nil

View file

@ -77,11 +77,11 @@ func TestDpkgCataloger(t *testing.T) {
for idx := range actual { for idx := range actual {
a := &actual[idx] a := &actual[idx]
// we will test the sources separately // we will test the sources separately
var sourcesList = make([]string, len(a.Source)) var sourcesList = make([]string, len(a.Locations))
for i, s := range a.Source { for i, s := range a.Locations {
sourcesList[i] = string(s.Path) sourcesList[i] = string(s.Path)
} }
a.Source = nil a.Locations = nil
for _, d := range deep.Equal(sourcesList, test.sources[a.Name]) { for _, d := range deep.Equal(sourcesList, test.sources[a.Name]) {
t.Errorf("diff: %+v", d) t.Errorf("diff: %+v", d)

View file

@ -8,8 +8,6 @@ import (
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
@ -35,7 +33,7 @@ func (c *PackageCataloger) Name() string {
// Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing python egg and wheel installations. // Catalog is given an object to resolve file references and content, this function returns any discovered Packages after analyzing python egg and wheel installations.
func (c *PackageCataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) { func (c *PackageCataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) {
// nolint:prealloc // nolint:prealloc
var fileMatches []file.Reference var fileMatches []source.Location
for _, glob := range []string{eggMetadataGlob, wheelMetadataGlob} { for _, glob := range []string{eggMetadataGlob, wheelMetadataGlob} {
matches, err := resolver.FilesByGlob(glob) matches, err := resolver.FilesByGlob(glob)
@ -46,10 +44,10 @@ func (c *PackageCataloger) Catalog(resolver source.Resolver) ([]pkg.Package, err
} }
var pkgs []pkg.Package var pkgs []pkg.Package
for _, ref := range fileMatches { for _, location := range fileMatches {
p, err := c.catalogEggOrWheel(resolver, ref) p, err := c.catalogEggOrWheel(resolver, location)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to catalog python package=%+v: %w", ref.Path, err) return nil, fmt.Errorf("unable to catalog python package=%+v: %w", location.Path, err)
} }
if p != nil { if p != nil {
pkgs = append(pkgs, *p) pkgs = append(pkgs, *p)
@ -59,8 +57,8 @@ func (c *PackageCataloger) Catalog(resolver source.Resolver) ([]pkg.Package, err
} }
// catalogEggOrWheel takes the primary metadata file reference and returns the python package it represents. // catalogEggOrWheel takes the primary metadata file reference and returns the python package it represents.
func (c *PackageCataloger) catalogEggOrWheel(resolver source.Resolver, metadataRef file.Reference) (*pkg.Package, error) { func (c *PackageCataloger) catalogEggOrWheel(resolver source.Resolver, metadataLocation source.Location) (*pkg.Package, error) {
metadata, sources, err := c.assembleEggOrWheelMetadata(resolver, metadataRef) metadata, sources, err := c.assembleEggOrWheelMetadata(resolver, metadataLocation)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -74,7 +72,7 @@ func (c *PackageCataloger) catalogEggOrWheel(resolver source.Resolver, metadataR
Name: metadata.Name, Name: metadata.Name,
Version: metadata.Version, Version: metadata.Version,
FoundBy: c.Name(), FoundBy: c.Name(),
Source: sources, Locations: sources,
Licenses: licenses, Licenses: licenses,
Language: pkg.Python, Language: pkg.Python,
Type: pkg.PythonPkg, Type: pkg.PythonPkg,
@ -84,22 +82,19 @@ func (c *PackageCataloger) catalogEggOrWheel(resolver source.Resolver, metadataR
} }
// fetchRecordFiles finds a corresponding RECORD file for the given python package metadata file and returns the set of file records contained. // fetchRecordFiles finds a corresponding RECORD file for the given python package metadata file and returns the set of file records contained.
func (c *PackageCataloger) fetchRecordFiles(resolver source.Resolver, metadataRef file.Reference) (files []pkg.PythonFileRecord, sources []file.Reference, err error) { func (c *PackageCataloger) fetchRecordFiles(resolver source.Resolver, metadataLocation source.Location) (files []pkg.PythonFileRecord, sources []source.Location, err error) {
// we've been given a file reference to a specific wheel METADATA file. note: this may be for a directory // we've been given a file reference to a specific wheel METADATA file. note: this may be for a directory
// or for an image... for an image the METADATA file may be present within multiple layers, so it is important // or for an image... for an image the METADATA file may be present within multiple layers, so it is important
// to reconcile the RECORD path to the same layer (or the next adjacent lower layer). // to reconcile the RECORD path to the same layer (or the next adjacent lower layer).
// lets find the RECORD file relative to the directory where the METADATA file resides (in path AND layer structure) // lets find the RECORD file relative to the directory where the METADATA file resides (in path AND layer structure)
recordPath := filepath.Join(filepath.Dir(string(metadataRef.Path)), "RECORD") recordPath := filepath.Join(filepath.Dir(metadataLocation.Path), "RECORD")
recordRef, err := resolver.RelativeFileByPath(metadataRef, recordPath) recordRef := resolver.RelativeFileByPath(metadataLocation, recordPath)
if err != nil {
return nil, nil, err
}
if recordRef != nil { if recordRef != nil {
sources = append(sources, *recordRef) sources = append(sources, *recordRef)
recordContents, err := resolver.FileContentsByRef(*recordRef) recordContents, err := resolver.FileContentsByLocation(*recordRef)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -116,22 +111,20 @@ func (c *PackageCataloger) fetchRecordFiles(resolver source.Resolver, metadataRe
} }
// fetchTopLevelPackages finds a corresponding top_level.txt file for the given python package metadata file and returns the set of package names contained. // fetchTopLevelPackages finds a corresponding top_level.txt file for the given python package metadata file and returns the set of package names contained.
func (c *PackageCataloger) fetchTopLevelPackages(resolver source.Resolver, metadataRef file.Reference) (pkgs []string, sources []file.Reference, err error) { func (c *PackageCataloger) fetchTopLevelPackages(resolver source.Resolver, metadataLocation source.Location) (pkgs []string, sources []source.Location, err error) {
// a top_level.txt file specifies the python top-level packages (provided by this python package) installed into site-packages // a top_level.txt file specifies the python top-level packages (provided by this python package) installed into site-packages
parentDir := filepath.Dir(string(metadataRef.Path)) parentDir := filepath.Dir(metadataLocation.Path)
topLevelPath := filepath.Join(parentDir, "top_level.txt") topLevelPath := filepath.Join(parentDir, "top_level.txt")
topLevelRef, err := resolver.RelativeFileByPath(metadataRef, topLevelPath) topLevelRef := resolver.RelativeFileByPath(metadataLocation, topLevelPath)
if err != nil {
return nil, nil, err
}
if topLevelRef == nil { if topLevelRef == nil {
log.Warnf("missing python package top_level.txt (package=%q)", string(metadataRef.Path)) log.Warnf("missing python package top_level.txt (package=%q)", metadataLocation.Path)
return nil, nil, nil return nil, nil, nil
} }
sources = append(sources, *topLevelRef) sources = append(sources, *topLevelRef)
topLevelContents, err := resolver.FileContentsByRef(*topLevelRef) topLevelContents, err := resolver.FileContentsByLocation(*topLevelRef)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -149,21 +142,21 @@ func (c *PackageCataloger) fetchTopLevelPackages(resolver source.Resolver, metad
} }
// assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from. // assembleEggOrWheelMetadata discovers and accumulates python package metadata from multiple file sources and returns a single metadata object as well as a list of files where the metadata was derived from.
func (c *PackageCataloger) assembleEggOrWheelMetadata(resolver source.Resolver, metadataRef file.Reference) (*pkg.PythonPackageMetadata, []file.Reference, error) { func (c *PackageCataloger) assembleEggOrWheelMetadata(resolver source.Resolver, metadataLocation source.Location) (*pkg.PythonPackageMetadata, []source.Location, error) {
var sources = []file.Reference{metadataRef} var sources = []source.Location{metadataLocation}
metadataContents, err := resolver.FileContentsByRef(metadataRef) metadataContents, err := resolver.FileContentsByLocation(metadataLocation)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
metadata, err := parseWheelOrEggMetadata(metadataRef.Path, strings.NewReader(metadataContents)) metadata, err := parseWheelOrEggMetadata(metadataLocation.Path, strings.NewReader(metadataContents))
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
// attach any python files found for the given wheel/egg installation // attach any python files found for the given wheel/egg installation
r, s, err := c.fetchRecordFiles(resolver, metadataRef) r, s, err := c.fetchRecordFiles(resolver, metadataLocation)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -171,7 +164,7 @@ func (c *PackageCataloger) assembleEggOrWheelMetadata(resolver source.Resolver,
metadata.Files = r metadata.Files = r
// attach any top-level package names found for the given wheel/egg installation // attach any top-level package names found for the given wheel/egg installation
p, s, err := c.fetchTopLevelPackages(resolver, metadataRef) p, s, err := c.fetchTopLevelPackages(resolver, metadataLocation)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }

View file

@ -8,7 +8,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/anchore/stereoscope/pkg/file" "github.com/anchore/syft/syft/source"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/go-test/deep" "github.com/go-test/deep"
@ -18,10 +18,10 @@ type pythonTestResolverMock struct {
metadataReader io.Reader metadataReader io.Reader
recordReader io.Reader recordReader io.Reader
topLevelReader io.Reader topLevelReader io.Reader
metadataRef *file.Reference metadataRef *source.Location
recordRef *file.Reference recordRef *source.Location
topLevelRef *file.Reference topLevelRef *source.Location
contents map[file.Reference]string contents map[source.Location]string
} }
func newTestResolver(metaPath, recordPath, topPath string) *pythonTestResolverMock { func newTestResolver(metaPath, recordPath, topPath string) *pythonTestResolverMock {
@ -46,17 +46,17 @@ func newTestResolver(metaPath, recordPath, topPath string) *pythonTestResolverMo
} }
} }
var recordRef *file.Reference var recordRef *source.Location
if recordReader != nil { if recordReader != nil {
ref := file.NewFileReference("test-fixtures/dist-info/RECORD") ref := source.NewLocation("test-fixtures/dist-info/RECORD")
recordRef = &ref recordRef = &ref
} }
var topLevelRef *file.Reference var topLevelRef *source.Location
if topLevelReader != nil { if topLevelReader != nil {
ref := file.NewFileReference("test-fixtures/dist-info/top_level.txt") ref := source.NewLocation("test-fixtures/dist-info/top_level.txt")
topLevelRef = &ref topLevelRef = &ref
} }
metadataRef := file.NewFileReference("test-fixtures/dist-info/METADATA") metadataRef := source.NewLocation("test-fixtures/dist-info/METADATA")
return &pythonTestResolverMock{ return &pythonTestResolverMock{
recordReader: recordReader, recordReader: recordReader,
metadataReader: metadataReader, metadataReader: metadataReader,
@ -64,11 +64,11 @@ func newTestResolver(metaPath, recordPath, topPath string) *pythonTestResolverMo
metadataRef: &metadataRef, metadataRef: &metadataRef,
recordRef: recordRef, recordRef: recordRef,
topLevelRef: topLevelRef, topLevelRef: topLevelRef,
contents: make(map[file.Reference]string), contents: make(map[source.Location]string),
} }
} }
func (r *pythonTestResolverMock) FileContentsByRef(ref file.Reference) (string, error) { func (r *pythonTestResolverMock) FileContentsByLocation(ref source.Location) (string, error) {
switch { switch {
case r.topLevelRef != nil && ref.Path == r.topLevelRef.Path: case r.topLevelRef != nil && ref.Path == r.topLevelRef.Path:
b, err := ioutil.ReadAll(r.topLevelReader) b, err := ioutil.ReadAll(r.topLevelReader)
@ -92,25 +92,25 @@ func (r *pythonTestResolverMock) FileContentsByRef(ref file.Reference) (string,
return "", fmt.Errorf("invalid value given") return "", fmt.Errorf("invalid value given")
} }
func (r *pythonTestResolverMock) MultipleFileContentsByRef(_ ...file.Reference) (map[file.Reference]string, error) { func (r *pythonTestResolverMock) MultipleFileContentsByLocation(_ []source.Location) (map[source.Location]string, error) {
return nil, fmt.Errorf("not implemented") return nil, fmt.Errorf("not implemented")
} }
func (r *pythonTestResolverMock) FilesByPath(_ ...file.Path) ([]file.Reference, error) { func (r *pythonTestResolverMock) FilesByPath(_ ...string) ([]source.Location, error) {
return nil, fmt.Errorf("not implemented") return nil, fmt.Errorf("not implemented")
} }
func (r *pythonTestResolverMock) FilesByGlob(_ ...string) ([]file.Reference, error) { func (r *pythonTestResolverMock) FilesByGlob(_ ...string) ([]source.Location, error) {
return nil, fmt.Errorf("not implemented") return nil, fmt.Errorf("not implemented")
} }
func (r *pythonTestResolverMock) RelativeFileByPath(_ file.Reference, path string) (*file.Reference, error) { func (r *pythonTestResolverMock) RelativeFileByPath(_ source.Location, path string) *source.Location {
switch { switch {
case strings.Contains(path, "RECORD"): case strings.Contains(path, "RECORD"):
return r.recordRef, nil return r.recordRef
case strings.Contains(path, "top_level.txt"): case strings.Contains(path, "top_level.txt"):
return r.topLevelRef, nil return r.topLevelRef
default: default:
return nil, fmt.Errorf("invalid RelativeFileByPath value given: %q", path) panic(fmt.Errorf("invalid RelativeFileByPath value given: %q", path))
} }
} }
@ -214,13 +214,13 @@ func TestPythonPackageWheelCataloger(t *testing.T) {
resolver := newTestResolver(test.MetadataFixture, test.RecordFixture, test.TopLevelFixture) resolver := newTestResolver(test.MetadataFixture, test.RecordFixture, test.TopLevelFixture)
// note that the source is the record ref created by the resolver mock... attach the expected values // note that the source is the record ref created by the resolver mock... attach the expected values
test.ExpectedPackage.Source = []file.Reference{*resolver.metadataRef} test.ExpectedPackage.Locations = []source.Location{*resolver.metadataRef}
if resolver.recordRef != nil { if resolver.recordRef != nil {
test.ExpectedPackage.Source = append(test.ExpectedPackage.Source, *resolver.recordRef) test.ExpectedPackage.Locations = append(test.ExpectedPackage.Locations, *resolver.recordRef)
} }
if resolver.topLevelRef != nil { if resolver.topLevelRef != nil {
test.ExpectedPackage.Source = append(test.ExpectedPackage.Source, *resolver.topLevelRef) test.ExpectedPackage.Locations = append(test.ExpectedPackage.Locations, *resolver.topLevelRef)
} }
// end patching expected values with runtime data... // end patching expected values with runtime data...

View file

@ -7,8 +7,6 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/anchore/stereoscope/pkg/file"
"github.com/mitchellh/mapstructure" "github.com/mitchellh/mapstructure"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
@ -16,7 +14,7 @@ import (
// parseWheelOrEggMetadata takes a Python Egg or Wheel (which share the same format and values for our purposes), // parseWheelOrEggMetadata takes a Python Egg or Wheel (which share the same format and values for our purposes),
// returning all Python packages listed. // returning all Python packages listed.
func parseWheelOrEggMetadata(path file.Path, reader io.Reader) (pkg.PythonPackageMetadata, error) { func parseWheelOrEggMetadata(path string, reader io.Reader) (pkg.PythonPackageMetadata, error) {
fields := make(map[string]string) fields := make(map[string]string)
var key string var key string
@ -73,7 +71,7 @@ func parseWheelOrEggMetadata(path file.Path, reader io.Reader) (pkg.PythonPackag
// add additional metadata not stored in the egg/wheel metadata file // add additional metadata not stored in the egg/wheel metadata file
sitePackagesRoot := filepath.Clean(filepath.Join(filepath.Dir(string(path)), "..")) sitePackagesRoot := filepath.Clean(filepath.Join(filepath.Dir(path), ".."))
metadata.SitePackagesRootPath = sitePackagesRoot metadata.SitePackagesRootPath = sitePackagesRoot
return metadata, nil return metadata, nil

View file

@ -4,8 +4,6 @@ import (
"os" "os"
"testing" "testing"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/go-test/deep" "github.com/go-test/deep"
) )
@ -48,7 +46,7 @@ func TestParseWheelEggMetadata(t *testing.T) {
t.Fatalf("failed to open fixture: %+v", err) t.Fatalf("failed to open fixture: %+v", err)
} }
actual, err := parseWheelOrEggMetadata(file.Path(test.Fixture), fixture) actual, err := parseWheelOrEggMetadata(test.Fixture, fixture)
if err != nil { if err != nil {
t.Fatalf("failed to parse: %+v", err) t.Fatalf("failed to parse: %+v", err)
} }

View file

@ -35,15 +35,15 @@ func (c *Cataloger) Catalog(resolver source.Resolver) ([]pkg.Package, error) {
} }
var pkgs []pkg.Package var pkgs []pkg.Package
for _, dbRef := range fileMatches { for _, location := range fileMatches {
dbContents, err := resolver.FileContentsByRef(dbRef) dbContents, err := resolver.FileContentsByLocation(location)
if err != nil { if err != nil {
return nil, err return nil, err
} }
pkgs, err = parseRpmDB(resolver, dbRef, strings.NewReader(dbContents)) pkgs, err = parseRpmDB(resolver, location, strings.NewReader(dbContents))
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to catalog rpmdb package=%+v: %w", dbRef.Path, err) return nil, fmt.Errorf("unable to catalog rpmdb package=%+v: %w", location.Path, err)
} }
} }
return pkgs, nil return pkgs, nil

View file

@ -6,19 +6,15 @@ import (
"io/ioutil" "io/ioutil"
"os" "os"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/scope"
"github.com/anchore/syft/syft/source"
rpmdb "github.com/anchore/go-rpmdb/pkg" rpmdb "github.com/anchore/go-rpmdb/pkg"
"github.com/anchore/syft/internal" "github.com/anchore/syft/internal"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
) )
// parseApkDb parses an "Packages" RPM DB and returns the Packages listed within it. // parseApkDb parses an "Packages" RPM DB and returns the Packages listed within it.
func parseRpmDB(resolver scope.FileResolver, dbRef file.Reference, reader io.Reader) ([]pkg.Package, error) { func parseRpmDB(resolver source.FileResolver, dbLocation source.Location, reader io.Reader) ([]pkg.Package, error) {
f, err := ioutil.TempFile("", internal.ApplicationName+"-rpmdb") f, err := ioutil.TempFile("", internal.ApplicationName+"-rpmdb")
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to create temp rpmdb file: %w", err) return nil, fmt.Errorf("failed to create temp rpmdb file: %w", err)
@ -58,7 +54,7 @@ func parseRpmDB(resolver scope.FileResolver, dbRef file.Reference, reader io.Rea
Name: entry.Name, Name: entry.Name,
Version: fmt.Sprintf("%s-%s", entry.Version, entry.Release), // this is what engine does Version: fmt.Sprintf("%s-%s", entry.Version, entry.Release), // this is what engine does
//Version: fmt.Sprintf("%d:%s-%s.%s", entry.Epoch, entry.Version, entry.Release, entry.Arch), //Version: fmt.Sprintf("%d:%s-%s.%s", entry.Epoch, entry.Version, entry.Release, entry.Arch),
Source: []file.Reference{dbRef}, Locations: []source.Location{dbLocation},
Type: pkg.RpmPkg, Type: pkg.RpmPkg,
MetadataType: pkg.RpmdbMetadataType, MetadataType: pkg.RpmdbMetadataType,
Metadata: pkg.RpmdbMetadata{ Metadata: pkg.RpmdbMetadata{
@ -85,7 +81,7 @@ func extractRpmdbFileRecords(resolver source.FileResolver, entry *rpmdb.PackageI
var records = make([]pkg.RpmdbFileRecord, 0) var records = make([]pkg.RpmdbFileRecord, 0)
for _, record := range entry.Files { for _, record := range entry.Files {
refs, err := resolver.FilesByPath(file.Path(record.Path)) refs, err := resolver.FilesByPath(record.Path)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to resolve path=%+v: %w", record.Path, err) return nil, fmt.Errorf("failed to resolve path=%+v: %w", record.Path, err)
} }

View file

@ -5,7 +5,8 @@ import (
"os" "os"
"testing" "testing"
"github.com/anchore/stereoscope/pkg/file" "github.com/anchore/syft/syft/source"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/go-test/deep" "github.com/go-test/deep"
) )
@ -20,24 +21,25 @@ func newTestFileResolver(ignorePaths bool) *rpmdbTestFileResolverMock {
} }
} }
func (r *rpmdbTestFileResolverMock) FilesByPath(paths ...file.Path) ([]file.Reference, error) { func (r *rpmdbTestFileResolverMock) FilesByPath(paths ...string) ([]source.Location, error) {
if r.ignorePaths { if r.ignorePaths {
// act as if no paths exist // act as if no paths exist
return nil, nil return nil, nil
} }
// act as if all files exist // act as if all files exist
var refs = make([]file.Reference, len(paths)) var locations = make([]source.Location, len(paths))
for i, p := range paths { for i, p := range paths {
refs[i] = file.NewFileReference(p) locations[i] = source.NewLocation(p)
} }
return refs, nil return locations, nil
} }
func (r *rpmdbTestFileResolverMock) FilesByGlob(...string) ([]file.Reference, error) { func (r *rpmdbTestFileResolverMock) FilesByGlob(...string) ([]source.Location, error) {
return nil, fmt.Errorf("not implemented") return nil, fmt.Errorf("not implemented")
} }
func (r *rpmdbTestFileResolverMock) RelativeFileByPath(file.Reference, string) (*file.Reference, error) { func (r *rpmdbTestFileResolverMock) RelativeFileByPath(source.Location, string) *source.Location {
return nil, fmt.Errorf("not implemented") panic(fmt.Errorf("not implemented"))
return nil
} }
func TestParseRpmDB(t *testing.T) { func TestParseRpmDB(t *testing.T) {

View file

@ -4,7 +4,6 @@ import (
"regexp" "regexp"
"strings" "strings"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
@ -13,7 +12,7 @@ import (
type parseFunc func(string) *Distro type parseFunc func(string) *Distro
type parseEntry struct { type parseEntry struct {
path file.Path path string
fn parseFunc fn parseFunc
} }
@ -41,25 +40,19 @@ func Identify(resolver source.Resolver) Distro {
identifyLoop: identifyLoop:
for _, entry := range identityFiles { for _, entry := range identityFiles {
refs, err := resolver.FilesByPath(entry.path) locations, err := resolver.FilesByPath(entry.path)
if err != nil { if err != nil {
log.Errorf("unable to get path refs from %s: %s", entry.path, err) log.Errorf("unable to get path locations from %s: %s", entry.path, err)
break break
} }
if len(refs) == 0 { if len(locations) == 0 {
log.Debugf("No Refs found from path: %s", entry.path) log.Debugf("No Refs found from path: %s", entry.path)
continue continue
} }
for _, ref := range refs { for _, location := range locations {
contents, err := resolver.MultipleFileContentsByRef(ref) content, err := resolver.FileContentsByLocation(location)
content, ok := contents[ref]
if !ok {
log.Infof("no content present for ref: %s", ref)
continue
}
if err != nil { if err != nil {
log.Debugf("unable to get contents from %s: %s", entry.path, err) log.Debugf("unable to get contents from %s: %s", entry.path, err)

View file

@ -7,8 +7,8 @@ Here is what the main execution path for syft does:
2. Invoke all catalogers to catalog the image, adding discovered packages to a single catalog object 2. Invoke all catalogers to catalog the image, adding discovered packages to a single catalog object
3. Invoke a single presenter to show the contents of the catalog 3. Invoke a single presenter to show the contents of the catalog
A Source object encapsulates the image object to be cataloged and the user options (catalog all layers vs. squashed layer), A Locations object encapsulates the image object to be cataloged and the user options (catalog all layers vs. squashed layer),
providing a way to inspect paths and file content within the image. The Source object, not the image object, is used providing a way to inspect paths and file content within the image. The Locations object, not the image object, is used
throughout the main execution path. This abstraction allows for decoupling of what is cataloged (a docker image, an OCI throughout the main execution path. This abstraction allows for decoupling of what is cataloged (a docker image, an OCI
image, a filesystem, etc) and how it is cataloged (the individual catalogers). image, a filesystem, etc) and how it is cataloged (the individual catalogers).
@ -110,8 +110,8 @@ func CatalogFromJSON(reader io.Reader) (*pkg.Catalog, *distro.Distro, error) {
} }
//var theImg *jsonPresenter.Image //var theImg *jsonPresenter.Image
//if doc.Source.Type == "image" { //if doc.Locations.Type == "image" {
// img := doc.Source.Target.(jsonPresenter.Image) // img := doc.Locations.Target.(jsonPresenter.Image)
// theImg = &img // theImg = &img
//} //}

View file

@ -4,7 +4,8 @@ import (
"sort" "sort"
"sync" "sync"
"github.com/anchore/stereoscope/pkg/file" "github.com/anchore/syft/syft/source"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
) )
@ -14,7 +15,7 @@ var nextPackageID int64
type Catalog struct { type Catalog struct {
byID map[ID]*Package byID map[ID]*Package
byType map[Type][]*Package byType map[Type][]*Package
byFile map[file.Reference][]*Package byFile map[source.Location][]*Package
lock sync.RWMutex lock sync.RWMutex
} }
@ -23,7 +24,7 @@ func NewCatalog(pkgs ...Package) *Catalog {
catalog := Catalog{ catalog := Catalog{
byID: make(map[ID]*Package), byID: make(map[ID]*Package),
byType: make(map[Type][]*Package), byType: make(map[Type][]*Package),
byFile: make(map[file.Reference][]*Package), byFile: make(map[source.Location][]*Package),
} }
for _, p := range pkgs { for _, p := range pkgs {
@ -44,8 +45,8 @@ func (c *Catalog) Package(id ID) *Package {
} }
// PackagesByFile returns all packages that were discovered from the given source file reference. // PackagesByFile returns all packages that were discovered from the given source file reference.
func (c *Catalog) PackagesByFile(ref file.Reference) []*Package { func (c *Catalog) PackagesByFile(location source.Location) []*Package {
return c.byFile[ref] return c.byFile[location]
} }
// Add a package to the Catalog. // Add a package to the Catalog.
@ -71,7 +72,7 @@ func (c *Catalog) Add(p Package) {
c.byType[p.Type] = append(c.byType[p.Type], &p) c.byType[p.Type] = append(c.byType[p.Type], &p)
// store by file references // store by file references
for _, s := range p.Source { for _, s := range p.Locations {
_, ok := c.byFile[s] _, ok := c.byFile[s]
if !ok { if !ok {
c.byFile[s] = make([]*Package, 0) c.byFile[s] = make([]*Package, 0)

View file

@ -9,7 +9,7 @@ import (
// at http://manpages.ubuntu.com/manpages/xenial/man1/dpkg-query.1.html in the --showformat section. // at http://manpages.ubuntu.com/manpages/xenial/man1/dpkg-query.1.html in the --showformat section.
type DpkgMetadata struct { type DpkgMetadata struct {
Package string `mapstructure:"Package" json:"package"` Package string `mapstructure:"Package" json:"package"`
Source string `mapstructure:"Source" json:"source"` Source string `mapstructure:"Locations" json:"source"`
Version string `mapstructure:"Version" json:"version"` Version string `mapstructure:"Version" json:"version"`
Architecture string `mapstructure:"Architecture" json:"architecture"` Architecture string `mapstructure:"Architecture" json:"architecture"`
Maintainer string `mapstructure:"Maintainer" json:"maintainer"` Maintainer string `mapstructure:"Maintainer" json:"maintainer"`

View file

@ -8,7 +8,8 @@ import (
"regexp" "regexp"
"strings" "strings"
"github.com/anchore/stereoscope/pkg/file" "github.com/anchore/syft/syft/source"
"github.com/anchore/syft/syft/distro" "github.com/anchore/syft/syft/distro"
"github.com/package-url/packageurl-go" "github.com/package-url/packageurl-go"
) )
@ -17,12 +18,11 @@ type ID int64
// Package represents an application or library that has been bundled into a distributable format. // Package represents an application or library that has been bundled into a distributable format.
type Package struct { type Package struct {
id ID // uniquely identifies a package, set by the cataloger id ID // uniquely identifies a package, set by the cataloger
Name string `json:"manifest"` // the package name Name string `json:"manifest"` // the package name
Version string `json:"version"` // the version of the package Version string `json:"version"` // the version of the package
FoundBy string `json:"foundBy"` // the specific cataloger that discovered this package FoundBy string `json:"foundBy"` // the specific cataloger that discovered this package
Source []file.Reference `json:"-"` // the locations that lead to the discovery of this package (note: this is not necessarily the locations that make up this package) Locations []source.Location `json:"-"` // the locations that lead to the discovery of this package (note: this is not necessarily the locations that make up this package)
Location interface{} `json:"locations"`
// TODO: should we move licenses into metadata? // TODO: should we move licenses into metadata?
Licenses []string `json:"licenses"` // licenses discovered with the package metadata Licenses []string `json:"licenses"` // licenses discovered with the package metadata
Language Language `json:"language"` // the language ecosystem this package belongs to (e.g. JavaScript, Python, etc) Language Language `json:"language"` // the language ecosystem this package belongs to (e.g. JavaScript, Python, etc)

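
To make the renamed field concrete, a hedged sketch of constructing a Package by hand; the names and path are made up, and Locations records what led to discovery (e.g. a metadata file), not the files the package owns:

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/pkg"
	"github.com/anchore/syft/syft/source"
)

func main() {
	p := pkg.Package{
		Name:     "requests",
		Version:  "2.22.0",
		Type:     pkg.PythonPkg,
		Language: pkg.Python,
		FoundBy:  "the-cataloger-1",
		// the file that led to discovery, not the package's own contents
		Locations: []source.Location{
			source.NewLocation("/usr/lib/python3/dist-packages/requests/METADATA"),
		},
	}
	fmt.Printf("%s %s found via %s\n", p.Name, p.Version, p.Locations[0].Path)
}
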
View file

@ -8,7 +8,7 @@ import (
"github.com/anchore/syft/internal/version" "github.com/anchore/syft/internal/version"
) )
// Source: https://cyclonedx.org/ext/bom-descriptor/ // Source: https://cyclonedx.org/ext/bom-descriptor/
// BomDescriptor represents all metadata surrounding the BOM report (such as when the BOM was made, with which tool, and the item being cataloged). // BomDescriptor represents all metadata surrounding the BOM report (such as when the BOM was made, with which tool, and the item being cataloged).
type BomDescriptor struct { type BomDescriptor struct {

View file

@ -9,7 +9,7 @@ import (
"github.com/google/uuid" "github.com/google/uuid"
) )
// Source: https://github.com/CycloneDX/specification // Source: https://github.com/CycloneDX/specification
// Document represents a CycloneDX BOM Document. // Document represents a CycloneDX BOM Document.
type Document struct { type Document struct {

View file

@ -14,14 +14,14 @@ import (
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
) )
// Presenter writes a CycloneDX report from the given Catalog and Source contents // Presenter writes a CycloneDX report from the given Catalog and Source contents
type Presenter struct { type Presenter struct {
catalog *pkg.Catalog catalog *pkg.Catalog
source source.Source source source.Source
distro distro.Distro distro distro.Distro
} }
// NewPresenter creates a CycloneDX presenter from the given Catalog and Source objects. // NewPresenter creates a CycloneDX presenter from the given Catalog and Source objects.
func NewPresenter(catalog *pkg.Catalog, s source.Source, d distro.Distro) *Presenter { func NewPresenter(catalog *pkg.Catalog, s source.Source, d distro.Distro) *Presenter {
return &Presenter{ return &Presenter{
catalog: catalog, catalog: catalog,

View file

@ -10,7 +10,6 @@ import (
"github.com/anchore/syft/syft/distro" "github.com/anchore/syft/syft/distro"
"github.com/anchore/go-testutils" "github.com/anchore/go-testutils"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
"github.com/sergi/go-diff/diffmatchpatch" "github.com/sergi/go-diff/diffmatchpatch"
@ -29,7 +28,7 @@ func TestCycloneDxDirsPresenter(t *testing.T) {
Version: "1.0.1", Version: "1.0.1",
Type: pkg.DebPkg, Type: pkg.DebPkg,
FoundBy: "the-cataloger-1", FoundBy: "the-cataloger-1",
Source: []file.Reference{ Locations: []source.Location{
{Path: "/some/path/pkg1"}, {Path: "/some/path/pkg1"},
}, },
Metadata: pkg.DpkgMetadata{ Metadata: pkg.DpkgMetadata{
@ -43,7 +42,7 @@ func TestCycloneDxDirsPresenter(t *testing.T) {
Version: "2.0.1", Version: "2.0.1",
Type: pkg.DebPkg, Type: pkg.DebPkg,
FoundBy: "the-cataloger-2", FoundBy: "the-cataloger-2",
Source: []file.Reference{ Locations: []source.Location{
{Path: "/some/path/pkg1"}, {Path: "/some/path/pkg1"},
}, },
Licenses: []string{ Licenses: []string{
@ -105,8 +104,8 @@ func TestCycloneDxImgsPresenter(t *testing.T) {
catalog.Add(pkg.Package{ catalog.Add(pkg.Package{
Name: "package1", Name: "package1",
Version: "1.0.1", Version: "1.0.1",
Source: []file.Reference{ Locations: []source.Location{
*img.SquashedTree().File("/somefile-1.txt"), source.NewLocationFromImage(*img.SquashedTree().File("/somefile-1.txt"), img),
}, },
Type: pkg.RpmPkg, Type: pkg.RpmPkg,
FoundBy: "the-cataloger-1", FoundBy: "the-cataloger-1",
@ -125,8 +124,8 @@ func TestCycloneDxImgsPresenter(t *testing.T) {
catalog.Add(pkg.Package{ catalog.Add(pkg.Package{
Name: "package2", Name: "package2",
Version: "2.0.1", Version: "2.0.1",
Source: []file.Reference{ Locations: []source.Location{
*img.SquashedTree().File("/somefile-2.txt"), source.NewLocationFromImage(*img.SquashedTree().File("/somefile-2.txt"), img),
}, },
Type: pkg.RpmPkg, Type: pkg.RpmPkg,
FoundBy: "the-cataloger-2", FoundBy: "the-cataloger-2",

View file

@ -14,13 +14,13 @@ type Artifact struct {
} }
type ArtifactBasicMetadata struct { type ArtifactBasicMetadata struct {
Name string `json:"name"` Name string `json:"name"`
Version string `json:"version"` Version string `json:"version"`
Type string `json:"type"` Type string `json:"type"`
FoundBy []string `json:"foundBy"` FoundBy []string `json:"foundBy"`
Locations Locations `json:"locations,omitempty"` Locations []source.Location `json:"locations"`
Licenses []string `json:"licenses"` Licenses []string `json:"licenses"`
Language string `json:"language"` Language string `json:"language"`
} }
type ArtifactCustomMetadata struct { type ArtifactCustomMetadata struct {
@ -34,10 +34,6 @@ type ArtifactMetadataUnpacker struct {
} }
func NewArtifact(p *pkg.Package, s source.Source) (Artifact, error) { func NewArtifact(p *pkg.Package, s source.Source) (Artifact, error) {
locations, err := NewLocations(p, s)
if err != nil {
return Artifact{}, err
}
return Artifact{ return Artifact{
ArtifactBasicMetadata: ArtifactBasicMetadata{ ArtifactBasicMetadata: ArtifactBasicMetadata{
@ -45,7 +41,7 @@ func NewArtifact(p *pkg.Package, s source.Source) (Artifact, error) {
Version: p.Version, Version: p.Version,
Type: string(p.Type), Type: string(p.Type),
FoundBy: []string{p.FoundBy}, FoundBy: []string{p.FoundBy},
Locations: locations, Locations: p.Locations,
Licenses: p.Licenses, Licenses: p.Licenses,
Language: string(p.Language), Language: string(p.Language),
}, },

View file

@ -1,45 +0,0 @@
package json
import (
"fmt"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
)
type Locations interface{}
type ImageLocation struct {
Path string `json:"path"`
LayerIndex uint `json:"layerIndex"`
}
func NewLocations(p *pkg.Package, s source.Source) (Locations, error) {
switch src := s.Target.(type) {
case source.ImageSource:
locations := make([]ImageLocation, len(p.Source))
for idx := range p.Source {
entry, err := src.Img.FileCatalog.Get(p.Source[idx])
if err != nil {
return nil, fmt.Errorf("unable to find layer index for source-idx=%d package=%s", idx, p.Name)
}
artifactSource := ImageLocation{
LayerIndex: entry.Source.Metadata.Index,
Path: string(p.Source[idx].Path),
}
locations[idx] = artifactSource
}
return locations, nil
case source.DirSource:
locations := make([]string, len(p.Source))
for idx := range p.Source {
locations[idx] = string(p.Source[idx].Path)
}
return locations, nil
default:
return nil, fmt.Errorf("unable to determine source: %T", src)
}
}

View file

@ -6,7 +6,6 @@ import (
"testing" "testing"
"github.com/anchore/go-testutils" "github.com/anchore/go-testutils"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/distro" "github.com/anchore/syft/syft/distro"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
@ -27,7 +26,7 @@ func TestJsonDirsPresenter(t *testing.T) {
Version: "1.0.1", Version: "1.0.1",
Type: pkg.PythonPkg, Type: pkg.PythonPkg,
FoundBy: "the-cataloger-1", FoundBy: "the-cataloger-1",
Source: []file.Reference{ Locations: []source.Location{
{Path: "/some/path/pkg1"}, {Path: "/some/path/pkg1"},
}, },
Language: pkg.Python, Language: pkg.Python,
@ -43,7 +42,7 @@ func TestJsonDirsPresenter(t *testing.T) {
Version: "2.0.1", Version: "2.0.1",
Type: pkg.DebPkg, Type: pkg.DebPkg,
FoundBy: "the-cataloger-2", FoundBy: "the-cataloger-2",
Source: []file.Reference{ Locations: []source.Location{
{Path: "/some/path/pkg1"}, {Path: "/some/path/pkg1"},
}, },
MetadataType: pkg.DpkgMetadataType, MetadataType: pkg.DpkgMetadataType,
@ -96,8 +95,8 @@ func TestJsonImgsPresenter(t *testing.T) {
catalog.Add(pkg.Package{ catalog.Add(pkg.Package{
Name: "package-1", Name: "package-1",
Version: "1.0.1", Version: "1.0.1",
Source: []file.Reference{ Locations: []source.Location{
*img.SquashedTree().File("/somefile-1.txt"), source.NewLocationFromImage(*img.SquashedTree().File("/somefile-1.txt"), img),
}, },
Type: pkg.PythonPkg, Type: pkg.PythonPkg,
FoundBy: "the-cataloger-1", FoundBy: "the-cataloger-1",
@ -112,8 +111,8 @@ func TestJsonImgsPresenter(t *testing.T) {
catalog.Add(pkg.Package{ catalog.Add(pkg.Package{
Name: "package-2", Name: "package-2",
Version: "2.0.1", Version: "2.0.1",
Source: []file.Reference{ Locations: []source.Location{
*img.SquashedTree().File("/somefile-2.txt"), source.NewLocationFromImage(*img.SquashedTree().File("/somefile-2.txt"), img),
}, },
Type: pkg.DebPkg, Type: pkg.DebPkg,
FoundBy: "the-cataloger-2", FoundBy: "the-cataloger-2",

View file

@ -8,7 +8,6 @@ import (
"github.com/go-test/deep" "github.com/go-test/deep"
"github.com/anchore/go-testutils" "github.com/anchore/go-testutils"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
@ -30,16 +29,16 @@ func TestTablePresenter(t *testing.T) {
catalog.Add(pkg.Package{ catalog.Add(pkg.Package{
Name: "package-1", Name: "package-1",
Version: "1.0.1", Version: "1.0.1",
Source: []file.Reference{ Locations: []source.Location{
*img.SquashedTree().File("/somefile-1.txt"), source.NewLocationFromImage(*img.SquashedTree().File("/somefile-1.txt"), img),
}, },
Type: pkg.DebPkg, Type: pkg.DebPkg,
}) })
catalog.Add(pkg.Package{ catalog.Add(pkg.Package{
Name: "package-2", Name: "package-2",
Version: "2.0.1", Version: "2.0.1",
Source: []file.Reference{ Locations: []source.Location{
*img.SquashedTree().File("/somefile-2.txt"), source.NewLocationFromImage(*img.SquashedTree().File("/somefile-2.txt"), img),
}, },
Type: pkg.DebPkg, Type: pkg.DebPkg,
}) })

View file

@ -6,7 +6,6 @@ import (
"testing" "testing"
"github.com/anchore/go-testutils" "github.com/anchore/go-testutils"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/syft/syft/pkg" "github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source" "github.com/anchore/syft/syft/source"
@ -75,8 +74,8 @@ func TestTextImgPresenter(t *testing.T) {
catalog.Add(pkg.Package{ catalog.Add(pkg.Package{
Name: "package-1", Name: "package-1",
Version: "1.0.1", Version: "1.0.1",
Source: []file.Reference{ Locations: []source.Location{
*img.SquashedTree().File("/somefile-1.txt"), source.NewLocationFromImage(*img.SquashedTree().File("/somefile-1.txt"), img),
}, },
FoundBy: "dpkg", FoundBy: "dpkg",
Type: pkg.DebPkg, Type: pkg.DebPkg,
@ -84,8 +83,8 @@ func TestTextImgPresenter(t *testing.T) {
catalog.Add(pkg.Package{ catalog.Add(pkg.Package{
Name: "package-2", Name: "package-2",
Version: "2.0.1", Version: "2.0.1",
Source: []file.Reference{ Locations: []source.Location{
*img.SquashedTree().File("/somefile-2.txt"), source.NewLocationFromImage(*img.SquashedTree().File("/somefile-2.txt"), img),
}, },
FoundBy: "dpkg", FoundBy: "dpkg",
Metadata: PackageInfo{Name: "package-2", Version: "1.0.2"}, Metadata: PackageInfo{Name: "package-2", Version: "1.0.2"},

View file

@ -94,7 +94,7 @@ func (r *AllLayersResolver) FilesByPath(paths ...string) ([]Location, error) {
return nil, err return nil, err
} }
for _, result := range results { for _, result := range results {
uniqueLocations = append(uniqueLocations, newLocationFromImage(result, r.img)) uniqueLocations = append(uniqueLocations, NewLocationFromImage(result, r.img))
} }
} }
} }
@ -132,7 +132,7 @@ func (r *AllLayersResolver) FilesByGlob(patterns ...string) ([]Location, error)
return nil, err return nil, err
} }
for _, result := range results { for _, result := range results {
uniqueLocations = append(uniqueLocations, newLocationFromImage(result, r.img)) uniqueLocations = append(uniqueLocations, NewLocationFromImage(result, r.img))
} }
} }
} }
@ -152,20 +152,20 @@ func (r *AllLayersResolver) RelativeFileByPath(location Location, path string) *
return nil return nil
} }
relativeLocation := newLocationFromImage(*relativeRef, r.img) relativeLocation := NewLocationFromImage(*relativeRef, r.img)
return &relativeLocation return &relativeLocation
} }
// MultipleFileContentsByRef returns the file contents for all file.References relative to the image. Note that a // MultipleFileContentsByLocation returns the file contents for all locations relative to the image. Note that a
// file.Reference is a path relative to a particular layer. // file.Reference is a path relative to a particular layer.
func (r *AllLayersResolver) MultipleFileContentsByRef(locations []Location) (map[Location]string, error) { func (r *AllLayersResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) {
return mapLocationRefs(r.img.MultipleFileContentsByRef, locations) return mapLocationRefs(r.img.MultipleFileContentsByRef, locations)
} }
// FileContentsByRef fetches file contents for a single file reference, irregardless of the source layer. // FileContentsByLocation fetches file contents for a single location, regardless of the source layer.
// If the path does not exist an error is returned. // If the path does not exist an error is returned.
func (r *AllLayersResolver) FileContentsByRef(location Location) (string, error) { func (r *AllLayersResolver) FileContentsByLocation(location Location) (string, error) {
return r.img.FileContentsByRef(location.ref) return r.img.FileContentsByRef(location.ref)
} }

View file

@ -4,8 +4,6 @@ import (
"testing" "testing"
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/stereoscope/pkg/file"
) )
type resolution struct { type resolution struct {
@ -97,7 +95,7 @@ func TestAllLayersResolver_FilesByPath(t *testing.T) {
t.Fatalf("could not create resolver: %+v", err) t.Fatalf("could not create resolver: %+v", err)
} }
refs, err := resolver.FilesByPath(file.Path(c.linkPath)) refs, err := resolver.FilesByPath(c.linkPath)
if err != nil { if err != nil {
t.Fatalf("could not use resolver: %+v", err) t.Fatalf("could not use resolver: %+v", err)
} }
@ -109,11 +107,11 @@ func TestAllLayersResolver_FilesByPath(t *testing.T) {
for idx, actual := range refs { for idx, actual := range refs {
expected := c.resolutions[idx] expected := c.resolutions[idx]
if actual.Path != file.Path(expected.path) { if actual.Path != expected.path {
t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, expected.path) t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, expected.path)
} }
entry, err := img.FileCatalog.Get(actual) entry, err := img.FileCatalog.Get(actual.ref)
if err != nil { if err != nil {
t.Fatalf("failed to get metadata: %+v", err) t.Fatalf("failed to get metadata: %+v", err)
} }
@ -222,11 +220,11 @@ func TestAllLayersResolver_FilesByGlob(t *testing.T) {
for idx, actual := range refs { for idx, actual := range refs {
expected := c.resolutions[idx] expected := c.resolutions[idx]
if actual.Path != file.Path(expected.path) { if actual.Path != expected.path {
t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, expected.path) t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, expected.path)
} }
entry, err := img.FileCatalog.Get(actual) entry, err := img.FileCatalog.Get(actual.ref)
if err != nil { if err != nil {
t.Fatalf("failed to get metadata: %+v", err) t.Fatalf("failed to get metadata: %+v", err)
} }

View file

@ -7,8 +7,6 @@ import (
"path" "path"
"path/filepath" "path/filepath"
"github.com/docker/distribution/reference"
"github.com/anchore/syft/internal/log" "github.com/anchore/syft/internal/log"
"github.com/bmatcuk/doublestar" "github.com/bmatcuk/doublestar"
) )
@ -48,7 +46,7 @@ func (s DirectoryResolver) FilesByPath(userPaths ...string) ([]Location, error)
continue continue
} }
references = append(references, newLocation(userStrPath)) references = append(references, NewLocation(userStrPath))
} }
return references, nil return references, nil
@ -75,7 +73,7 @@ func (s DirectoryResolver) FilesByGlob(patterns ...string) ([]Location, error) {
continue continue
} }
result = append(result, newLocation(matchedPath)) result = append(result, NewLocation(matchedPath))
} }
} }
@ -95,7 +93,7 @@ func (s *DirectoryResolver) RelativeFileByPath(_ Location, path string) *Locatio
} }
// MultipleFileContentsByRef returns the file contents for all file.References relative a directory. // MultipleFileContentsByLocation returns the file contents for all locations relative to a directory.
func (s DirectoryResolver) MultipleFileContentsByRef(locations []Location) (map[Location]string, error) { func (s DirectoryResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) {
refContents := make(map[Location]string) refContents := make(map[Location]string)
for _, location := range locations { for _, location := range locations {
contents, err := fileContents(location.Path) contents, err := fileContents(location.Path)
@ -110,10 +108,10 @@ func (s DirectoryResolver) MultipleFileContentsByRef(locations []Location) (map[
// FileContentsByRef fetches file contents for a single file reference relative to a directory. // FileContentsByLocation fetches file contents for a single location relative to a directory.
// If the path does not exist an error is returned. // If the path does not exist an error is returned.
func (s DirectoryResolver) FileContentsByRef(location Location) (string, error) { func (s DirectoryResolver) FileContentsByLocation(location Location) (string, error) {
contents, err := fileContents(location.Path) contents, err := fileContents(location.Path)
if err != nil { if err != nil {
return "", fmt.Errorf("could not read contents of file: %s", reference.Path) return "", fmt.Errorf("could not read contents of file: %s", location.Path)
} }
return string(contents), nil return string(contents), nil

View file

@ -2,8 +2,6 @@ package source
import ( import (
"testing" "testing"
"github.com/anchore/stereoscope/pkg/file"
) )
func TestDirectoryResolver_FilesByPath(t *testing.T) { func TestDirectoryResolver_FilesByPath(t *testing.T) {
@ -58,7 +56,7 @@ func TestDirectoryResolver_FilesByPath(t *testing.T) {
for _, c := range cases { for _, c := range cases {
t.Run(c.name, func(t *testing.T) { t.Run(c.name, func(t *testing.T) {
resolver := DirectoryResolver{c.root} resolver := DirectoryResolver{c.root}
refs, err := resolver.FilesByPath(file.Path(c.input)) refs, err := resolver.FilesByPath(c.input)
if err != nil { if err != nil {
t.Fatalf("could not use resolver: %+v, %+v", err, refs) t.Fatalf("could not use resolver: %+v, %+v", err, refs)
} }
@ -68,7 +66,7 @@ func TestDirectoryResolver_FilesByPath(t *testing.T) {
} }
for _, actual := range refs { for _, actual := range refs {
if actual.Path != file.Path(c.expected) { if actual.Path != c.expected {
t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, c.expected) t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, c.expected)
} }
} }
@ -79,22 +77,22 @@ func TestDirectoryResolver_FilesByPath(t *testing.T) {
func TestDirectoryResolver_MultipleFilesByPath(t *testing.T) { func TestDirectoryResolver_MultipleFilesByPath(t *testing.T) {
cases := []struct { cases := []struct {
name string name string
input []file.Path input []string
refCount int refCount int
}{ }{
{ {
name: "finds multiple files", name: "finds multiple files",
input: []file.Path{file.Path("test-fixtures/image-symlinks/file-1.txt"), file.Path("test-fixtures/image-symlinks/file-2.txt")}, input: []string{"test-fixtures/image-symlinks/file-1.txt", "test-fixtures/image-symlinks/file-2.txt"},
refCount: 2, refCount: 2,
}, },
{ {
name: "skips non-existing files", name: "skips non-existing files",
input: []file.Path{file.Path("test-fixtures/image-symlinks/bogus.txt"), file.Path("test-fixtures/image-symlinks/file-1.txt")}, input: []string{"test-fixtures/image-symlinks/bogus.txt", "test-fixtures/image-symlinks/file-1.txt"},
refCount: 1, refCount: 1,
}, },
{ {
name: "does not return anything for non-existing directories", name: "does not return anything for non-existing directories",
input: []file.Path{file.Path("test-fixtures/non-existing/bogus.txt"), file.Path("test-fixtures/non-existing/file-1.txt")}, input: []string{"test-fixtures/non-existing/bogus.txt", "test-fixtures/non-existing/file-1.txt"},
refCount: 0, refCount: 0,
}, },
} }
@ -117,47 +115,47 @@ func TestDirectoryResolver_MultipleFilesByPath(t *testing.T) {
func TestDirectoryResolver_MultipleFileContentsByRef(t *testing.T) { func TestDirectoryResolver_MultipleFileContentsByRef(t *testing.T) {
cases := []struct { cases := []struct {
name string name string
input []file.Path input []string
refCount int refCount int
contents []string contents []string
}{ }{
{ {
name: "gets multiple file contents", name: "gets multiple file contents",
input: []file.Path{file.Path("test-fixtures/image-symlinks/file-1.txt"), file.Path("test-fixtures/image-symlinks/file-2.txt")}, input: []string{"test-fixtures/image-symlinks/file-1.txt", "test-fixtures/image-symlinks/file-2.txt"},
refCount: 2, refCount: 2,
}, },
{ {
name: "skips non-existing files", name: "skips non-existing files",
input: []file.Path{file.Path("test-fixtures/image-symlinks/bogus.txt"), file.Path("test-fixtures/image-symlinks/file-1.txt")}, input: []string{"test-fixtures/image-symlinks/bogus.txt", "test-fixtures/image-symlinks/file-1.txt"},
refCount: 1, refCount: 1,
}, },
{ {
name: "does not return anything for non-existing directories", name: "does not return anything for non-existing directories",
input: []file.Path{file.Path("test-fixtures/non-existing/bogus.txt"), file.Path("test-fixtures/non-existing/file-1.txt")}, input: []string{"test-fixtures/non-existing/bogus.txt", "test-fixtures/non-existing/file-1.txt"},
refCount: 0, refCount: 0,
}, },
} }
for _, c := range cases { for _, c := range cases {
t.Run(c.name, func(t *testing.T) { t.Run(c.name, func(t *testing.T) {
refs := make([]file.Reference, 0) locations := make([]Location, 0)
resolver := DirectoryResolver{"test-fixtures"} resolver := DirectoryResolver{"test-fixtures"}
for _, p := range c.input { for _, p := range c.input {
newRefs, err := resolver.FilesByPath(p) newRefs, err := resolver.FilesByPath(p)
if err != nil { if err != nil {
t.Errorf("could not generate refs: %+v", err) t.Errorf("could not generate locations: %+v", err)
} }
for _, ref := range newRefs { for _, ref := range newRefs {
refs = append(refs, ref) locations = append(locations, ref)
} }
} }
contents, err := resolver.MultipleFileContentsByRef(refs...) contents, err := resolver.MultipleFileContentsByLocation(locations)
if err != nil { if err != nil {
t.Fatalf("unable to generate file contents by ref: %+v", err) t.Fatalf("unable to generate file contents by ref: %+v", err)
} }
if len(contents) != c.refCount { if len(contents) != c.refCount {
t.Errorf("unexpected number of refs produced: %d != %d", len(contents), c.refCount) t.Errorf("unexpected number of locations produced: %d != %d", len(contents), c.refCount)
} }
}) })

View file

@ -56,7 +56,7 @@ func (r *ImageSquashResolver) FilesByPath(paths ...string) ([]Location, error) {
if resolvedRef != nil && !uniqueFileIDs.Contains(*resolvedRef) { if resolvedRef != nil && !uniqueFileIDs.Contains(*resolvedRef) {
uniqueFileIDs.Add(*resolvedRef) uniqueFileIDs.Add(*resolvedRef)
uniqueLocations = append(uniqueLocations, newLocationFromImage(*resolvedRef, r.img)) uniqueLocations = append(uniqueLocations, NewLocationFromImage(*resolvedRef, r.img))
} }
} }
@ -118,12 +118,12 @@ func (r *ImageSquashResolver) RelativeFileByPath(_ Location, path string) *Locat
// MultipleFileContentsByRef returns the file contents for all file.References relative to the image. Note that a // MultipleFileContentsByLocation returns the file contents for all locations relative to the image. Note that a
// file.Reference is a path relative to a particular layer, in this case only from the squashed representation. // file.Reference is a path relative to a particular layer, in this case only from the squashed representation.
func (r *ImageSquashResolver) MultipleFileContentsByRef(locations []Location) (map[Location]string, error) { func (r *ImageSquashResolver) MultipleFileContentsByLocation(locations []Location) (map[Location]string, error) {
return mapLocationRefs(r.img.MultipleFileContentsByRef, locations) return mapLocationRefs(r.img.MultipleFileContentsByRef, locations)
} }
// FileContentsByRef fetches file contents for a single file reference, irregardless of the source layer. // FileContentsByLocation fetches file contents for a single location, regardless of the source layer.
// If the path does not exist an error is returned. // If the path does not exist an error is returned.
func (r *ImageSquashResolver) FileContentsByRef(location Location) (string, error) { func (r *ImageSquashResolver) FileContentsByLocation(location Location) (string, error) {
return r.img.FileContentsByRef(location.ref) return r.img.FileContentsByRef(location.ref)
} }

View file

@ -4,8 +4,6 @@ import (
"testing" "testing"
"github.com/anchore/stereoscope/pkg/imagetest" "github.com/anchore/stereoscope/pkg/imagetest"
"github.com/anchore/stereoscope/pkg/file"
) )
func TestImageSquashResolver_FilesByPath(t *testing.T) { func TestImageSquashResolver_FilesByPath(t *testing.T) {
@ -61,7 +59,7 @@ func TestImageSquashResolver_FilesByPath(t *testing.T) {
t.Fatalf("could not create resolver: %+v", err) t.Fatalf("could not create resolver: %+v", err)
} }
refs, err := resolver.FilesByPath(file.Path(c.linkPath)) refs, err := resolver.FilesByPath(c.linkPath)
if err != nil { if err != nil {
t.Fatalf("could not use resolver: %+v", err) t.Fatalf("could not use resolver: %+v", err)
} }
@ -82,11 +80,11 @@ func TestImageSquashResolver_FilesByPath(t *testing.T) {
actual := refs[0] actual := refs[0]
if actual.Path != file.Path(c.resolvePath) { if actual.Path != c.resolvePath {
t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, c.resolvePath) t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, c.resolvePath)
} }
entry, err := img.FileCatalog.Get(actual) entry, err := img.FileCatalog.Get(actual.ref)
if err != nil { if err != nil {
t.Fatalf("failed to get metadata: %+v", err) t.Fatalf("failed to get metadata: %+v", err)
} }
@ -172,11 +170,11 @@ func TestImageSquashResolver_FilesByGlob(t *testing.T) {
actual := refs[0] actual := refs[0]
if actual.Path != file.Path(c.resolvePath) { if actual.Path != c.resolvePath {
t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, c.resolvePath) t.Errorf("bad resolve path: '%s'!='%s'", actual.Path, c.resolvePath)
} }
entry, err := img.FileCatalog.Get(actual) entry, err := img.FileCatalog.Get(actual.ref)
if err != nil { if err != nil {
t.Fatalf("failed to get metadata: %+v", err) t.Fatalf("failed to get metadata: %+v", err)
} }

View file

@ -14,20 +14,13 @@ type Location struct {
ref file.Reference ref file.Reference
} }
func newLocation(path string) Location { func NewLocation(path string) Location {
return Location{ return Location{
Path: path, Path: path,
} }
} }
func newLocationFromRef(ref file.Reference) Location { func NewLocationFromImage(ref file.Reference, img *image.Image) Location {
return Location{
Path: string(ref.Path),
ref: ref,
}
}
func newLocationFromImage(ref file.Reference, img *image.Image) Location {
entry, err := img.FileCatalog.Get(ref) entry, err := img.FileCatalog.Get(ref)
if err != nil { if err != nil {
log.Warnf("unable to find file catalog entry for ref=%+v", ref) log.Warnf("unable to find file catalog entry for ref=%+v", ref)

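
A quick sketch of the two exported constructors that remain after this change: NewLocation for directory-based resolvers where only a path is known, and NewLocationFromImage for image-based resolvers that also hold a stereoscope file.Reference and its owning image (building a real image is out of scope here, so only the path form is shown running):

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/source"
)

func main() {
	// directory resolvers only know a path; the underlying file reference stays empty
	loc := source.NewLocation("/etc/os-release")
	fmt.Println(loc.Path)

	// image resolvers instead call source.NewLocationFromImage(ref, img), which
	// consults the image's file catalog to record which layer the reference lives in
}
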
View file

@ -14,8 +14,8 @@ type Resolver interface {
// ContentResolver knows how to get file content for given file.References // ContentResolver knows how to get file content for given Locations
type ContentResolver interface { type ContentResolver interface {
FileContentsByRef(Location) (string, error) FileContentsByLocation(Location) (string, error)
MultipleFileContentsByRef([]Location) (map[Location]string, error) MultipleFileContentsByLocation([]Location) (map[Location]string, error)
// TODO: we should consider refactoring to return a set of io.Readers or file.Openers instead of the full contents themselves (allow for optional buffering). // TODO: we should consider refactoring to return a set of io.Readers or file.Openers instead of the full contents themselves (allow for optional buffering).
} }

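
For illustration, a small self-contained sketch of satisfying and consuming the renamed interface; the fakeResolver type below is hypothetical and relies only on the two method signatures shown in this hunk:

package main

import (
	"fmt"

	"github.com/anchore/syft/syft/source"
)

// fakeResolver is a toy in-memory ContentResolver used only for this sketch.
type fakeResolver struct {
	contents map[string]string
}

func (r fakeResolver) FileContentsByLocation(l source.Location) (string, error) {
	c, ok := r.contents[l.Path]
	if !ok {
		return "", fmt.Errorf("no contents for %s", l.Path)
	}
	return c, nil
}

func (r fakeResolver) MultipleFileContentsByLocation(locations []source.Location) (map[source.Location]string, error) {
	result := make(map[source.Location]string)
	for _, l := range locations {
		if c, ok := r.contents[l.Path]; ok {
			result[l] = c
		}
	}
	return result, nil
}

func main() {
	var resolver source.ContentResolver = fakeResolver{
		contents: map[string]string{"/etc/os-release": "ID=debian"},
	}

	contents, err := resolver.FileContentsByLocation(source.NewLocation("/etc/os-release"))
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(contents)
}
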
View file

@ -21,7 +21,7 @@ var Options = []Scope{
AllLayersScope, AllLayersScope,
} }
func ParseOption(userStr string) Scope { func ParseScope(userStr string) Scope {
switch strings.ToLower(userStr) { switch strings.ToLower(userStr) {
case strings.ToLower(SquashedScope.String()): case strings.ToLower(SquashedScope.String()):
return SquashedScope return SquashedScope

View file

@ -4,7 +4,6 @@ import (
"os" "os"
"testing" "testing"
"github.com/anchore/stereoscope/pkg/file"
"github.com/anchore/stereoscope/pkg/image" "github.com/anchore/stereoscope/pkg/image"
"github.com/mitchellh/go-homedir" "github.com/mitchellh/go-homedir"
"github.com/spf13/afero" "github.com/spf13/afero"
@ -36,10 +35,10 @@ func TestNewScopeFromImage(t *testing.T) {
Layers: []*image.Layer{layer}, Layers: []*image.Layer{layer},
} }
t.Run("create a new Source object from image", func(t *testing.T) { t.Run("create a new Locations object from image", func(t *testing.T) {
_, err := NewFromImage(&img, AllLayersScope) _, err := NewFromImage(&img, AllLayersScope)
if err != nil { if err != nil {
t.Errorf("unexpected error when creating a new Source from img: %w", err) t.Errorf("unexpected error when creating a new Locations from img: %+v", err)
} }
}) })
} }
@ -49,31 +48,31 @@ func TestDirectoryScope(t *testing.T) {
desc string desc string
input string input string
expString string expString string
inputPaths []file.Path inputPaths []string
expRefs int expRefs int
}{ }{
{ {
desc: "no paths exist", desc: "no paths exist",
input: "foobar/", input: "foobar/",
inputPaths: []file.Path{file.Path("/opt/"), file.Path("/other")}, inputPaths: []string{"/opt/", "/other"},
expRefs: 0, expRefs: 0,
}, },
{ {
desc: "path detected", desc: "path detected",
input: "test-fixtures", input: "test-fixtures",
inputPaths: []file.Path{file.Path("test-fixtures/path-detected/.vimrc")}, inputPaths: []string{"test-fixtures/path-detected/.vimrc"},
expRefs: 1, expRefs: 1,
}, },
{ {
desc: "directory ignored", desc: "directory ignored",
input: "test-fixtures", input: "test-fixtures",
inputPaths: []file.Path{file.Path("test-fixtures/path-detected")}, inputPaths: []string{"test-fixtures/path-detected"},
expRefs: 0, expRefs: 0,
}, },
{ {
desc: "no files-by-path detected", desc: "no files-by-path detected",
input: "test-fixtures", input: "test-fixtures",
inputPaths: []file.Path{file.Path("test-fixtures/no-path-detected")}, inputPaths: []string{"test-fixtures/no-path-detected"},
expRefs: 0, expRefs: 0,
}, },
} }
@ -82,7 +81,7 @@ func TestDirectoryScope(t *testing.T) {
p, err := NewFromDirectory(test.input) p, err := NewFromDirectory(test.input)
if err != nil { if err != nil {
t.Errorf("could not create NewDirScope: %w", err) t.Errorf("could not create NewDirScope: %+v", err)
} }
if p.Target.(DirSource).Path != test.input { if p.Target.(DirSource).Path != test.input {
t.Errorf("mismatched stringer: '%s' != '%s'", p.Target.(DirSource).Path, test.input) t.Errorf("mismatched stringer: '%s' != '%s'", p.Target.(DirSource).Path, test.input)
@ -90,7 +89,7 @@ func TestDirectoryScope(t *testing.T) {
refs, err := p.Resolver.FilesByPath(test.inputPaths...) refs, err := p.Resolver.FilesByPath(test.inputPaths...)
if err != nil { if err != nil {
t.Errorf("FilesByPath call produced an error: %w", err) t.Errorf("FilesByPath call produced an error: %+v", err)
} }
if len(refs) != test.expRefs { if len(refs) != test.expRefs {
t.Errorf("unexpected number of refs returned: %d != %d", len(refs), test.expRefs) t.Errorf("unexpected number of refs returned: %d != %d", len(refs), test.expRefs)
@ -125,20 +124,19 @@ func TestMultipleFileContentsByRefContents(t *testing.T) {
t.Run(test.desc, func(t *testing.T) { t.Run(test.desc, func(t *testing.T) {
p, err := NewFromDirectory(test.input) p, err := NewFromDirectory(test.input)
if err != nil { if err != nil {
t.Errorf("could not create NewDirScope: %w", err) t.Errorf("could not create NewDirScope: %+v", err)
} }
refs, err := p.Resolver.FilesByPath(file.Path(test.path)) locations, err := p.Resolver.FilesByPath(test.path)
if err != nil { if err != nil {
t.Errorf("could not get file references from path: %s, %v", test.path, err) t.Errorf("could not get file references from path: %s, %v", test.path, err)
} }
if len(refs) != 1 { if len(locations) != 1 {
t.Fatalf("expected a single ref to be generated but got: %d", len(refs)) t.Fatalf("expected a single location to be generated but got: %d", len(locations))
} }
ref := refs[0] location := locations[0]
contents, err := p.Resolver.MultipleFileContentsByRef(ref) content, err := p.Resolver.FileContentsByLocation(location)
content := contents[ref]
if content != test.expected { if content != test.expected {
t.Errorf("unexpected contents from file: '%s' != '%s'", content, test.expected) t.Errorf("unexpected contents from file: '%s' != '%s'", content, test.expected)
@ -165,9 +163,9 @@ func TestMultipleFileContentsByRefNoContents(t *testing.T) {
t.Run(test.desc, func(t *testing.T) { t.Run(test.desc, func(t *testing.T) {
p, err := NewFromDirectory(test.input) p, err := NewFromDirectory(test.input)
if err != nil { if err != nil {
t.Errorf("could not create NewDirScope: %w", err) t.Errorf("could not create NewDirScope: %+v", err)
} }
refs, err := p.Resolver.FilesByPath(file.Path(test.path)) refs, err := p.Resolver.FilesByPath(test.path)
if err != nil { if err != nil {
t.Errorf("could not get file references from path: %s, %v", test.path, err) t.Errorf("could not get file references from path: %s, %v", test.path, err)
} }
@ -210,7 +208,7 @@ func TestFilesByGlob(t *testing.T) {
t.Run(test.desc, func(t *testing.T) { t.Run(test.desc, func(t *testing.T) {
p, err := NewFromDirectory(test.input) p, err := NewFromDirectory(test.input)
if err != nil { if err != nil {
t.Errorf("could not create NewDirScope: %w", err) t.Errorf("could not create NewDirScope: %+v", err)
} }
contents, err := p.Resolver.FilesByGlob(test.glob) contents, err := p.Resolver.FilesByGlob(test.glob)

View file

@ -73,7 +73,7 @@ func TestCatalogFromJSON(t *testing.T) {
a := actualPackages[i] a := actualPackages[i]
// omit fields that should be missing // omit fields that should be missing
e.Source = nil e.Locations = nil
e.FoundBy = "" e.FoundBy = ""
if e.MetadataType == pkg.JavaMetadataType { if e.MetadataType == pkg.JavaMetadataType {
metadata := e.Metadata.(pkg.JavaMetadata) metadata := e.Metadata.(pkg.JavaMetadata)