Mirror of https://github.com/trufflesecurity/trufflehog.git (synced 2024-11-10 07:04:24 +00:00)
test: fix compile errors (#2964)
parent cb4d332cbf
commit 4addd81e29
6 changed files with 12 additions and 16 deletions
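The recurring change below adapts each detector pattern test to the current AhoCorasickCore API: instead of allocating a map[ahocorasick.DetectorKey]detectors.Detector and passing it to PopulateMatchingDetectors (or to the older two-argument form of FindDetectorMatches) to be filled in, the tests now call FindDetectorMatches with a []byte chunk and inspect the returned slice of matches. A minimal self-contained sketch of that calling convention, using hypothetical stand-in types (Matcher, Match) rather than trufflehog's own:

// sketch.go: illustrates the old-vs-new calling convention only.
// Matcher and Match are hypothetical stand-ins, not trufflehog types.
package main

import (
	"bytes"
	"fmt"
)

type Match struct{ DetectorName string }

type Matcher struct{ keywords map[string]string }

// FindDetectorMatches mirrors the new shape used by the tests: it takes the
// raw chunk as []byte and returns the matches, rather than populating a
// caller-supplied map as the removed two-argument form did.
func (m *Matcher) FindDetectorMatches(chunk []byte) []Match {
	var matches []Match
	for kw, name := range m.keywords {
		if bytes.Contains(chunk, []byte(kw)) {
			matches = append(matches, Match{DetectorName: name})
		}
	}
	return matches
}

func main() {
	m := &Matcher{keywords: map[string]string{"groq": "Groq"}}
	// New pattern, as in the updated tests: convert the string input and
	// check the length of the returned slice.
	detectorMatches := m.FindDetectorMatches([]byte(`groq_api_key = "..."`))
	if len(detectorMatches) == 0 {
		fmt.Println("keywords not matched")
		return
	}
	fmt.Printf("matched %d detector(s)\n", len(detectorMatches))
}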
@@ -44,9 +44,8 @@ func TestGcpapplicationdefaultcredentials_Pattern(t *testing.T) {
 
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
-			chunkSpecificDetectors := make(map[ahocorasick.DetectorKey]detectors.Detector, 2)
-			ahoCorasickCore.FindDetectorMatches(test.input, chunkSpecificDetectors)
-			if len(chunkSpecificDetectors) == 0 {
+			detectorMatches := ahoCorasickCore.FindDetectorMatches([]byte(test.input))
+			if len(detectorMatches) == 0 {
 				t.Errorf("keywords '%v' not matched by: %s", d.Keywords(), test.input)
 				return
 			}
@@ -35,9 +35,8 @@ func TestGroq_Pattern(t *testing.T) {
 
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
-			chunkSpecificDetectors := make(map[ahocorasick.DetectorKey]detectors.Detector, 2)
-			ahoCorasickCore.PopulateMatchingDetectors(test.input, chunkSpecificDetectors)
-			if len(chunkSpecificDetectors) == 0 {
+			detectorMatches := ahoCorasickCore.FindDetectorMatches([]byte(test.input))
+			if len(detectorMatches) == 0 {
 				t.Errorf("keywords '%v' not matched by: %s", d.Keywords(), test.input)
 				return
 			}
@@ -11,6 +11,7 @@ import (
 
 	"github.com/google/go-cmp/cmp"
+	"github.com/google/go-cmp/cmp/cmpopts"
 
 	"github.com/trufflesecurity/trufflehog/v3/pkg/common"
 	"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
 	"github.com/trufflesecurity/trufflehog/v3/pkg/engine/ahocorasick"
@@ -39,9 +40,8 @@ intra_client_secret = 's-s4t2ud-d91c558a2ba6b47f60f690efc20a33d28c252d5bed840034
 
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
-			chunkSpecificDetectors := make(map[ahocorasick.DetectorKey]detectors.Detector, 2)
-			ahoCorasickCore.PopulateMatchingDetectors(test.input, chunkSpecificDetectors)
-			if len(chunkSpecificDetectors) == 0 {
+			detectorMatches := ahoCorasickCore.FindDetectorMatches([]byte(test.input))
+			if len(detectorMatches) == 0 {
 				t.Errorf("keywords '%v' not matched by: %s", d.Keywords(), test.input)
 				return
 			}
@@ -45,9 +45,8 @@ func TestOpenAI_Pattern(t *testing.T) {
 
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
-			chunkSpecificDetectors := make(map[ahocorasick.DetectorKey]detectors.Detector, 2)
-			ahoCorasickCore.PopulateMatchingDetectors(test.input, chunkSpecificDetectors)
-			if len(chunkSpecificDetectors) == 0 {
+			detectorMatches := ahoCorasickCore.FindDetectorMatches([]byte(test.input))
+			if len(detectorMatches) == 0 {
 				t.Errorf("keywords '%v' not matched by: %s", d.Keywords(), test.input)
 				return
 			}
@@ -48,9 +48,8 @@ set PATH=%PATH%;C:\Program Files\nodejs\;C:\Program Files\Git\cmd`,
 
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
-			chunkSpecificDetectors := make(map[ahocorasick.DetectorKey]detectors.Detector, 2)
-			ahoCorasickCore.FindDetectorMatches(test.input, chunkSpecificDetectors)
-			if len(chunkSpecificDetectors) == 0 {
+			matches := ahoCorasickCore.FindDetectorMatches([]byte(test.input))
+			if len(matches) == 0 {
 				t.Errorf("keywords '%v' not matched by: %s", d.Keywords(), test.input)
 				return
 			}
@@ -59,7 +59,7 @@ func TestSource_Token(t *testing.T) {
 		memberCache:   map[string]struct{}{},
 		repoInfoCache: newRepoInfoCache(),
 	}
-	s.filteredRepoCache = s.newFilteredRepoCache(memory.New(), nil, nil)
+	s.filteredRepoCache = s.newFilteredRepoCache(memory.New[string](), nil, nil)
 
 	installationClient, err := s.enumerateWithApp(ctx, "https://api.github.com", conn.GetGithubApp())
 	assert.NoError(t, err)
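The last hunk reflects the in-memory cache package becoming generic: memory.New now takes a type parameter, so the bare memory.New() call no longer compiles and the test must instantiate it explicitly. A minimal sketch of why, assuming a constructor along these lines (the field layout is hypothetical):

// Sketch of a generic in-memory cache; the real type's internals may differ.
package memory

type Cache[T any] struct {
	items map[string]T
}

// New returns an empty cache for values of type T. With no arguments there
// is nothing for the compiler to infer T from, so callers must spell out
// the type parameter, which is the fix applied in TestSource_Token:
// memory.New[string]().
func New[T any]() *Cache[T] {
	return &Cache[T]{items: make(map[string]T)}
}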