[chore] Remove logrus from trufflehog (#1095)

* [chore] Remove logrus from trufflehog

* Minor fixes

* Fix logFatal call

* Fix logrus call
Miccah 2023-02-14 17:00:07 -06:00 committed by GitHub
parent c6826c4574
commit 161e499142
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
15 changed files with 160 additions and 127 deletions
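Every file below follows the same migration pattern: logrus's printf- and field-style calls are swapped for the repository's logr-based structured logger (pkg/log wired into pkg/context), and each logrus Fatal becomes an explicit Error record followed by os.Exit. A minimal sketch of that pattern, assuming only the pkg/log and pkg/context APIs that appear in this diff; the surrounding program is illustrative, not part of the commit:

package main

import (
	"fmt"
	"os"

	"github.com/trufflesecurity/trufflehog/v3/pkg/context"
	"github.com/trufflesecurity/trufflehog/v3/pkg/log"
)

func main() {
	// Build the structured logger and make it the default for contexts,
	// mirroring what main.go and the snifftest tool now do.
	logger, flush := log.New("trufflehog", log.WithConsoleSink(os.Stderr))
	context.SetDefaultLogger(logger)
	defer func() { _ = flush() }()

	// logrus.Fatal has no direct logr equivalent, so fatal paths become
	// an Error record plus an explicit exit.
	logFatal := func(err error, msg string, keysAndValues ...any) {
		logger.Error(err, msg, keysAndValues...)
		os.Exit(1)
	}

	// Before: logrus.Infof("loaded %d secret detectors", n)
	// After: a constant message with structured key/value pairs.
	n := 3
	logger.Info("loaded secret detectors", "count", n)

	if err := fmt.Errorf("example failure"); err != nil {
		logFatal(err, "could not create filter")
	}
}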

go.mod (3 changed lines)

@@ -41,14 +41,12 @@ require (
github.com/jpillora/overseer v1.1.6
github.com/kylelemons/godebug v1.1.0
github.com/lib/pq v1.10.7
- github.com/mattn/go-colorable v0.1.13
github.com/mattn/go-sqlite3 v1.14.16
github.com/mholt/archiver/v4 v4.0.0-alpha.7
github.com/paulbellamy/ratecounter v0.2.0
github.com/pkg/errors v0.9.1
github.com/rabbitmq/amqp091-go v1.6.0
github.com/sergi/go-diff v1.3.1
- github.com/sirupsen/logrus v1.9.0
github.com/stretchr/testify v1.8.1
github.com/tailscale/depaware v0.0.0-20210622194025-720c4b409502
github.com/xanzy/go-gitlab v0.78.0
@@ -116,6 +114,7 @@ require (
github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.15.11 // indirect
github.com/klauspost/pgzip v1.2.5 // indirect
+ github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.16 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe // indirect

go.sum (2 changed lines)

@@ -302,8 +302,6 @@ github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNX
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
- github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
- github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/skeema/knownhosts v1.1.0 h1:Wvr9V0MxhjRbl3f9nMnKnFfiWTJmtECJ9Njkea3ysW0=
github.com/skeema/knownhosts v1.1.0/go.mod h1:sKFq3RD6/TKZkSWn8boUbDC7Qkgcv+8XXijpFO6roag=
github.com/smartystreets/assertions v1.0.1 h1:voD4ITNjPL5jjBfgR/r8fPIIBrliWrWHeiJApdr3r4w=

View file

@@ -11,7 +11,6 @@ import (
"time"
"github.com/paulbellamy/ratecounter"
- log "github.com/sirupsen/logrus"
"golang.org/x/sync/semaphore"
"gopkg.in/alecthomas/kingpin.v2"
@@ -19,6 +18,7 @@ import (
"github.com/trufflesecurity/trufflehog/v3/pkg/decoders"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
"github.com/trufflesecurity/trufflehog/v3/pkg/engine"
+ "github.com/trufflesecurity/trufflehog/v3/pkg/log"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/source_metadatapb"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/sourcespb"
"github.com/trufflesecurity/trufflehog/v3/pkg/sources"
@@ -42,6 +42,20 @@ var (
)
func main() {
+ // setup logger
+ logger, flush := log.New("trufflehog", log.WithConsoleSink(os.Stderr))
+ // make it the default logger for contexts
+ context.SetDefaultLogger(logger)
+ defer func() { _ = flush() }()
+ logFatal := func(err error, message string, keyAndVals ...any) {
+ logger.Error(err, message, keyAndVals...)
+ if err != nil {
+ os.Exit(1)
+ return
+ }
+ os.Exit(0)
+ }
ctx, cancel := context.WithTimeout(context.Background(), time.Hour*2)
var cancelOnce sync.Once
defer cancelOnce.Do(cancel)
@@ -68,19 +82,19 @@ func main() {
} else {
_, ok := allScanners[input]
if !ok {
- log.Fatal("could not find scanner by that name")
+ logFatal(fmt.Errorf("invalid input"), "could not find scanner by that name")
}
selectedScanners[input] = allScanners[input]
}
if len(selectedScanners) == 0 {
- log.Fatal("no detectors selected")
+ logFatal(fmt.Errorf("invalid input"), "no detectors selected")
}
for _, excluded := range *scanCmdExclude {
delete(selectedScanners, excluded)
}
- log.Infof("loaded %d secret detectors", len(selectedScanners)+3)
+ logger.Info("loaded secret detectors", "count", len(selectedScanners)+3)
var wgScanners sync.WaitGroup
@@ -92,7 +106,7 @@ func main() {
time.Sleep(60 * time.Second)
counter.Incr(int64(chunkCounter - prev))
prev = chunkCounter
- log.Infof("chunk scan rate: %d/sec", counter.Rate()/60)
+ logger.Info("chunk scan rate per second", "rate", counter.Rate()/60)
}
}()
@@ -122,7 +136,7 @@ func main() {
res, err := scanner.FromData(ctx, *scanVerify, decoded.Data)
if err != nil {
- log.Fatal(err)
+ logFatal(err, "error scanning chunk")
}
if len(res) > 0 {
if resCounter[name] == nil {
@@ -131,16 +145,19 @@ func main() {
}
atomic.AddUint64(resCounter[name], uint64(len(res)))
if *scanThreshold != 0 && int(*resCounter[name]) > *scanThreshold {
- log.WithField("scanner", name).Errorf("exceeded result threshold of %d", *scanThreshold)
+ logger.Error(
+ fmt.Errorf("exceeded result threshold"), "snifftest failed",
+ "scanner", name, "threshold", *scanThreshold,
+ )
failed = true
os.Exit(1)
}
if *scanPrintRes {
for _, r := range res {
- logger := log.WithField("secret", name).WithField("meta", chunk.SourceMetadata).WithField("result", string(r.Raw))
+ logger := logger.WithValues("secret", name, "meta", chunk.SourceMetadata, "result", string(r.Raw))
if *scanPrintChunkRes {
- logger = logger.WithField("chunk", string(decoded.Data))
+ logger = logger.WithValues("chunk", string(decoded.Data))
}
logger.Info("result")
}
@@ -161,13 +178,13 @@ func main() {
go func(r string) {
defer sem.Release(1)
defer wgChunkers.Done()
- log.Infof("cloning %s", r)
+ logger.Info("cloning repo", "repo", r)
path, repo, err := git.CloneRepoUsingUnauthenticated(ctx, r)
if err != nil {
- log.Fatal(err)
+ logFatal(err, "error cloning repo", "repo", r)
}
- log.Infof("cloned %s", r)
+ logger.Info("cloned repo", "repo", r)
s := git.NewGit(sourcespb.SourceType_SOURCE_TYPE_GIT, 0, 0, "snifftest", false, runtime.NumCPU(),
func(file, email, commit, timestamp, repository string, line int64) *source_metadatapb.MetaData {
@@ -184,12 +201,12 @@ func main() {
}
})
- log.Infof("scanning %s", r)
+ logger.Info("scanning repo", "repo", r)
err = s.ScanRepo(ctx, repo, path, git.NewScanOptions(), chunksChan)
if err != nil {
- log.Fatal(err)
+ logFatal(err, "error scanning repo")
}
- log.Infof("scanned %s", r)
+ logger.Info("scanned repo", "repo", r)
defer os.RemoveAll(path)
}(repo)
}
@@ -201,9 +218,9 @@ func main() {
wgScanners.Wait()
- log.WithField("chunks", chunkCounter).Info("completed")
+ logger.Info("completed snifftest", "chunks", chunkCounter)
for scanner, resultsCount := range resCounter {
- log.WithField("results", *resultsCount).Info(scanner)
+ logger.Info(scanner, "results", *resultsCount)
}
if failed {

main.go (104 changed lines)

@@ -12,9 +12,9 @@ import (
"time"
"github.com/felixge/fgprof"
+ "github.com/go-logr/logr"
"github.com/gorilla/mux"
"github.com/jpillora/overseer"
- "github.com/sirupsen/logrus"
"gopkg.in/alecthomas/kingpin.v2"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
@@ -118,24 +118,26 @@ func init() {
cli.Version("trufflehog " + version.BuildVersion)
cmd = kingpin.MustParse(cli.Parse(os.Args[1:]))
- if *jsonOut {
- logrus.SetFormatter(&logrus.JSONFormatter{})
- }
switch {
case *trace:
log.SetLevel(5)
- logrus.SetLevel(logrus.TraceLevel)
- logrus.Debugf("running version %s", version.BuildVersion)
case *debug:
log.SetLevel(2)
- logrus.SetLevel(logrus.DebugLevel)
- logrus.Debugf("running version %s", version.BuildVersion)
- default:
- logrus.SetLevel(logrus.InfoLevel)
}
}
func main() {
+ // setup logger
+ logFormat := log.WithConsoleSink
+ if *jsonOut {
+ logFormat = log.WithJSONSink
+ }
+ logger, sync := log.New("trufflehog", logFormat(os.Stderr))
+ // make it the default logger for contexts
+ context.SetDefaultLogger(logger)
+ defer func() { _ = sync() }()
+ logFatal := logFatalFunc(logger)
updateCfg := overseer.Config{
Program: run,
Debug: *debug,
@@ -153,14 +155,16 @@ func main() {
err := overseer.RunErr(updateCfg)
if err != nil {
- logrus.WithError(err).Fatal("error occured with trufflehog updater 🐷")
+ logFatal(err, "error occured with trufflehog updater 🐷")
}
}
func run(state overseer.State) {
- if *debug {
- logrus.Debugf("trufflehog %s", version.BuildVersion)
- }
+ ctx := context.Background()
+ logger := ctx.Logger()
+ logFatal := logFatalFunc(logger)
+ logger.V(2).Info(fmt.Sprintf("trufflehog %s", version.BuildVersion))
if *githubScanToken != "" {
// NOTE: this kludge is here to do an authenticated shallow commit
@@ -178,27 +182,19 @@ func run(state overseer.State) {
router := mux.NewRouter()
router.PathPrefix("/debug/pprof").Handler(http.DefaultServeMux)
router.PathPrefix("/debug/fgprof").Handler(fgprof.Handler())
- logrus.Info("starting pprof and fgprof server on :18066 /debug/pprof and /debug/fgprof")
+ logger.Info("starting pprof and fgprof server on :18066 /debug/pprof and /debug/fgprof")
if err := http.ListenAndServe(":18066", router); err != nil {
- logrus.Error(err)
+ logger.Error(err, "error serving pprof and fgprof")
}
}()
}
- logFormat := log.WithConsoleSink
- if *jsonOut {
- logFormat = log.WithJSONSink
- }
- logger, sync := log.New("trufflehog", logFormat(os.Stderr))
- context.SetDefaultLogger(logger)
- defer func() { _ = sync() }()
conf := &config.Config{}
if *configFilename != "" {
var err error
conf, err = config.Read(*configFilename)
if err != nil {
- logger.Error(err, "error parsing the provided configuration file")
- os.Exit(1)
+ logFatal(err, "error parsing the provided configuration file")
}
}
@@ -212,7 +208,6 @@ func run(state overseer.State) {
handlers.SetArchiveMaxTimeout(*archiveTimeout)
}
- ctx := context.TODO()
e := engine.Start(ctx,
engine.WithConcurrency(*concurrency),
engine.WithDecoders(decoders.DefaultDecoders()...),
@@ -227,11 +222,11 @@ func run(state overseer.State) {
case gitScan.FullCommand():
filter, err := common.FilterFromFiles(*gitScanIncludePaths, *gitScanExcludePaths)
if err != nil {
- logrus.WithError(err).Fatal("could not create filter")
+ logFatal(err, "could not create filter")
}
repoPath, remote, err = git.PrepareRepoSinceCommit(ctx, *gitScanURI, *gitScanSinceCommit)
if err != nil || repoPath == "" {
- logrus.WithError(err).Fatal("error preparing git repo for scanning")
+ logFatal(err, "error preparing git repo for scanning")
}
if remote {
defer os.RemoveAll(repoPath)
@@ -245,15 +240,15 @@ func run(state overseer.State) {
Filter: filter,
}
if err = e.ScanGit(ctx, cfg); err != nil {
- logrus.WithError(err).Fatal("Failed to scan Git.")
+ logFatal(err, "Failed to scan Git.")
}
case githubScan.FullCommand():
filter, err := common.FilterFromFiles(*githubScanIncludePaths, *githubScanExcludePaths)
if err != nil {
- logrus.WithError(err).Fatal("could not create filter")
+ logFatal(err, "could not create filter")
}
if len(*githubScanOrgs) == 0 && len(*githubScanRepos) == 0 {
- logrus.Fatal("You must specify at least one organization or repository.")
+ logFatal(fmt.Errorf("invalid config"), "You must specify at least one organization or repository.")
}
cfg := sources.GithubConfig{
@@ -269,12 +264,12 @@ func run(state overseer.State) {
Filter: filter,
}
if err := e.ScanGitHub(ctx, cfg); err != nil {
- logrus.WithError(err).Fatal("Failed to scan Github.")
+ logFatal(err, "Failed to scan Github.")
}
case gitlabScan.FullCommand():
filter, err := common.FilterFromFiles(*gitlabScanIncludePaths, *gitlabScanExcludePaths)
if err != nil {
- logrus.WithError(err).Fatal("could not create filter")
+ logFatal(err, "could not create filter")
}
cfg := sources.GitlabConfig{
@@ -284,12 +279,12 @@ func run(state overseer.State) {
Filter: filter,
}
if err := e.ScanGitLab(ctx, cfg); err != nil {
- logrus.WithError(err).Fatal("Failed to scan GitLab.")
+ logFatal(err, "Failed to scan GitLab.")
}
case filesystemScan.FullCommand():
filter, err := common.FilterFromFiles(*filesystemScanIncludePaths, *filesystemScanExcludePaths)
if err != nil {
- logrus.WithError(err).Fatal("could not create filter")
+ logFatal(err, "could not create filter")
}
cfg := sources.FilesystemConfig{
@@ -297,7 +292,7 @@ func run(state overseer.State) {
Filter: filter,
}
if err = e.ScanFileSystem(ctx, cfg); err != nil {
- logrus.WithError(err).Fatal("Failed to scan filesystem")
+ logFatal(err, "Failed to scan filesystem")
}
case s3Scan.FullCommand():
cfg := sources.S3Config{
@@ -307,7 +302,7 @@ func run(state overseer.State) {
CloudCred: *s3ScanCloudEnv,
}
if err := e.ScanS3(ctx, cfg); err != nil {
- logrus.WithError(err).Fatal("Failed to scan S3.")
+ logFatal(err, "Failed to scan S3.")
}
case syslogScan.FullCommand():
cfg := sources.SyslogConfig{
@@ -319,11 +314,11 @@ func run(state overseer.State) {
Concurrency: *concurrency,
}
if err := e.ScanSyslog(ctx, cfg); err != nil {
- logrus.WithError(err).Fatal("Failed to scan syslog.")
+ logFatal(err, "Failed to scan syslog.")
}
case circleCiScan.FullCommand():
if err := e.ScanCircleCI(ctx, *circleCiScanToken); err != nil {
- logrus.WithError(err).Fatal("Failed to scan CircleCI.")
+ logFatal(err, "Failed to scan CircleCI.")
}
}
// asynchronously wait for scanning to finish and cleanup
@@ -342,24 +337,30 @@ func run(state overseer.State) {
}
foundResults = true
+ var err error
switch {
case *jsonLegacy:
- output.PrintLegacyJSON(ctx, &r)
+ err = output.PrintLegacyJSON(ctx, &r)
case *jsonOut:
- output.PrintJSON(&r)
+ err = output.PrintJSON(&r)
default:
- output.PrintPlainOutput(&r)
+ err = output.PrintPlainOutput(&r)
}
+ if err != nil {
+ logFatal(err, "error printing results")
+ }
}
- logrus.Debugf("scanned %d chunks", e.ChunksScanned())
- logrus.Debugf("scanned %d bytes", e.BytesScanned())
+ logger.V(2).Info("finished scanning",
+ "chunks", e.ChunksScanned(),
+ "bytes", e.BytesScanned(),
+ )
if *printAvgDetectorTime {
printAverageDetectorTime(e)
}
if foundResults && *fail {
- logrus.Debug("exiting with code 183 because results were found")
+ logger.V(2).Info("exiting with code 183 because results were found")
os.Exit(183)
}
}
@@ -375,3 +376,16 @@ func printAverageDetectorTime(e *engine.Engine) {
fmt.Fprintf(os.Stderr, "%s: %s\n", detectorName, avgDuration)
}
}
+ // logFatalFunc returns a log.Fatal style function. Calling the returned
+ // function will terminate the program without cleanup.
+ func logFatalFunc(logger logr.Logger) func(error, string, ...any) {
+ return func(err error, message string, keyAndVals ...any) {
+ logger.Error(err, message, keyAndVals...)
+ if err != nil {
+ os.Exit(1)
+ return
+ }
+ os.Exit(0)
+ }
+ }
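The --debug and --trace flags now map onto logr verbosity rather than logrus levels: log.SetLevel(2) corresponds to the old debug level and log.SetLevel(5) to trace, so former logrus.Debugf calls become logger.V(2).Info. A small sketch of that convention, assuming pkg/log's SetLevel behaves as the diff implies (level n enables V(0) through V(n)); the program itself is illustrative:

package main

import (
	"os"

	"github.com/trufflesecurity/trufflehog/v3/pkg/log"
)

func main() {
	logger, flush := log.New("example", log.WithConsoleSink(os.Stderr))
	defer func() { _ = flush() }()

	// --debug -> log.SetLevel(2); --trace -> log.SetLevel(5).
	log.SetLevel(2)

	logger.Info("always shown")            // V(0)
	logger.V(2).Info("debug-level detail") // emitted at --debug and --trace
	logger.V(5).Info("trace-level detail") // emitted only at --trace
}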

View file

@@ -6,7 +6,7 @@ import (
"os"
"regexp"
- log "github.com/sirupsen/logrus"
+ "github.com/trufflesecurity/trufflehog/v3/pkg/context"
)
type Filter struct {
@@ -20,7 +20,8 @@ type FilterRuleSet []regexp.Regexp
func FilterEmpty() *Filter {
filter, err := FilterFromFiles("", "")
if err != nil {
- log.WithError(err).Fatalf("could not create empty filter")
+ context.Background().Logger().Error(err, "could not create empty filter")
+ os.Exit(1)
}
return filter
}
@@ -60,13 +61,16 @@ func FilterRulesFromFile(source string) (*FilterRuleSet, error) {
emptyLinePattern := regexp.MustCompile(`^\s*$`)
file, err := os.Open(source)
+ logger := context.Background().Logger().WithValues("file", source)
if err != nil {
- log.WithError(err).Fatalf("unable to open filter file: %s", source)
+ logger.Error(err, "unable to open filter file", "file", source)
+ os.Exit(1)
}
defer func(file *os.File) {
err := file.Close()
if err != nil {
- log.WithError(err).Fatalf("unable to close filter file: %s", source)
+ logger.Error(err, "unable to close filter file")
+ os.Exit(1)
}
}(file)

View file

@@ -4,8 +4,6 @@ import (
"fmt"
"strconv"
"strings"
- log "github.com/sirupsen/logrus"
)
const EmailPattern = `\b(?:[a-z0-9!#$%&'*+/=?^_\x60{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_\x60{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])\b`
@@ -24,7 +22,7 @@ func BuildRegex(pattern string, specialChar string, length int) string {
func BuildRegexJWT(firstRange, secondRange, thirdRange string) string {
if RangeValidation(firstRange) || RangeValidation(secondRange) || RangeValidation(thirdRange) {
- log.Error("Min value should not be greater than or equal to max")
+ panic("Min value should not be greater than or equal to max")
}
return fmt.Sprintf(`\b(ey[%s]{%s}.ey[%s-\/_]{%s}.[%s-\/_]{%s})\b`, AlphaNumPattern, firstRange, AlphaNumPattern, secondRange, AlphaNumPattern, thirdRange)
}

View file

@@ -8,7 +8,6 @@ import (
"regexp"
"time"
- log "github.com/sirupsen/logrus"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/detectorspb"
@@ -66,8 +65,6 @@ func (s Scanner) FromData(ctx context.Context, verify bool, data []byte) ([]dete
if data != nil {
secret.Verified = true
}
- } else {
- log.Warn(err)
}
}

View file

@@ -11,8 +11,6 @@ import (
"strings"
"time"
- log "github.com/sirupsen/logrus"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
)
@@ -174,14 +172,14 @@ func (c *Parser) executeCommand(ctx context.Context, cmd *exec.Cmd) (chan Commit
go func() {
scanner := bufio.NewScanner(stdErr)
for scanner.Scan() {
- log.Debug(scanner.Text())
+ ctx.Logger().V(2).Info(scanner.Text())
}
}()
go func() {
c.fromReader(ctx, stdOut, commitChan)
if err := cmd.Wait(); err != nil {
- log.WithError(err).Debugf("Error waiting for git command to complete.")
+ ctx.Logger().V(2).Info("Error waiting for git command to complete.", "error", err)
}
}()
@@ -227,7 +225,7 @@ func (c *Parser) fromReader(ctx context.Context, stdOut io.Reader, commitChan ch
case isDateLine(line):
date, err := time.Parse(c.dateFormat, strings.TrimSpace(string(line[6:])))
if err != nil {
- log.WithError(err).Debug("Could not parse date from git stream.")
+ ctx.Logger().V(2).Info("Could not parse date from git stream.", "error", err)
}
currentCommit.Date = date
case isDiffLine(line):
@@ -293,7 +291,9 @@ func (c *Parser) fromReader(ctx context.Context, stdOut io.Reader, commitChan ch
}
}
if currentDiff.Content.Len() > c.maxDiffSize {
- log.Debugf("Diff for %s exceeded MaxDiffSize(%d)", currentDiff.PathB, c.maxDiffSize)
+ ctx.Logger().V(2).Info(fmt.Sprintf(
+ "Diff for %s exceeded MaxDiffSize(%d)", currentDiff.PathB, c.maxDiffSize,
+ ))
break
}
}
@@ -423,9 +423,10 @@ func isLineNumberDiffLine(line []byte) bool {
// Get the b/ file path.
func pathFromBinaryLine(line []byte) string {
+ logger := context.Background().Logger()
sbytes := bytes.Split(line, []byte(" and "))
if len(sbytes) != 2 {
- log.Debugf("Expected binary line to be in 'Binary files a/filaA and b/fileB differ' format. Got: %s", line)
+ logger.V(2).Info("Expected binary line to be in 'Binary files a/filaA and b/fileB differ' format.", "got", line)
return ""
}
bRaw := sbytes[1]

View file

@@ -9,8 +9,8 @@ import (
"time"
"github.com/mholt/archiver/v4"
- log "github.com/sirupsen/logrus"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
+ logContext "github.com/trufflesecurity/trufflehog/v3/pkg/context"
)
type ctxKey int
@@ -55,6 +55,7 @@ func (d *Archive) FromFile(originalCtx context.Context, data io.Reader) chan ([]
archiveChan := make(chan ([]byte), 512)
go func() {
ctx, cancel := context.WithTimeout(originalCtx, maxTimeout)
+ logger := logContext.AddLogger(ctx).Logger()
defer cancel()
defer close(archiveChan)
err := d.openArchive(ctx, 0, data, archiveChan)
@@ -62,7 +63,7 @@ func (d *Archive) FromFile(originalCtx context.Context, data io.Reader) chan ([]
if errors.Is(err, archiver.ErrNoMatch) {
return
}
- log.WithError(err).Debug("Error unarchiving chunk.")
+ logger.V(2).Info("Error unarchiving chunk.")
}
}()
return archiveChan
@@ -129,7 +130,8 @@ func (d *Archive) IsFiletype(ctx context.Context, reader io.Reader) (io.Reader,
// extractorHandler is applied to each file in an archiver.Extractor file.
func (d *Archive) extractorHandler(archiveChan chan ([]byte)) func(context.Context, archiver.File) error {
return func(ctx context.Context, f archiver.File) error {
- log.WithField("filename", f.Name()).Trace("Handling extracted file.")
+ logger := logContext.AddLogger(ctx).Logger()
+ logger.V(5).Info("Handling extracted file.", "filename", f.Name())
depth := 0
if ctxDepth, ok := ctx.Value(depthKey).(int); ok {
depth = ctxDepth
@@ -159,19 +161,20 @@ func (d *Archive) ReadToMax(ctx context.Context, reader io.Reader) (data []byte,
// rardecode. There is a bug somewhere with rar decoder format 29
// that can lead to a panic. An issue is open in rardecode repo
// https://github.com/nwaples/rardecode/issues/30.
+ logger := logContext.AddLogger(ctx).Logger()
defer func() {
if r := recover(); r != nil {
- log.Errorf("Panic occurred when reading archive: %v", r)
// Return an error from ReadToMax.
if e, ok := r.(error); ok {
err = e
} else {
err = fmt.Errorf("Panic occurred: %v", r)
}
+ logger.Error(err, "Panic occurred when reading archive")
}
}()
fileContent := bytes.Buffer{}
- log.Tracef("Remaining buffer capacity: %d", maxSize-d.size)
+ logger.V(5).Info("Remaining buffer capacity", "bytes", maxSize-d.size)
for i := 0; i <= maxSize/512; i++ {
if common.IsDone(ctx) {
return nil, ctx.Err()
@@ -189,7 +192,7 @@ func (d *Archive) ReadToMax(ctx context.Context, reader io.Reader) (data []byte,
return fileContent.Bytes(), nil
}
if d.size >= maxSize && bRead == 512 {
- log.Debug("Max archive size reached.")
+ logger.V(2).Info("Max archive size reached.")
return fileContent.Bytes(), nil
}
}

View file

@@ -4,15 +4,13 @@ import (
"encoding/json"
"fmt"
- "github.com/sirupsen/logrus"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/detectorspb"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/source_metadatapb"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/sourcespb"
)
- func PrintJSON(r *detectors.ResultWithMetadata) {
+ func PrintJSON(r *detectors.ResultWithMetadata) error {
v := &struct {
// SourceMetadata contains source-specific contextual information.
SourceMetadata *source_metadatapb.MetaData
@@ -52,7 +50,8 @@
}
out, err := json.Marshal(v)
if err != nil {
- logrus.WithError(err).Fatal("could not marshal result")
+ return fmt.Errorf("could not marshal result: %w", err)
}
fmt.Println(string(out))
+ return nil
}

View file

@@ -12,7 +12,6 @@ import (
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/sergi/go-diff/diffmatchpatch"
- "github.com/sirupsen/logrus"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
@@ -20,7 +19,7 @@
"github.com/trufflesecurity/trufflehog/v3/pkg/sources/git"
)
- func PrintLegacyJSON(ctx context.Context, r *detectors.ResultWithMetadata) {
+ func PrintLegacyJSON(ctx context.Context, r *detectors.ResultWithMetadata) error {
var repo string
switch r.SourceType {
case sourcespb.SourceType_SOURCE_TYPE_GIT:
@@ -30,27 +29,31 @@
case sourcespb.SourceType_SOURCE_TYPE_GITLAB:
repo = r.SourceMetadata.GetGitlab().Repository
default:
- logrus.Errorf("unsupported source type for legacy json output: %s", r.SourceType)
+ return fmt.Errorf("unsupported source type for legacy json output: %s", r.SourceType)
}
// cloning the repo again here is not great and only works with unauthed repos
repoPath, remote, err := git.PrepareRepo(ctx, repo)
if err != nil || repoPath == "" {
- logrus.WithError(err).Fatal("error preparing git repo for scanning")
+ return fmt.Errorf("error preparing git repo for scanning: %w", err)
}
if remote {
defer os.RemoveAll(repoPath)
}
- legacy := ConvertToLegacyJSON(r, repoPath)
+ legacy, err := ConvertToLegacyJSON(r, repoPath)
+ if err != nil {
+ return fmt.Errorf("could not convert to legacy JSON: %w", err)
+ }
out, err := json.Marshal(legacy)
if err != nil {
- logrus.WithError(err).Fatal("could not marshal result")
+ return fmt.Errorf("could not marshal result: %w", err)
}
fmt.Println(string(out))
+ return nil
}
- func ConvertToLegacyJSON(r *detectors.ResultWithMetadata, repoPath string) *LegacyJSONOutput {
+ func ConvertToLegacyJSON(r *detectors.ResultWithMetadata, repoPath string) (*LegacyJSONOutput, error) {
var source LegacyJSONCompatibleSource
switch r.SourceType {
case sourcespb.SourceType_SOURCE_TYPE_GIT:
@@ -60,14 +63,14 @@
case sourcespb.SourceType_SOURCE_TYPE_GITLAB:
source = r.SourceMetadata.GetGitlab()
default:
- log.Fatalf("legacy JSON output can not be used with this source: %s", r.SourceName)
+ return nil, fmt.Errorf("legacy JSON output can not be used with this source: %s", r.SourceName)
}
// The repo will be needed to gather info needed for the legacy output that isn't included in the new
// output format.
repo, err := gogit.PlainOpenWithOptions(repoPath, &gogit.PlainOpenOptions{DetectDotGit: true})
if err != nil {
- logrus.WithError(err).Fatalf("could not open repo: %s", repoPath)
+ return nil, fmt.Errorf("could not open repo %q: %w", repoPath, err)
}
fileName := source.GetFile()
@@ -96,7 +99,7 @@
Reason: r.Result.DetectorType.String(),
StringsFound: []string{foundString},
}
- return output
+ return output, nil
}
// BranchHeads creates a map of branch names to their head commit. This can be used to find if a commit is an ancestor
@@ -108,16 +111,17 @@
return branches, err
}
+ logger := context.Background().Logger()
err = branchIter.ForEach(func(branchRef *plumbing.Reference) error {
branchName := branchRef.Name().String()
headHash, err := repo.ResolveRevision(plumbing.Revision(branchName))
if err != nil {
- logrus.WithError(err).Errorf("unable to resolve head of branch: %s", branchRef.Name().String())
+ logger.Error(err, "unable to resolve head of branch", "branch", branchRef.Name().String())
return nil
}
headCommit, err := repo.CommitObject(*headHash)
if err != nil {
- logrus.WithError(err).Errorf("unable to get commit: %s", headCommit.String())
+ logger.Error(err, "unable to get commit", "commit", headCommit.String())
return nil
}
branches[branchName] = headCommit
@@ -128,15 +132,17 @@ func BranchHeads(repo *gogit.Repository) (map[string]*object.Commit, error) {
// FindBranch returns the first branch a commit is a part of. Not the most accurate, but it should work similar to pre v3.0.
func FindBranch(commit *object.Commit, repo *gogit.Repository) string {
+ logger := context.Background().Logger()
branches, err := BranchHeads(repo)
if err != nil {
- logrus.WithError(err).Fatal("could not list branches")
+ logger.Error(err, "could not list branches")
+ os.Exit(1)
}
for name, head := range branches {
isAncestor, err := commit.IsAncestor(head)
if err != nil {
- logrus.WithError(err).Errorf("could not determine if %s is an ancestor of %s", commit.Hash.String(), head.Hash.String())
+ logger.Error(err, fmt.Sprintf("could not determine if %s is an ancestor of %s", commit.Hash.String(), head.Hash.String()))
continue
}
if isAncestor {
@@ -149,12 +155,13 @@
// GenerateDiff will take a commit and create a string diff between the commit and its first parent.
func GenerateDiff(commit *object.Commit, fileName string) string {
var diff string
+ logger := context.Background().Logger().WithValues("file", fileName)
// First grab the first parent of the commit. If there are none, we are at the first commit and should diff against
// an empty file.
parent, err := commit.Parent(0)
if err != object.ErrParentNotFound && err != nil {
- logrus.WithError(err).Errorf("could not find parent of %s", commit.Hash.String())
+ logger.Error(err, "could not find parent", "commit", commit.Hash.String())
}
// Now get the files from the commit and its parent.
@@ -162,13 +169,13 @@
if parent != nil {
parentFile, err = parent.File(fileName)
if err != nil && err != object.ErrFileNotFound {
- logrus.WithError(err).Errorf("could not get previous version of file: %q", fileName)
+ logger.Error(err, "could not get previous version of file")
return diff
}
}
commitFile, err := commit.File(fileName)
if err != nil {
- logrus.WithError(err).Errorf("could not get current version of file: %q", fileName)
+ logger.Error(err, "could not get current version of file")
return diff
}
@@ -179,14 +186,14 @@
if parentFile != nil {
oldContent, err = parentFile.Contents()
if err != nil {
- logrus.WithError(err).Errorf("could not get contents of previous version of file: %q", fileName)
+ logger.Error(err, "could not get contents of previous version of file")
}
}
// commitFile should never be nil at this point, but double-checking so we don't get a nil error.
if commitFile != nil {
newContent, _ = commitFile.Contents()
if err != nil {
- logrus.WithError(err).Errorf("could not get contents of current version of file: %q", fileName)
+ logger.Error(err, "could not get contents of current version of file")
}
}
@@ -199,7 +206,7 @@
// The String() method URL escapes the diff, so it needs to be undone.
patchDiff, err := url.QueryUnescape(patch.String())
if err != nil {
- logrus.WithError(err).Error("unable to unescape diff")
+ logger.Error(err, "unable to unescape diff")
}
diff += patchDiff
}

View file

@@ -7,7 +7,6 @@ import (
"strings"
"github.com/fatih/color"
- "github.com/sirupsen/logrus"
"golang.org/x/text/cases"
"golang.org/x/text/language"
@@ -21,7 +20,7 @@ var (
whitePrinter = color.New(color.FgWhite)
)
- func PrintPlainOutput(r *detectors.ResultWithMetadata) {
+ func PrintPlainOutput(r *detectors.ResultWithMetadata) error {
out := outputFormat{
DetectorType: r.Result.DetectorType.String(),
DecoderType: r.Result.DecoderType.String(),
@@ -32,7 +31,7 @@
meta, err := structToMap(out.MetaData.Data)
if err != nil {
- logrus.WithError(err).Fatal("could not marshal result")
+ return fmt.Errorf("could not marshal result: %w", err)
}
printer := greenPrinter
@@ -61,6 +60,7 @@
printer.Printf("%s: %v\n", cases.Title(language.AmericanEnglish).String(k), aggregateData[k])
}
fmt.Println("")
+ return nil
}
func structToMap(obj interface{}) (m map[string]map[string]interface{}, err error) {

View file

@@ -5,8 +5,6 @@ import (
"bytes"
"errors"
"io"
- "github.com/sirupsen/logrus"
)
const (
@@ -32,7 +30,6 @@ func Chunker(originalChunk *Chunk) chan *Chunk {
chunk := *originalChunk
n, err := reader.Read(chunkBytes)
if err != nil && !errors.Is(err, io.EOF) {
- logrus.WithError(err).Error("Error chunking reader.")
break
}
peekData, _ := reader.Peek(PeekSize)

View file

@@ -12,7 +12,6 @@ import (
"github.com/bill-rich/go-syslog/pkg/syslogparser/rfc3164"
"github.com/crewjam/rfc5424"
"github.com/go-errors/errors"
- "github.com/sirupsen/logrus"
"golang.org/x/sync/semaphore"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/types/known/anypb"
@@ -215,7 +214,7 @@ func (s *Source) monitorConnection(ctx context.Context, conn net.Conn, chunksCha
}
err := conn.SetDeadline(time.Now().Add(time.Second))
if err != nil {
- logrus.WithError(err).Debug("could not set connection deadline deadline")
+ ctx.Logger().V(2).Info("could not set connection deadline", "error", err)
}
input := make([]byte, 8096)
remote := conn.RemoteAddr()
@@ -226,10 +225,10 @@
}
continue
}
- logrus.Trace(string(input))
+ ctx.Logger().V(5).Info(string(input))
metadata, err := s.parseSyslogMetadata(input, remote.String())
if err != nil {
- logrus.WithError(err).Debug("failed to generate metadata")
+ ctx.Logger().V(2).Info("failed to generate metadata", "error", err)
}
chunksChan <- &sources.Chunk{
SourceName: s.syslog.sourceName,
@@ -249,7 +248,7 @@ func (s *Source) acceptTCPConnections(ctx context.Context, netListener net.Liste
}
conn, err := netListener.Accept()
if err != nil {
- logrus.WithError(err).Debug("failed to accept TCP connection")
+ ctx.Logger().V(2).Info("failed to accept TCP connection", "error", err)
continue
}
go s.monitorConnection(ctx, conn, chunksChan)
@@ -271,7 +270,7 @@ func (s *Source) acceptUDPConnections(ctx context.Context, netListener net.Packe
}
metadata, err := s.parseSyslogMetadata(input, remote.String())
if err != nil {
- logrus.WithError(err).Debug("failed to parse metadata")
+ ctx.Logger().V(2).Info("failed to parse metadata", "error", err)
}
chunksChan <- &sources.Chunk{
SourceName: s.syslog.sourceName,

View file

@ -14,8 +14,8 @@ import (
"github.com/go-errors/errors" "github.com/go-errors/errors"
"github.com/jpillora/overseer/fetcher" "github.com/jpillora/overseer/fetcher"
log "github.com/sirupsen/logrus"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
"github.com/trufflesecurity/trufflehog/v3/pkg/version" "github.com/trufflesecurity/trufflehog/v3/pkg/version"
) )
@ -78,7 +78,7 @@ func (g *OSS) Fetch() (io.Reader, error) {
return nil, errors.New("already up to date") return nil, errors.New("already up to date")
} }
log.Debug("fetching trufflehog update") context.Background().Logger().V(2).Info("fetching trufflehog update")
newBinBytes, err := io.ReadAll(resp.Body) newBinBytes, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {