package git

import (
    "bufio"
    "bytes"
    "errors"
    "fmt"
    "io"
    "net/url"
    "os"
    "os/exec"
    "path/filepath"
    "regexp"
    "runtime"
    "strings"
    "sync/atomic"
    "time"

    "github.com/go-git/go-git/v5"
    "github.com/go-git/go-git/v5/plumbing"
    "github.com/go-git/go-git/v5/plumbing/object"
    "github.com/google/go-github/v61/github"
    "golang.org/x/oauth2"
    "golang.org/x/sync/semaphore"
    "google.golang.org/protobuf/proto"
    "google.golang.org/protobuf/types/known/anypb"

    "github.com/trufflesecurity/trufflehog/v3/pkg/cleantemp"
    "github.com/trufflesecurity/trufflehog/v3/pkg/common"
    "github.com/trufflesecurity/trufflehog/v3/pkg/context"
    "github.com/trufflesecurity/trufflehog/v3/pkg/gitparse"
    "github.com/trufflesecurity/trufflehog/v3/pkg/handlers"
    "github.com/trufflesecurity/trufflehog/v3/pkg/pb/source_metadatapb"
    "github.com/trufflesecurity/trufflehog/v3/pkg/pb/sourcespb"
    "github.com/trufflesecurity/trufflehog/v3/pkg/sanitizer"
    "github.com/trufflesecurity/trufflehog/v3/pkg/sources"
)

const SourceType = sourcespb.SourceType_SOURCE_TYPE_GIT

type Source struct {
    name     string
    sourceID sources.SourceID
    jobID    sources.JobID
    verify   bool

    useCustomContentWriter bool
    git                    *Git
    scanOptions            *ScanOptions

    sources.Progress
    conn *sourcespb.Git
}

// WithCustomContentWriter sets the useCustomContentWriter flag on the source.
func (s *Source) WithCustomContentWriter() { s.useCustomContentWriter = true }

type Git struct {
    sourceType         sourcespb.SourceType
    sourceName         string
    sourceID           sources.SourceID
    jobID              sources.JobID
    sourceMetadataFunc func(file, email, commit, timestamp, repository string, line int64) *source_metadatapb.MetaData
    verify             bool
    metrics            metrics
    concurrency        *semaphore.Weighted
    skipBinaries       bool
    skipArchives       bool

    parser *gitparse.Parser
}

type metrics struct {
    commitsScanned uint64
}

// Config for a Git source.
type Config struct {
    Concurrency        int
    SourceMetadataFunc func(file, email, commit, timestamp, repository string, line int64) *source_metadatapb.MetaData

    SourceName   string
    JobID        sources.JobID
    SourceID     sources.SourceID
    SourceType   sourcespb.SourceType
    Verify       bool
    SkipBinaries bool
    SkipArchives bool

    // UseCustomContentWriter indicates whether to use a custom contentWriter.
    // When set to true, the parser will use a custom contentWriter provided through the WithContentWriter option.
    // When false, the parser will use the default buffer (in-memory) contentWriter.
    UseCustomContentWriter bool
}

// NewGit creates a new Git instance with the provided configuration. The Git instance is used to interact with
// Git repositories.
func NewGit(config *Config) *Git {
    var parser *gitparse.Parser
    if config.UseCustomContentWriter {
        parser = gitparse.NewParser(gitparse.UseCustomContentWriter())
    } else {
        parser = gitparse.NewParser()
    }

    return &Git{
        sourceType:         config.SourceType,
        sourceName:         config.SourceName,
        sourceID:           config.SourceID,
        jobID:              config.JobID,
        sourceMetadataFunc: config.SourceMetadataFunc,
        verify:             config.Verify,
        concurrency:        semaphore.NewWeighted(int64(config.Concurrency)),
        skipBinaries:       config.SkipBinaries,
        skipArchives:       config.SkipArchives,
        parser:             parser,
    }
}
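
// A minimal usage sketch (hypothetical values): constructing a Git scanner
// directly. SourceMetadataFunc is left out here for brevity; callers that emit
// chunks need to supply one (Init below builds it from the protobuf metadata).
//
//	g := NewGit(&Config{
//		SourceName:  "example-git",
//		SourceType:  SourceType,
//		Concurrency: 4,
//	})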

// Ensure the Source satisfies the interfaces at compile time.
var _ interface {
    sources.Source
    sources.SourceUnitEnumChunker
    sources.SourceUnitUnmarshaller
} = (*Source)(nil)

// Type returns the type of source.
// It is used for matching source types in configuration and job input.
func (s *Source) Type() sourcespb.SourceType {
    return SourceType
}

func (s *Source) SourceID() sources.SourceID {
    return s.sourceID
}

func (s *Source) JobID() sources.JobID {
    return s.jobID
}

// withScanOptions sets the scan options.
func (s *Source) withScanOptions(scanOptions *ScanOptions) {
    s.scanOptions = scanOptions
}

// Init returns an initialized Git source.
func (s *Source) Init(aCtx context.Context, name string, jobId sources.JobID, sourceId sources.SourceID, verify bool, connection *anypb.Any, concurrency int) error {
    s.name = name
    s.sourceID = sourceId
    s.jobID = jobId
    s.verify = verify
    if s.scanOptions == nil {
        s.scanOptions = &ScanOptions{}
    }

    var conn sourcespb.Git
    if err := anypb.UnmarshalTo(connection, &conn, proto.UnmarshalOptions{}); err != nil {
        return fmt.Errorf("error unmarshalling connection: %w", err)
    }

    if uri := conn.GetUri(); uri != "" {
        repoPath, _, err := prepareRepoSinceCommit(aCtx, uri, conn.GetBase())
        if err != nil || repoPath == "" {
            return fmt.Errorf("error preparing repo: %w", err)
        }
        conn.Directories = append(conn.Directories, repoPath)
    }

    filter, err := common.FilterFromFiles(conn.IncludePathsFile, conn.ExcludePathsFile)
    if err != nil {
        return fmt.Errorf("error creating filter: %w", err)
    }
    opts := []ScanOption{ScanOptionFilter(filter), ScanOptionLogOptions(new(git.LogOptions))}

    if depth := conn.GetMaxDepth(); depth != 0 {
        opts = append(opts, ScanOptionMaxDepth(depth))
    }
    if base := conn.GetBase(); base != "" {
        opts = append(opts, ScanOptionBaseHash(base))
    }
    if head := conn.GetHead(); head != "" {
        opts = append(opts, ScanOptionHeadCommit(head))
    }
    if globs := conn.GetExcludeGlobs(); globs != "" {
        excludedGlobs := strings.Split(globs, ",")
        opts = append(opts, ScanOptionExcludeGlobs(excludedGlobs))
    }
    if isBare := conn.GetBare(); isBare {
        opts = append(opts, ScanOptionBare(isBare))
    }
    s.withScanOptions(NewScanOptions(opts...))

    s.conn = &conn

    if concurrency == 0 {
        concurrency = runtime.NumCPU()
    }

    if err = CmdCheck(); err != nil {
        return err
    }

    cfg := &Config{
        SourceName:   s.name,
        JobID:        s.jobID,
        SourceID:     s.sourceID,
        SourceType:   s.Type(),
        Verify:       s.verify,
        SkipBinaries: conn.GetSkipBinaries(),
        SkipArchives: conn.GetSkipArchives(),
        Concurrency:  concurrency,
        SourceMetadataFunc: func(file, email, commit, timestamp, repository string, line int64) *source_metadatapb.MetaData {
            return &source_metadatapb.MetaData{
                Data: &source_metadatapb.MetaData_Git{
                    Git: &source_metadatapb.Git{
                        Commit:     sanitizer.UTF8(commit),
                        File:       sanitizer.UTF8(file),
                        Email:      sanitizer.UTF8(email),
                        Repository: sanitizer.UTF8(repository),
                        Timestamp:  sanitizer.UTF8(timestamp),
                        Line:       line,
                    },
                },
            }
        },
        UseCustomContentWriter: s.useCustomContentWriter,
    }
    s.git = NewGit(cfg)
    return nil
}
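
// A wiring sketch (hypothetical values): the connection Init expects is a
// sourcespb.Git packed into an anypb.Any; jobID and sourceID are placeholders.
//
//	conn, _ := anypb.New(&sourcespb.Git{
//		Repositories: []string{"https://github.com/org/repo.git"},
//	})
//	src := &Source{}
//	if err := src.Init(ctx, "example-git", jobID, sourceID, false, conn, 4); err != nil {
//		// handle error
//	}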

// Chunks emits chunks of bytes over a channel.
func (s *Source) Chunks(ctx context.Context, chunksChan chan *sources.Chunk, _ ...sources.ChunkingTarget) error {
    reporter := sources.ChanReporter{Ch: chunksChan}
    if err := s.scanRepos(ctx, reporter); err != nil {
        return err
    }
    if err := s.scanDirs(ctx, reporter); err != nil {
        return err
    }

    totalRepos := len(s.conn.Repositories) + len(s.conn.Directories)
    ctx.Logger().V(1).Info("Git source finished scanning", "repo_count", totalRepos)
    s.SetProgressComplete(
        totalRepos, totalRepos,
        fmt.Sprintf("Completed scanning source %s", s.name), "",
    )
    return nil
}
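
// A consumption sketch (hypothetical): Chunks blocks until the scan finishes,
// so callers typically drain the channel from another goroutine.
//
//	chunksChan := make(chan *sources.Chunk)
//	go func() {
//		defer close(chunksChan)
//		if err := src.Chunks(ctx, chunksChan); err != nil {
//			// handle error
//		}
//	}()
//	for chunk := range chunksChan {
//		_ = chunk.Data // hand off to detection
//	}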

// scanRepos scans the configured repositories in s.conn.Repositories.
func (s *Source) scanRepos(ctx context.Context, reporter sources.ChunkReporter) error {
    if len(s.conn.Repositories) == 0 {
        return nil
    }
    totalRepos := len(s.conn.Repositories) + len(s.conn.Directories)
    for i, repoURI := range s.conn.Repositories {
        s.SetProgressComplete(i, totalRepos, fmt.Sprintf("Repo: %s", repoURI), "")
        if len(repoURI) == 0 {
            continue
        }
        if err := s.scanRepo(ctx, repoURI, reporter); err != nil {
            ctx.Logger().Info("error scanning repository", "repo", repoURI, "error", err)
            continue
        }
    }
    return nil
}

// scanRepo scans a single provided repository.
func (s *Source) scanRepo(ctx context.Context, repoURI string, reporter sources.ChunkReporter) error {
    var cloneFunc func() (string, *git.Repository, error)
    switch cred := s.conn.GetCredential().(type) {
    case *sourcespb.Git_BasicAuth:
        cloneFunc = func() (string, *git.Repository, error) {
            user := cred.BasicAuth.Username
            token := cred.BasicAuth.Password
            return CloneRepoUsingToken(ctx, token, repoURI, user)
        }
    case *sourcespb.Git_Unauthenticated:
        cloneFunc = func() (string, *git.Repository, error) {
            return CloneRepoUsingUnauthenticated(ctx, repoURI)
        }
    case *sourcespb.Git_SshAuth:
        cloneFunc = func() (string, *git.Repository, error) {
            return CloneRepoUsingSSH(ctx, repoURI)
        }
    default:
        return errors.New("invalid connection type for git source")
    }

    err := func() error {
        path, repo, err := cloneFunc()
        defer os.RemoveAll(path)
        if err != nil {
            return err
        }
        return s.git.ScanRepo(ctx, repo, path, s.scanOptions, reporter)
    }()
    if err != nil {
        return reporter.ChunkErr(ctx, err)
    }
    return nil
}

// scanDirs scans the configured directories in s.conn.Directories.
func (s *Source) scanDirs(ctx context.Context, reporter sources.ChunkReporter) error {
    totalRepos := len(s.conn.Repositories) + len(s.conn.Directories)
    for i, gitDir := range s.conn.Directories {
        s.SetProgressComplete(len(s.conn.Repositories)+i, totalRepos, fmt.Sprintf("Repo: %s", gitDir), "")

        if len(gitDir) == 0 {
            continue
        }
        if err := s.scanDir(ctx, gitDir, reporter); err != nil {
            ctx.Logger().Info("error scanning repository", "repo", gitDir, "error", err)
            continue
        }
    }
    return nil
}

// scanDir scans a single provided directory.
func (s *Source) scanDir(ctx context.Context, gitDir string, reporter sources.ChunkReporter) error {
    if !s.scanOptions.Bare && strings.HasSuffix(gitDir, "git") {
        // TODO: Figure out why we skip directories ending in "git".
        return nil
    }
    // Try opening the directory as a local path rather than a URL.
    repo, err := RepoFromPath(gitDir, s.scanOptions.Bare)
    if err != nil {
        return reporter.ChunkErr(ctx, err)
    }

    err = func() error {
        if strings.HasPrefix(gitDir, filepath.Join(os.TempDir(), "trufflehog")) {
            defer os.RemoveAll(gitDir)
        }

        return s.git.ScanRepo(ctx, repo, gitDir, s.scanOptions, reporter)
    }()
    if err != nil {
        return reporter.ChunkErr(ctx, err)
    }
    return nil
}

// RepoFromPath opens an existing repository at the given path. For non-bare
// repos, it detects the enclosing .git directory automatically.
func RepoFromPath(path string, isBare bool) (*git.Repository, error) {
    options := &git.PlainOpenOptions{}
    if !isBare {
        options.DetectDotGit = true
        options.EnableDotGitCommonDir = true
    }
    return git.PlainOpenWithOptions(path, options)
}
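
// For example, opening a normal working-copy checkout from any subdirectory:
//
//	repo, err := RepoFromPath("/path/to/checkout", false)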

// CleanOnError removes path if *err is non-nil.
func CleanOnError(err *error, path string) {
    if *err != nil {
        os.RemoveAll(path)
    }
}
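
// A usage sketch: pairing CleanOnError with a named error return ensures a
// partially created directory is removed whenever the surrounding function fails.
//
//	func buildWorkdir() (path string, err error) {
//		path, err = os.MkdirTemp("", "example")
//		if err != nil {
//			return "", err
//		}
//		defer CleanOnError(&err, path)
//		// ... work that may assign err ...
//		return path, err
//	}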

// GitURLParse parses a Git remote URL. If the input is not a standard URL, it
// retries by interpreting it as SCP-like syntax (user@host:path), rewriting it
// to an ssh:// URL.
func GitURLParse(gitURL string) (*url.URL, error) {
    parsedURL, originalError := url.Parse(gitURL)
    if originalError != nil {
        var err error
        gitURLBytes := []byte("ssh://" + gitURL)
        colonIndex := bytes.LastIndex(gitURLBytes, []byte(":"))
        gitURLBytes[colonIndex] = byte('/')
        parsedURL, err = url.Parse(string(gitURLBytes))
        if err != nil {
            return nil, originalError
        }
    }
    return parsedURL, nil
}
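
// For example, the SCP-like remote "git@github.com:org/repo.git" is not a
// valid URL on its own; it is rewritten to "ssh://git@github.com/org/repo.git"
// and parsed again, yielding host "github.com" and path "/org/repo.git".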

type cloneParams struct {
    userInfo  *url.Userinfo
    gitURL    string
    args      []string
    clonePath string
}

// CloneRepo orchestrates the cloning of a given Git repository, returning its local path
// and a git.Repository object for further operations. The function sets up error handling
// infrastructure, ensuring that any encountered errors trigger a cleanup of resources.
// The core cloning logic is delegated to a nested function, which returns errors to the
// outer function for centralized error handling and cleanup.
func CloneRepo(ctx context.Context, userInfo *url.Userinfo, gitURL string, args ...string) (string, *git.Repository, error) {
    clonePath, err := cleantemp.MkdirTemp()
    if err != nil {
        return "", nil, err
    }

    repo, err := executeClone(ctx, cloneParams{userInfo, gitURL, args, clonePath})
    if err != nil {
        // DO NOT FORGET TO CLEAN UP THE CLONE PATH HERE!!
        // If we don't, we'll end up with a bunch of orphaned directories in the temp dir.
        CleanOnError(&err, clonePath)
        return "", nil, err
    }

    return clonePath, repo, nil
}

// executeClone prepares the Git URL, constructs, and executes the git clone command using the provided
// clonePath. It then opens the cloned repository, returning a git.Repository object.
func executeClone(ctx context.Context, params cloneParams) (*git.Repository, error) {
    cloneURL, err := GitURLParse(params.gitURL)
    if err != nil {
        return nil, err
    }
    if cloneURL.User == nil {
        cloneURL.User = params.userInfo
    }

    gitArgs := []string{"clone", cloneURL.String(), params.clonePath}
    gitArgs = append(gitArgs, params.args...)
    cloneCmd := exec.Command("git", gitArgs...)

    safeURL, secretForRedaction, err := stripPassword(params.gitURL)
    if err != nil {
        ctx.Logger().V(1).Info("error stripping password from git url", "error", err)
    }
    logger := ctx.Logger().WithValues(
        "subcommand", "git clone",
        "repo", safeURL,
        "path", params.clonePath,
        "args", params.args,
    )

    // Execute the command and wait for its combined stdout / stderr output.
    outputBytes, err := cloneCmd.CombinedOutput()
    var output string
    if secretForRedaction != "" {
        output = strings.ReplaceAll(string(outputBytes), secretForRedaction, "<secret>")
    } else {
        output = string(outputBytes)
    }

    if err != nil {
        err = fmt.Errorf("error executing git clone: %w", err)
    }
    logger.V(3).Info("git subcommand finished", "output", output)

    if cloneCmd.ProcessState == nil {
        return nil, fmt.Errorf("clone command exited with no output")
    }
    if cloneCmd.ProcessState.ExitCode() != 0 {
        logger.V(1).Info("git clone failed", "output", output, "error", err)
        return nil, fmt.Errorf("could not clone repo: %s, %w", safeURL, err)
    }

    options := &git.PlainOpenOptions{DetectDotGit: true, EnableDotGitCommonDir: true}
    repo, err := git.PlainOpenWithOptions(params.clonePath, options)
    if err != nil {
        return nil, fmt.Errorf("could not open cloned repo: %w", err)
    }
    logger.V(1).Info("successfully cloned repo")

    return repo, nil
}

// PingRepoUsingToken executes git ls-remote on a repo and returns any error that occurs. It can be used to validate
// that a repo actually exists and is reachable.
//
// Pinging using other authentication methods is only unimplemented because there's been no pressing need for it yet.
func PingRepoUsingToken(ctx context.Context, token, gitUrl, user string) error {
    if err := CmdCheck(); err != nil {
        return err
    }
    lsUrl, err := GitURLParse(gitUrl)
    if err != nil {
        return err
    }
    if lsUrl.User == nil {
        lsUrl.User = url.UserPassword(user, token)
    }

    // We don't actually care about any refs on the remote, we just care whether we can list them at all. So we query
    // only for a ref that we know won't exist to minimize the search time on the remote. (By default, ls-remote exits
    // with 0 even if it doesn't find any matching refs.)
    fakeRef := "TRUFFLEHOG_CHECK_GIT_REMOTE_URL_REACHABILITY"
    gitArgs := []string{"ls-remote", lsUrl.String(), "--quiet", fakeRef}
    cmd := exec.Command("git", gitArgs...)
    _, err = cmd.CombinedOutput()
    return err
}
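
// A usage sketch (hypothetical values): checking reachability before
// committing to a full clone. The username is whatever the remote expects
// alongside the token.
//
//	if err := PingRepoUsingToken(ctx, token, "https://github.com/org/repo.git", user); err != nil {
//		return fmt.Errorf("repo unreachable: %w", err)
//	}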

// CloneRepoUsingToken clones a repo using a provided token.
func CloneRepoUsingToken(ctx context.Context, token, gitUrl, user string, args ...string) (string, *git.Repository, error) {
    userInfo := url.UserPassword(user, token)
    return CloneRepo(ctx, userInfo, gitUrl, args...)
}

// CloneRepoUsingUnauthenticated clones a repo with no authentication required.
func CloneRepoUsingUnauthenticated(ctx context.Context, url string, args ...string) (string, *git.Repository, error) {
    return CloneRepo(ctx, nil, url, args...)
}
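
// A usage sketch (hypothetical values): trailing args are passed through to
// the git CLI, so a shallow clone is just an extra pair of arguments.
//
//	path, repo, err := CloneRepoUsingToken(ctx, token, "https://github.com/org/repo.git", user, "--depth", "1")
//	if err != nil {
//		return err
//	}
//	defer os.RemoveAll(path)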

// CloneRepoUsingSSH clones a repo using SSH.
func CloneRepoUsingSSH(ctx context.Context, gitURL string, args ...string) (string, *git.Repository, error) {
    if isCodeCommitURL(gitURL) {
        return CloneRepo(ctx, nil, gitURL, args...)
    }
    userInfo := url.User("git")
    return CloneRepo(ctx, userInfo, gitURL, args...)
}

var codeCommitRE = regexp.MustCompile(`ssh://git-codecommit\.[\w-]+\.amazonaws\.com`)

func isCodeCommitURL(gitURL string) bool { return codeCommitRE.MatchString(gitURL) }
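
// For example, "ssh://git-codecommit.us-east-1.amazonaws.com/v1/repos/example"
// matches, so CloneRepoUsingSSH leaves the URL's user info alone (CodeCommit
// expects an SSH key ID as the user), while "ssh://github.com/org/repo.git"
// does not match and gets the default "git" user.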

// CommitsScanned returns the number of commits scanned so far.
func (s *Git) CommitsScanned() uint64 {
    return atomic.LoadUint64(&s.metrics.commitsScanned)
}

const gitDirName = ".git"

// ScanCommits walks the commit diffs produced by the gitparse parser, chunking
// commit metadata and diff content and reporting each chunk.
func (s *Git) ScanCommits(ctx context.Context, repo *git.Repository, path string, scanOptions *ScanOptions, reporter sources.ChunkReporter) error {
    // Get the remote URL for reporting (may be empty).
    remoteURL := getSafeRemoteURL(repo, "origin")
    var repoCtx context.Context
    if remoteURL != "" {
        repoCtx = context.WithValue(ctx, "repo", remoteURL)
    } else {
        repoCtx = context.WithValue(ctx, "repo", path)
    }

    logger := repoCtx.Logger()
    var logValues []any
    if scanOptions.BaseHash != "" {
        logValues = append(logValues, "base", scanOptions.BaseHash)
    }
    if scanOptions.HeadHash != "" {
        logValues = append(logValues, "head", scanOptions.HeadHash)
    }
    if scanOptions.MaxDepth > 0 {
        logValues = append(logValues, "max_depth", scanOptions.MaxDepth)
    }

    diffChan, err := s.parser.RepoPath(repoCtx, path, scanOptions.HeadHash, scanOptions.BaseHash == "", scanOptions.ExcludeGlobs, scanOptions.Bare)
    if err != nil {
        return err
    }
    if diffChan == nil {
        return nil
    }

    gitDir := filepath.Join(path, gitDirName)

    logger.V(1).Info("scanning repo", logValues...)

    var depth int64
    var lastCommitHash string
    for diff := range diffChan {
        if scanOptions.MaxDepth > 0 && depth >= scanOptions.MaxDepth {
            logger.V(1).Info("reached max depth", "depth", depth)
            break
        }

        commit := diff.Commit
        fullHash := commit.Hash
        if scanOptions.BaseHash != "" && scanOptions.BaseHash == fullHash {
            logger.V(1).Info("reached base commit", "commit", fullHash)
            break
        }

        email := commit.Author
        when := commit.Date.UTC().Format("2006-01-02 15:04:05 -0700")

        if fullHash != lastCommitHash {
            depth++
            lastCommitHash = fullHash
            atomic.AddUint64(&s.metrics.commitsScanned, 1)
            logger.V(5).Info("scanning commit", "commit", fullHash)

            // Scan the commit metadata.
            // See https://github.com/trufflesecurity/trufflehog/issues/2683
            var (
                metadata = s.sourceMetadataFunc("", email, fullHash, when, remoteURL, 0)
                sb       strings.Builder
            )
            sb.WriteString(email)
            sb.WriteString("\n")
            sb.WriteString(commit.Committer)
            sb.WriteString("\n")
            sb.WriteString(commit.Message.String())
            chunk := sources.Chunk{
                SourceName:     s.sourceName,
                SourceID:       s.sourceID,
                JobID:          s.jobID,
                SourceType:     s.sourceType,
                SourceMetadata: metadata,
                Data:           []byte(sb.String()),
                Verify:         s.verify,
            }
            if err := reporter.ChunkOk(ctx, chunk); err != nil {
                return err
            }
        }

        fileName := diff.PathB
        if fileName == "" {
            continue
        }

        if !scanOptions.Filter.Pass(fileName) {
            continue
        }

        // Handle binary files by reading the entire file rather than using the diff.
        if diff.IsBinary {
            metadata := s.sourceMetadataFunc(fileName, email, fullHash, when, remoteURL, 0)
            chunkSkel := &sources.Chunk{
                SourceName:     s.sourceName,
                SourceID:       s.sourceID,
                JobID:          s.jobID,
                SourceType:     s.sourceType,
                SourceMetadata: metadata,
                Verify:         s.verify,
            }

            commitHash := plumbing.NewHash(fullHash)
            if err := s.handleBinary(ctx, gitDir, reporter, chunkSkel, commitHash, fileName); err != nil {
                logger.V(1).Info(
                    "error handling binary file",
                    "error", err,
                    "filename", fileName,
                    "commit", commitHash,
                    "file", diff.PathB,
                )
            }
            continue
        }

        if diff.Len() > sources.ChunkSize+sources.PeekSize {
            s.gitChunk(ctx, diff, fileName, email, fullHash, when, remoteURL, reporter)
            continue
        }

        chunkData := func(d *gitparse.Diff) error {
            metadata := s.sourceMetadataFunc(fileName, email, fullHash, when, remoteURL, int64(diff.LineStart))

            reader, err := d.ReadCloser()
            if err != nil {
                ctx.Logger().Error(
                    err, "error creating reader for commits",
                    "filename", fileName,
                    "commit", fullHash,
                    "file", diff.PathB,
                )
                return nil
            }
            defer reader.Close()

            data := make([]byte, d.Len())
            if _, err := io.ReadFull(reader, data); err != nil {
                ctx.Logger().Error(
                    err, "error reading diff content for commit",
                    "filename", fileName,
                    "commit", fullHash,
                    "file", diff.PathB,
                )
                return nil
            }
            chunk := sources.Chunk{
                SourceName:     s.sourceName,
                SourceID:       s.sourceID,
                JobID:          s.jobID,
                SourceType:     s.sourceType,
                SourceMetadata: metadata,
                Data:           data,
                Verify:         s.verify,
            }
            return reporter.ChunkOk(ctx, chunk)
        }
        if err := chunkData(diff); err != nil {
            return err
        }
    }
    return nil
}

// gitChunk splits an oversized diff into line-aligned fragments no larger than
// sources.ChunkSize, reporting each fragment as its own chunk.
func (s *Git) gitChunk(ctx context.Context, diff *gitparse.Diff, fileName, email, hash, when, urlMetadata string, reporter sources.ChunkReporter) {
    reader, err := diff.ReadCloser()
    if err != nil {
        ctx.Logger().Error(err, "error creating reader for chunk", "filename", fileName, "commit", hash, "file", diff.PathB)
        return
    }
    defer reader.Close()

    originalChunk := bufio.NewScanner(reader)
    newChunkBuffer := bytes.Buffer{}
    lastOffset := 0
    for offset := 0; originalChunk.Scan(); offset++ {
        line := make([]byte, len(originalChunk.Bytes())+1)
        copy(line, originalChunk.Bytes())
        line[len(line)-1] = byte('\n')
        if len(line) > sources.ChunkSize || len(line)+newChunkBuffer.Len() > sources.ChunkSize {
            // Add oversize chunk info
            if newChunkBuffer.Len() > 0 {
                // Send the existing fragment.
                metadata := s.sourceMetadataFunc(fileName, email, hash, when, urlMetadata, int64(diff.LineStart+lastOffset))
                chunk := sources.Chunk{
                    SourceName:     s.sourceName,
                    SourceID:       s.sourceID,
                    JobID:          s.jobID,
                    SourceType:     s.sourceType,
                    SourceMetadata: metadata,
                    Data:           append([]byte{}, newChunkBuffer.Bytes()...),
                    Verify:         s.verify,
                }
                if err := reporter.ChunkOk(ctx, chunk); err != nil {
                    // TODO: Return error.
                    return
                }

                newChunkBuffer.Reset()
                lastOffset = offset
            }
            if len(line) > sources.ChunkSize {
                // Send the oversize line.
                metadata := s.sourceMetadataFunc(fileName, email, hash, when, urlMetadata, int64(diff.LineStart+offset))
                chunk := sources.Chunk{
                    SourceName:     s.sourceName,
                    SourceID:       s.sourceID,
                    JobID:          s.jobID,
                    SourceType:     s.sourceType,
                    SourceMetadata: metadata,
                    Data:           line,
                    Verify:         s.verify,
                }
                if err := reporter.ChunkOk(ctx, chunk); err != nil {
                    // TODO: Return error.
                    return
                }
                continue
            }
        }

        if _, err := newChunkBuffer.Write(line); err != nil {
            ctx.Logger().Error(err, "error writing to chunk buffer", "filename", fileName, "commit", hash, "file", diff.PathB)
        }
    }
    // Send anything still in the new chunk buffer
    if newChunkBuffer.Len() > 0 {
        metadata := s.sourceMetadataFunc(fileName, email, hash, when, urlMetadata, int64(diff.LineStart+lastOffset))
        chunk := sources.Chunk{
            SourceName:     s.sourceName,
            SourceID:       s.sourceID,
            JobID:          s.jobID,
            SourceType:     s.sourceType,
            SourceMetadata: metadata,
            Data:           append([]byte{}, newChunkBuffer.Bytes()...),
            Verify:         s.verify,
        }
        if err := reporter.ChunkOk(ctx, chunk); err != nil {
            // TODO: Return error.
            return
        }
    }
}

// ScanStaged chunks staged changes.
func (s *Git) ScanStaged(ctx context.Context, repo *git.Repository, path string, scanOptions *ScanOptions, reporter sources.ChunkReporter) error {
    // Get the URL metadata for reporting (may be empty).
    urlMetadata := getSafeRemoteURL(repo, "origin")

    diffChan, err := s.parser.Staged(ctx, path)
    if err != nil {
        return err
    }
    if diffChan == nil {
        return nil
    }

    reachedBase := false
    gitDir := filepath.Join(path, gitDirName)

    logger := ctx.Logger()
    var logValues []any
    if scanOptions.BaseHash != "" {
        logValues = append(logValues, "base", scanOptions.BaseHash)
    }
    if scanOptions.HeadHash != "" {
        logValues = append(logValues, "head", scanOptions.HeadHash)
    }
    if scanOptions.MaxDepth > 0 {
        logValues = append(logValues, "max_depth", scanOptions.MaxDepth)
    }
    logger.V(1).Info("scanning repo", logValues...)

    ctx.Logger().V(1).Info("scanning staged changes", "path", path)

    var depth int64
    var lastCommitHash string
    for diff := range diffChan {
        fullHash := diff.Commit.Hash
        logger := ctx.Logger().WithValues("filename", diff.PathB, "commit", fullHash, "file", diff.PathB)
        logger.V(2).Info("scanning staged changes from git")

        if scanOptions.MaxDepth > 0 && depth >= scanOptions.MaxDepth {
            logger.V(1).Info("reached max depth")
            break
        }

        if fullHash != lastCommitHash {
            depth++
            lastCommitHash = fullHash
            atomic.AddUint64(&s.metrics.commitsScanned, 1)
        }

        if reachedBase && fullHash != scanOptions.BaseHash {
            break
        }

        if scanOptions.BaseHash != "" && fullHash == scanOptions.BaseHash {
            logger.V(1).Info("reached base hash, finishing scanning files")
            reachedBase = true
        }

        if !scanOptions.Filter.Pass(diff.PathB) {
            continue
        }

        fileName := diff.PathB
        if fileName == "" {
            continue
        }

        email := diff.Commit.Author
        when := diff.Commit.Date.UTC().Format("2006-01-02 15:04:05 -0700")

        // Handle binary files by reading the entire file rather than using the diff.
        if diff.IsBinary {
            commitHash := plumbing.NewHash(fullHash)
            metadata := s.sourceMetadataFunc(fileName, email, "Staged", when, urlMetadata, 0)
            chunkSkel := &sources.Chunk{
                SourceName:     s.sourceName,
                SourceID:       s.sourceID,
                JobID:          s.jobID,
                SourceType:     s.sourceType,
                SourceMetadata: metadata,
                Verify:         s.verify,
            }
            if err := s.handleBinary(ctx, gitDir, reporter, chunkSkel, commitHash, fileName); err != nil {
                logger.V(1).Info("error handling binary file", "error", err, "filename", fileName)
            }
            continue
        }

        chunkData := func(d *gitparse.Diff) error {
            metadata := s.sourceMetadataFunc(fileName, email, "Staged", when, urlMetadata, int64(diff.LineStart))

            reader, err := d.ReadCloser()
            if err != nil {
                ctx.Logger().Error(
                    err, "error creating reader for staged",
                    "filename", fileName,
                    "commit", fullHash,
                    "file", diff.PathB,
                )
                return nil
            }
            defer reader.Close()

            data := make([]byte, d.Len())
            // Use io.ReadFull so a short read cannot leave the chunk partially
            // filled, matching the commit-scanning path above.
            if _, err := io.ReadFull(reader, data); err != nil {
                ctx.Logger().Error(
                    err, "error reading diff content for staged",
                    "filename", fileName,
                    "commit", fullHash,
                    "file", diff.PathB,
                )
                return nil
            }
            chunk := sources.Chunk{
                SourceName:     s.sourceName,
                SourceID:       s.sourceID,
                JobID:          s.jobID,
                SourceType:     s.sourceType,
                SourceMetadata: metadata,
                Data:           data,
                Verify:         s.verify,
            }
            return reporter.ChunkOk(ctx, chunk)
        }
        if err := chunkData(diff); err != nil {
            return err
        }
    }
    return nil
}

// ScanRepo scans a repository's commit history and, for non-bare repos, its staged changes.
func (s *Git) ScanRepo(ctx context.Context, repo *git.Repository, repoPath string, scanOptions *ScanOptions, reporter sources.ChunkReporter) error {
    if scanOptions == nil {
        scanOptions = NewScanOptions()
    }
    if err := normalizeConfig(scanOptions, repo); err != nil {
        return err
    }
    start := time.Now().Unix()

    if err := s.ScanCommits(ctx, repo, repoPath, scanOptions, reporter); err != nil {
        return err
    }
    if !scanOptions.Bare {
        if err := s.ScanStaged(ctx, repo, repoPath, scanOptions, reporter); err != nil {
            ctx.Logger().V(1).Info("error scanning staged changes", "error", err)
        }
    }

    logger := ctx.Logger()
    // We're logging time, but the repoPath is usually a dynamically generated folder in /tmp.
    // To make this duration logging useful, we need to log the remote as well.
    // Other sources may have already added this info to the context, in which case we don't need to add it again.
    if ctx.Value("repo") == nil {
        remotes, _ := repo.Remotes()
        repoURL := "Could not get remote for repo"
        if len(remotes) != 0 {
            repoURL = getSafeRemoteURL(repo, remotes[0].Config().Name)
        }
        logger = logger.WithValues("repo", repoURL)
    }

    scanTime := time.Now().Unix() - start
    logger.V(1).Info(
        "scanning git repo complete",
        "path", repoPath,
        "time_seconds", scanTime,
        "commits_scanned", atomic.LoadUint64(&s.metrics.commitsScanned),
    )
    return nil
}

// normalizeConfig updates scanOptions with the resolved base and head commit hashes.
// It's designed to handle scenarios where BaseHash and HeadHash in scanOptions might be branch names or
// other non-hash references. This ensures that both the base and head commits are resolved to actual commit hashes.
// If either commit cannot be resolved, it returns early.
// If both are resolved, it finds and sets the merge base in scanOptions.
func normalizeConfig(scanOptions *ScanOptions, repo *git.Repository) error {
    baseCommit, baseSet, err := resolveAndSetCommit(repo, &scanOptions.BaseHash)
    if err != nil {
        return err
    }

    headCommit, headSet, err := resolveAndSetCommit(repo, &scanOptions.HeadHash)
    if err != nil {
        return err
    }

    if !(baseSet && headSet) {
        return nil
    }

    // If baseCommit is an ancestor of headCommit, update scanOptions.BaseHash to be their common ancestor.
    mergeBase, err := headCommit.MergeBase(baseCommit)
    if err != nil {
        return fmt.Errorf("unable to resolve merge base: %w", err)
    }
    if len(mergeBase) == 0 {
        return fmt.Errorf("unable to resolve merge base: no merge base found")
    }

    scanOptions.BaseHash = mergeBase[0].Hash.String()

    return nil
}
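
// For example, with BaseHash "main" and HeadHash "feature-branch", both refs
// resolve to commits and BaseHash is replaced by their merge base, so the scan
// covers only the commits introduced on the feature branch.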

// resolveAndSetCommit resolves a Git reference to a commit object and updates the reference if it was not a direct hash.
// Returns the commit object, a boolean indicating whether the reference was updated in place, and any error encountered.
func resolveAndSetCommit(repo *git.Repository, ref *string) (*object.Commit, bool, error) {
    if repo == nil || ref == nil {
        return nil, false, fmt.Errorf("repo and ref must be non-nil")
    }
    if len(*ref) == 0 {
        return nil, false, nil
    }

    originalRef := *ref
    resolvedRef, err := resolveHash(repo, originalRef)
    if err != nil {
        return nil, false, fmt.Errorf("unable to resolve ref: %w", err)
    }

    commit, err := repo.CommitObject(plumbing.NewHash(resolvedRef))
    if err != nil {
        return nil, false, fmt.Errorf("unable to resolve commit: %w", err)
    }

    wasSet := originalRef != resolvedRef
    if wasSet {
        *ref = resolvedRef
    }

    return commit, wasSet, nil
}

// resolveHash returns ref unchanged if it is already a hash; otherwise it
// resolves ref as a branch or remote ref and returns the resulting hash.
func resolveHash(repo *git.Repository, ref string) (string, error) {
    if plumbing.IsHash(ref) {
        return ref, nil
    }

    resolved, err := TryAdditionalBaseRefs(repo, ref)
    if err != nil {
        return "", err
    }
    return resolved.String(), nil
}

// stripPassword removes username:password contents from URLs. The first return value is the cleaned URL and the second
// is the password that was removed, if any. Callers can therefore use this function to identify secret material to
// redact elsewhere. If the argument begins with git@, it is returned unchanged, and the returned password is the empty
// string. If the argument is otherwise not parseable by url.Parse, an error is returned.
func stripPassword(u string) (string, string, error) {
    if strings.HasPrefix(u, "git@") {
        return u, "", nil
    }

    repoURL, err := url.Parse(u)
    if err != nil {
        return "", "", fmt.Errorf("repo remote is not a URI: %w", err)
    }

    password, _ := repoURL.User.Password()
    repoURL.User = nil

    return repoURL.String(), password, nil
}
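
// For example:
//
//	safeURL, secret, _ := stripPassword("https://user:hunter2@github.com/org/repo.git")
//	// safeURL == "https://github.com/org/repo.git", secret == "hunter2"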

// TryAdditionalBaseRefs looks for additional possible base refs for a repo and returns a hash if found.
func TryAdditionalBaseRefs(repo *git.Repository, base string) (*plumbing.Hash, error) {
    revisionPrefixes := []string{
        "",
        "refs/heads/",
        "refs/remotes/origin/",
    }
    for _, prefix := range revisionPrefixes {
        outHash, err := repo.ResolveRevision(plumbing.Revision(prefix + base))
        if errors.Is(err, plumbing.ErrReferenceNotFound) {
            continue
        }
        if err != nil {
            return nil, err
        }
        return outHash, nil
    }

    return nil, fmt.Errorf("no base refs succeeded for base: %q", base)
}
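
// For example, given base "main", the prefixes above are tried in order as
// "main", "refs/heads/main", and "refs/remotes/origin/main", and the first
// revision that resolves is returned.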

// prepareRepoSinceCommit clones a repo starting at the given commitHash and returns the cloned repo path.
func prepareRepoSinceCommit(ctx context.Context, uriString, commitHash string) (string, bool, error) {
    if commitHash == "" {
        return PrepareRepo(ctx, uriString)
    }
    // TODO: refactor with PrepareRepo to remove duplicated logic

    // The git CLI doesn't have an option to shallow clone starting at a commit
    // hash, but it does have an option to shallow clone since a timestamp. If
    // the uriString is github.com, then we query the API for the timestamp of the
    // hash and use that to clone.

    uri, err := GitURLParse(uriString)
    if err != nil {
        return "", false, fmt.Errorf("unable to parse Git URI: %s", err)
    }

    if uri.Scheme == "file" || uri.Host != "github.com" {
        return PrepareRepo(ctx, uriString)
    }

    uriPath := strings.TrimPrefix(uri.Path, "/")
    owner, repoName, found := strings.Cut(uriPath, "/")
    if !found {
        return PrepareRepo(ctx, uriString)
    }

    client := github.NewClient(nil)
    if token := os.Getenv("GITHUB_TOKEN"); token != "" {
        ts := oauth2.StaticTokenSource(
            &oauth2.Token{AccessToken: token},
        )
        tc := oauth2.NewClient(ctx, ts)
        client = github.NewClient(tc)
    }

    commit, _, err := client.Git.GetCommit(context.Background(), owner, repoName, commitHash)
    if err != nil {
        return PrepareRepo(ctx, uriString)
    }
    var timestamp string
    {
        author := commit.GetAuthor()
        if author == nil {
            return PrepareRepo(ctx, uriString)
        }
        timestamp = author.GetDate().Format(time.RFC3339)
    }

    remotePath := uri.String()
    var path string
    switch {
    case uri.User != nil:
        ctx.Logger().V(1).Info("cloning repo with authentication", "uri", uri.Redacted())
        password, ok := uri.User.Password()
        if !ok {
            return "", true, fmt.Errorf("password must be included in Git repo URL when username is provided")
        }
        path, _, err = CloneRepoUsingToken(ctx, password, remotePath, uri.User.Username(), "--shallow-since", timestamp)
        if err != nil {
            return path, true, fmt.Errorf("failed to clone authenticated Git repo (%s): %s", uri.Redacted(), err)
        }
    default:
        ctx.Logger().V(1).Info("cloning repo without authentication", "uri", uri)
        path, _, err = CloneRepoUsingUnauthenticated(ctx, remotePath, "--shallow-since", timestamp)
        if err != nil {
            return path, true, fmt.Errorf("failed to clone unauthenticated Git repo (%s): %s", remotePath, err)
        }
    }

    ctx.Logger().V(1).Info("cloned repo", "path", path)
    return path, true, nil
}

// PrepareRepo clones a repo if possible and returns the cloned repo path.
func PrepareRepo(ctx context.Context, uriString string) (string, bool, error) {
    var path string
    uri, err := GitURLParse(uriString)
    if err != nil {
        return "", false, fmt.Errorf("unable to parse Git URI: %s", err)
    }

    remote := false
    switch uri.Scheme {
    case "file":
        path = fmt.Sprintf("%s%s", uri.Host, uri.Path)
    case "http", "https":
        remotePath := uri.String()
        remote = true
        switch {
        case uri.User != nil:
            ctx.Logger().V(1).Info("cloning repo with authentication", "uri", uri.Redacted())
            password, ok := uri.User.Password()
            if !ok {
                return "", remote, fmt.Errorf("password must be included in Git repo URL when username is provided")
            }
            path, _, err = CloneRepoUsingToken(ctx, password, remotePath, uri.User.Username())
            if err != nil {
                return path, remote, fmt.Errorf("failed to clone authenticated Git repo (%s): %s", uri.Redacted(), err)
            }
        default:
            ctx.Logger().V(1).Info("cloning repo without authentication", "uri", uri)
            path, _, err = CloneRepoUsingUnauthenticated(ctx, remotePath)
            if err != nil {
                return path, remote, fmt.Errorf("failed to clone unauthenticated Git repo (%s): %s", remotePath, err)
            }
        }
    case "ssh":
        remotePath := uri.String()
        remote = true
        path, _, err = CloneRepoUsingSSH(ctx, remotePath)
        if err != nil {
            return path, remote, fmt.Errorf("failed to clone Git repo over SSH (%s): %s", remotePath, err)
        }
    default:
        return "", remote, fmt.Errorf("unsupported Git URI: %s", uriString)
    }

    ctx.Logger().V(1).Info("cloned repo", "path", path)
    return path, remote, nil
}

// getSafeRemoteURL is a helper function that will attempt to get a safe URL first
// from the preferred remote name, falling back to the first remote name
// available, or an empty string if there are no remotes.
func getSafeRemoteURL(repo *git.Repository, preferred string) string {
    remote, err := repo.Remote(preferred)
    if err != nil {
        var remotes []*git.Remote
        if remotes, err = repo.Remotes(); err != nil {
            return ""
        }
        if len(remotes) == 0 {
            return ""
        }
        remote = remotes[0]
    }
    // URLs is guaranteed to be non-empty
    safeURL, _, err := stripPassword(remote.Config().URLs[0])
    if err != nil {
        return ""
    }
    return safeURL
}

// handleBinary reads a binary blob out of the object database with git
// cat-file and routes its contents through the file handlers.
func (s *Git) handleBinary(ctx context.Context, gitDir string, reporter sources.ChunkReporter, chunkSkel *sources.Chunk, commitHash plumbing.Hash, path string) error {
    fileCtx := context.WithValues(ctx, "commit", commitHash.String()[:7], "path", path)
    fileCtx.Logger().V(5).Info("handling binary file")

    if common.SkipFile(path) {
        fileCtx.Logger().V(5).Info("file contains ignored extension")
        return nil
    }

    if s.skipBinaries {
        fileCtx.Logger().V(5).Info("skipping binary file", "path", path)
        return nil
    }

    cmd := exec.Command("git", "-C", gitDir, "cat-file", "blob", commitHash.String()+":"+path)

    var stderr bytes.Buffer
    cmd.Stderr = &stderr

    stdout, err := cmd.Output()
    if err != nil {
        return fmt.Errorf("error running git cat-file: %w\n%s", err, stderr.Bytes())
    }

    return handlers.HandleFile(fileCtx, bytes.NewReader(stdout), chunkSkel, reporter, handlers.WithSkipArchives(s.skipArchives))
}

// Enumerate reports every configured directory and repository as a source unit.
func (s *Source) Enumerate(ctx context.Context, reporter sources.UnitReporter) error {
    for _, repo := range s.conn.GetDirectories() {
        if repo == "" {
            continue
        }
        unit := SourceUnit{ID: repo, Kind: UnitDir}
        if err := reporter.UnitOk(ctx, unit); err != nil {
            return err
        }
    }
    for _, repo := range s.conn.GetRepositories() {
        if repo == "" {
            continue
        }
        unit := SourceUnit{ID: repo, Kind: UnitRepo}
        if err := reporter.UnitOk(ctx, unit); err != nil {
            return err
        }
    }
    return nil
}

// ChunkUnit scans a single unit produced by Enumerate.
func (s *Source) ChunkUnit(ctx context.Context, unit sources.SourceUnit, reporter sources.ChunkReporter) error {
    unitID, kind := unit.SourceUnitID()

    switch kind {
    case UnitRepo:
        return s.scanRepo(ctx, unitID, reporter)
    case UnitDir:
        return s.scanDir(ctx, unitID, reporter)
    default:
        return fmt.Errorf("unexpected git unit kind: %q", kind)
    }
}

func (s *Source) UnmarshalSourceUnit(data []byte) (sources.SourceUnit, error) {
    return UnmarshalUnit(data)
}