Merge branch 'main' into impl-analyzer-tests-huggingface-stripe-airbrake

Abdul Basit, 2024-09-13 20:17:02 +05:00, committed by GitHub
commit e574f78a37
51 changed files with 3289 additions and 614 deletions

go.mod

@ -39,7 +39,7 @@ require (
github.com/elastic/go-elasticsearch/v8 v8.14.0
github.com/envoyproxy/protoc-gen-validate v1.1.0
github.com/fatih/color v1.17.0
github.com/felixge/fgprof v0.9.4
github.com/felixge/fgprof v0.9.5
github.com/gabriel-vasile/mimetype v1.4.5
github.com/getsentry/sentry-go v0.28.1
github.com/go-errors/errors v1.5.1
@ -78,7 +78,7 @@ require (
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/paulbellamy/ratecounter v0.2.0
github.com/pkg/errors v0.9.1
github.com/prometheus/client_golang v1.20.1
github.com/prometheus/client_golang v1.20.3
github.com/rabbitmq/amqp091-go v1.10.0
github.com/sassoftware/go-rpmutils v0.4.0
github.com/schollz/progressbar/v3 v3.14.6

go.sum

@ -263,6 +263,8 @@ github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
github.com/felixge/fgprof v0.9.4 h1:ocDNwMFlnA0NU0zSB3I52xkO4sFXk80VK9lXjLClu88=
github.com/felixge/fgprof v0.9.4/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM=
github.com/felixge/fgprof v0.9.5 h1:8+vR6yu2vvSKn08urWyEuxx75NWPEvybbkBirEpsbVY=
github.com/felixge/fgprof v0.9.5/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8=
@ -635,6 +637,8 @@ github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4
github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
github.com/prometheus/client_golang v1.20.1 h1:IMJXHOD6eARkQpxo8KkhgEVFlBNm+nkrFUyGlIu7Na8=
github.com/prometheus/client_golang v1.20.1/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
github.com/prometheus/client_golang v1.20.3 h1:oPksm4K8B+Vt35tUhw6GbSNSgVlVSBH0qELP/7u83l4=
github.com/prometheus/client_golang v1.20.3/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=


@ -252,7 +252,7 @@ func AnalyzePermissions(cfg *config.Config, key string) (*SecretInfo, error) {
}
func convertScopeToAnalyzerPermissions(scopes []string) []analyzers.Permission {
permissions := make([]analyzers.Permission, len(scopes))
permissions := make([]analyzers.Permission, 0, len(scopes))
for _, scope := range scopes {
permissions = append(permissions, analyzers.Permission{Value: scope})
}
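For context (not part of the diff): the one-line change above fixes a common Go slice pitfall. Allocating the slice with a non-zero length and then appending leaves zero-valued elements at the front; allocating with zero length and a capacity fills from the start. A minimal illustrative sketch:

package main

import "fmt"

func main() {
	// length 3: starts as three zero values, append adds after them
	withLen := make([]int, 3)
	withLen = append(withLen, 1, 2, 3)
	fmt.Println(withLen) // [0 0 0 1 2 3]

	// length 0, capacity 3: append fills from the start with no reallocation
	withCap := make([]int, 0, 3)
	withCap = append(withCap, 1, 2, 3)
	fmt.Println(withCap) // [1 2 3]
}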


@ -0,0 +1,25 @@
{
"AnalyzerType": 8,
"Bindings": [
{
"Resource": {
"Name": "sandbox19e49763d44e498e850589ea7d54bd82.mailgun.org",
"FullyQualifiedName": "mailgun/6478cb31d026c112819856cd/sandbox19e49763d44e498e850589ea7d54bd82.mailgun.org",
"Type": "domain",
"Metadata": {
"created_at": "Thu, 01 Jun 2023 16:45:37 GMT",
"is_disabled": false,
"state": "active",
"type": "sandbox"
},
"Parent": null
},
"Permission": {
"Value": "full_access",
"Parent": null
}
}
],
"UnboundedResources": null,
"Metadata": null
}


@ -1,7 +1,9 @@
//go:generate generate_permissions permissions.yaml permissions.go mailgun
package mailgun
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"os"
@ -24,14 +26,50 @@ type Analyzer struct {
func (Analyzer) Type() analyzerpb.AnalyzerType { return analyzerpb.AnalyzerType_Mailgun }
func (a Analyzer) Analyze(_ context.Context, credInfo map[string]string) (*analyzers.AnalyzerResult, error) {
_, err := AnalyzePermissions(a.Cfg, credInfo["key"])
key, ok := credInfo["key"]
if !ok {
return nil, errors.New("key not found in credentialInfo")
}
info, err := AnalyzePermissions(a.Cfg, key)
if err != nil {
return nil, err
}
return nil, fmt.Errorf("not implemented")
return secretInfoToAnalyzerResult(info), nil
}
func secretInfoToAnalyzerResult(info *DomainsJSON) *analyzers.AnalyzerResult {
if info == nil {
return nil
}
result := analyzers.AnalyzerResult{
AnalyzerType: analyzerpb.AnalyzerType_Mailgun,
Bindings: make([]analyzers.Binding, len(info.Items)),
}
for idx, domain := range info.Items {
result.Bindings[idx] = analyzers.Binding{
Resource: analyzers.Resource{
Name: domain.URL,
FullyQualifiedName: "mailgun/" + domain.ID + "/" + domain.URL,
Type: "domain",
Metadata: map[string]any{
"created_at": domain.CreatedAt,
"type": domain.Type,
"state": domain.State,
"is_disabled": domain.IsDisabled,
},
},
Permission: analyzers.Permission{
Value: PermissionStrings[FullAccess],
},
}
}
return &result
}
type Domain struct {
ID string `json:"id"`
URL string `json:"name"`
IsDisabled bool `json:"is_disabled"`
Type string `json:"type"`


@ -0,0 +1,100 @@
package mailgun
import (
_ "embed"
"encoding/json"
"sort"
"testing"
"time"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/analyzers"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/config"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
)
//go:embed expected_output.json
var expectedOutput []byte
func TestAnalyzer_Analyze(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
defer cancel()
testSecrets, err := common.GetSecret(ctx, "trufflehog-testing", "detectors5")
if err != nil {
t.Fatalf("could not get test secrets from GCP: %s", err)
}
tests := []struct {
name string
key string
want string // JSON string
wantErr bool
}{
{
name: "valid Mailgun key",
key: testSecrets.MustGetField("NEW_MAILGUN_TOKEN_ACTIVE"),
want: string(expectedOutput),
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a := Analyzer{Cfg: &config.Config{}}
got, err := a.Analyze(ctx, map[string]string{"key": tt.key})
if (err != nil) != tt.wantErr {
t.Errorf("Analyzer.Analyze() error = %v, wantErr %v", err, tt.wantErr)
return
}
// bindings need to be in the same order to be comparable
sortBindings(got.Bindings)
// Marshal the actual result to JSON
gotJSON, err := json.Marshal(got)
if err != nil {
t.Fatalf("could not marshal got to JSON: %s", err)
}
// Parse the expected JSON string
var wantObj analyzers.AnalyzerResult
if err := json.Unmarshal([]byte(tt.want), &wantObj); err != nil {
t.Fatalf("could not unmarshal want JSON string: %s", err)
}
// bindings need to be in the same order to be comparable
sortBindings(wantObj.Bindings)
// Marshal the expected result to JSON (to normalize)
wantJSON, err := json.Marshal(wantObj)
if err != nil {
t.Fatalf("could not marshal want to JSON: %s", err)
}
// Compare the JSON strings
if string(gotJSON) != string(wantJSON) {
// Pretty-print both JSON strings for easier comparison
var gotIndented, wantIndented []byte
gotIndented, err = json.MarshalIndent(got, "", " ")
if err != nil {
t.Fatalf("could not marshal got to indented JSON: %s", err)
}
wantIndented, err = json.MarshalIndent(wantObj, "", " ")
if err != nil {
t.Fatalf("could not marshal want to indented JSON: %s", err)
}
t.Errorf("Analyzer.Analyze() = %s, want %s", gotIndented, wantIndented)
}
})
}
}
// Helper function to sort bindings
func sortBindings(bindings []analyzers.Binding) {
sort.SliceStable(bindings, func(i, j int) bool {
if bindings[i].Resource.Name == bindings[j].Resource.Name {
return bindings[i].Permission.Value < bindings[j].Permission.Value
}
return bindings[i].Resource.Name < bindings[j].Resource.Name
})
}


@ -0,0 +1,71 @@
// Code generated by go generate; DO NOT EDIT.
package mailgun
import "errors"
type Permission int
const (
Invalid Permission = iota
Read Permission = iota
Write Permission = iota
FullAccess Permission = iota
)
var (
PermissionStrings = map[Permission]string{
Read: "read",
Write: "write",
FullAccess: "full_access",
}
StringToPermission = map[string]Permission{
"read": Read,
"write": Write,
"full_access": FullAccess,
}
PermissionIDs = map[Permission]int{
Read: 1,
Write: 2,
FullAccess: 3,
}
IdToPermission = map[int]Permission{
1: Read,
2: Write,
3: FullAccess,
}
)
// ToString converts a Permission enum to its string representation
func (p Permission) ToString() (string, error) {
if str, ok := PermissionStrings[p]; ok {
return str, nil
}
return "", errors.New("invalid permission")
}
// ToID converts a Permission enum to its ID
func (p Permission) ToID() (int, error) {
if id, ok := PermissionIDs[p]; ok {
return id, nil
}
return 0, errors.New("invalid permission")
}
// PermissionFromString converts a string representation to its Permission enum
func PermissionFromString(s string) (Permission, error) {
if p, ok := StringToPermission[s]; ok {
return p, nil
}
return 0, errors.New("invalid permission string")
}
// PermissionFromID converts an ID to its Permission enum
func PermissionFromID(id int) (Permission, error) {
if p, ok := IdToPermission[id]; ok {
return p, nil
}
return 0, errors.New("invalid permission ID")
}
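Illustrative only (not in the diff): a quick sketch of how the generated helpers round-trip a permission, assuming the package lives at pkg/analyzer/analyzers/mailgun; the example name and import path are assumptions.

package mailgun_test

import (
	"fmt"

	"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/analyzers/mailgun"
)

func ExamplePermissionFromString() {
	// Round-trip "full_access" through the generated maps.
	perm, _ := mailgun.PermissionFromString("full_access")
	name, _ := perm.ToString()
	id, _ := perm.ToID()
	fmt.Println(name, id)
	// Output: full_access 3
}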


@ -0,0 +1,4 @@
permissions:
- read
- write
- full_access

File diff suppressed because one or more lines are too long


@ -1,3 +1,5 @@
//go:generate generate_permissions permissions.yaml permissions.go mysql
package mysql
import (
@ -17,8 +19,189 @@ import (
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/analyzers"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/config"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/pb/analyzerpb"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
)
var _ analyzers.Analyzer = (*Analyzer)(nil)
type Analyzer struct {
Cfg *config.Config
}
func (Analyzer) Type() analyzerpb.AnalyzerType { return analyzerpb.AnalyzerType_MySQL }
func (a Analyzer) Analyze(_ context.Context, credInfo map[string]string) (*analyzers.AnalyzerResult, error) {
uri, ok := credInfo["connection_string"]
if !ok {
return nil, fmt.Errorf("missing connection string")
}
info, err := AnalyzePermissions(a.Cfg, uri)
if err != nil {
return nil, err
}
return secretInfoToAnalyzerResult(info), nil
}
func secretInfoToAnalyzerResult(info *SecretInfo) *analyzers.AnalyzerResult {
if info == nil {
return nil
}
result := analyzers.AnalyzerResult{
AnalyzerType: analyzerpb.AnalyzerType_MySQL,
Metadata: nil,
Bindings: []analyzers.Binding{},
}
// add user privileges to bindings
userBindings, userResource := bakeUserBindings(info)
result.Bindings = append(result.Bindings, userBindings...)
// add user's database privileges to bindings
databaseBindings := bakeDatabaseBindings(userResource, info)
result.Bindings = append(result.Bindings, databaseBindings...)
return &result
}
func bakeUserBindings(info *SecretInfo) ([]analyzers.Binding, *analyzers.Resource) {
var userBindings []analyzers.Binding
// add the user and their privileges to bindings
userResource := analyzers.Resource{
Name: info.User,
FullyQualifiedName: info.Host + "/" + info.User,
Type: "user",
}
for _, priv := range info.GlobalPrivs.Privs {
userBindings = append(userBindings, analyzers.Binding{
Resource: userResource,
Permission: analyzers.Permission{
Value: priv,
},
})
}
return userBindings, &userResource
}
func bakeDatabaseBindings(userResource *analyzers.Resource, info *SecretInfo) []analyzers.Binding {
var databaseBindings []analyzers.Binding
for _, database := range info.Databases {
dbResource := analyzers.Resource{
Name: database.Name,
FullyQualifiedName: info.Host + "/" + database.Name,
Type: "database",
Metadata: map[string]any{
"default": database.Default,
"non_existent": database.Nonexistent,
},
Parent: userResource,
}
for _, priv := range database.Privs {
databaseBindings = append(databaseBindings, analyzers.Binding{
Resource: dbResource,
Permission: analyzers.Permission{
Value: priv,
},
})
}
// add this database's table privileges to bindings
tableBindings := bakeTableBindings(&dbResource, database)
databaseBindings = append(databaseBindings, tableBindings...)
// add this database's routine privileges to bindings
routineBindings := bakeRoutineBindings(&dbResource, database)
databaseBindings = append(databaseBindings, routineBindings...)
}
return databaseBindings
}
func bakeTableBindings(dbResource *analyzers.Resource, database *Database) []analyzers.Binding {
if database.Tables == nil {
return nil
}
var tableBindings []analyzers.Binding
for _, table := range *database.Tables {
tableResource := analyzers.Resource{
Name: table.Name,
FullyQualifiedName: dbResource.FullyQualifiedName + "/" + table.Name,
Type: "table",
Metadata: map[string]any{
"bytes": table.Bytes,
"non_existent": table.Nonexistent,
},
Parent: dbResource,
}
for _, priv := range table.Privs {
tableBindings = append(tableBindings, analyzers.Binding{
Resource: tableResource,
Permission: analyzers.Permission{
Value: priv,
},
})
}
// Add this table's column privileges to bindings
for _, column := range table.Columns {
columnResource := analyzers.Resource{
Name: column.Name,
FullyQualifiedName: tableResource.FullyQualifiedName + "/" + column.Name,
Type: "column",
Parent: &tableResource,
}
for _, priv := range column.Privs {
tableBindings = append(tableBindings, analyzers.Binding{
Resource: columnResource,
Permission: analyzers.Permission{
Value: priv,
},
})
}
}
}
return tableBindings
}
func bakeRoutineBindings(dbResource *analyzers.Resource, database *Database) []analyzers.Binding {
if database.Routines == nil {
return nil
}
var routineBindings []analyzers.Binding
for _, routine := range *database.Routines {
routineResource := analyzers.Resource{
Name: routine.Name,
FullyQualifiedName: dbResource.FullyQualifiedName + "/" + routine.Name,
Type: "routine",
Metadata: map[string]any{
"non_existent": routine.Nonexistent,
},
Parent: dbResource,
}
for _, priv := range routine.Privs {
routineBindings = append(routineBindings, analyzers.Binding{
Resource: routineResource,
Permission: analyzers.Permission{
Value: priv,
},
})
}
}
return routineBindings
}
const (
// MySQL SSL Modes
mysql_sslmode = "ssl-mode"
@ -74,6 +257,7 @@ type Routine struct {
// USER() returns `doadmin@localhost`
type SecretInfo struct {
Host string
User string
Databases map[string]*Database
GlobalPrivs GlobalPrivs
@ -99,8 +283,13 @@ func AnalyzeAndPrintPermissions(cfg *config.Config, key string) {
}
func AnalyzePermissions(cfg *config.Config, connectionStr string) (*SecretInfo, error) {
// Parse the connection string
u, err := parseConnectionStr(connectionStr)
if err != nil {
return nil, fmt.Errorf("parsing the connection string: %w", err)
}
db, err := createConnection(connectionStr)
db, err := createConnection(u)
if err != nil {
return nil, fmt.Errorf("connecting to the MySQL database: %w", err)
}
@ -139,13 +328,14 @@ func AnalyzePermissions(cfg *config.Config, connectionStr string) (*SecretInfo,
processGrants(grants, databases, &globalPrivs)
return &SecretInfo{
Host: u.Hostname(),
User: user,
Databases: databases,
GlobalPrivs: globalPrivs,
}, nil
}
func createConnection(connection string) (*sql.DB, error) {
func parseConnectionStr(connection string) (*dburl.URL, error) {
// Check if the connection string starts with 'mysql://'
if !strings.HasPrefix(connection, "mysql://") {
color.Yellow("[i] The connection string should start with 'mysql://'. Adding it for you.")
@ -163,7 +353,10 @@ func createConnection(connection string) (*sql.DB, error) {
if err != nil {
return nil, err
}
return u, nil
}
func createConnection(u *dburl.URL) (*sql.DB, error) {
// Connect to the MySQL database
db, err := sql.Open("mysql", u.DSN)
if err != nil {


@ -0,0 +1,104 @@
package mysql
import (
_ "embed"
"encoding/json"
"fmt"
"testing"
"github.com/brianvoe/gofakeit/v7"
"github.com/google/go-cmp/cmp"
"github.com/testcontainers/testcontainers-go/modules/mysql"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/analyzers"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/config"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
)
//go:embed expected_output.json
var expectedOutput []byte
func TestAnalyzer_Analyze(t *testing.T) {
mysqlUser := "root"
mysqlPass := gofakeit.Password(true, true, true, false, false, 10)
mysqlDatabase := "mysql"
ctx := context.Background()
mysqlC, err := mysql.Run(ctx, "mysql",
mysql.WithDatabase(mysqlDatabase),
mysql.WithUsername(mysqlUser),
mysql.WithPassword(mysqlPass),
)
if err != nil {
t.Fatal(err)
}
defer func() { _ = mysqlC.Terminate(ctx) }()
host, err := mysqlC.Host(ctx)
if err != nil {
t.Fatal(err)
}
port, err := mysqlC.MappedPort(ctx, "3306")
if err != nil {
t.Fatal(err)
}
tests := []struct {
name string
connectionString string
want []byte // JSON string
wantErr bool
}{
{
name: "valid Mysql connection",
connectionString: fmt.Sprintf(`root:%s@%s:%s/%s`, mysqlPass, host, port.Port(), mysqlDatabase),
want: expectedOutput,
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a := Analyzer{Cfg: &config.Config{}}
got, err := a.Analyze(context.Background(), map[string]string{"connection_string": tt.connectionString})
if (err != nil) != tt.wantErr {
t.Errorf("Analyzer.Analyze() error = %v, wantErr %v", err, tt.wantErr)
return
}
// Marshal the actual result to JSON
gotJSON, err := json.Marshal(got)
if err != nil {
t.Fatalf("could not marshal got to JSON: %s", err)
}
// Parse the expected JSON string
var wantObj analyzers.AnalyzerResult
if err := json.Unmarshal(tt.want, &wantObj); err != nil {
t.Fatalf("could not unmarshal want JSON string: %s", err)
}
// Marshal the expected result to JSON (to normalize)
wantJSON, err := json.Marshal(wantObj)
if err != nil {
t.Fatalf("could not marshal want to JSON: %s", err)
}
// Compare bindings separately because they are not guaranteed to be in the same order
if len(got.Bindings) != len(wantObj.Bindings) {
t.Errorf("Analyzer.Analyze() = %s, want %s", gotJSON, wantJSON)
return
}
got.Bindings = nil
wantObj.Bindings = nil
// Compare the rest of the Object
if diff := cmp.Diff(&wantObj, got); diff != "" {
t.Errorf("%s: (-want +got)\n%s", tt.name, diff)
return
}
})
}
}


@ -0,0 +1,451 @@
// Code generated by go generate; DO NOT EDIT.
package mysql
import "errors"
type Permission int
const (
Invalid Permission = iota
Alter Permission = iota
AlterRoutine Permission = iota
AllowNonexistentDefiner Permission = iota
ApplicationPasswordAdmin Permission = iota
AuditAbortExempt Permission = iota
AuditAdmin Permission = iota
AuthenticationPolicyAdmin Permission = iota
BackupAdmin Permission = iota
BinlogAdmin Permission = iota
BinlogEncryptionAdmin Permission = iota
CloneAdmin Permission = iota
ConnectionAdmin Permission = iota
Create Permission = iota
CreateRole Permission = iota
CreateRoutine Permission = iota
CreateTablespace Permission = iota
CreateTemporaryTables Permission = iota
CreateUser Permission = iota
CreateView Permission = iota
Delete Permission = iota
Drop Permission = iota
DropRole Permission = iota
EncryptionKeyAdmin Permission = iota
Event Permission = iota
Execute Permission = iota
File Permission = iota
FirewallAdmin Permission = iota
FirewallExempt Permission = iota
FirewallUser Permission = iota
FlushOptimizerCosts Permission = iota
FlushStatus Permission = iota
FlushTables Permission = iota
FlushUserResources Permission = iota
GrantOption Permission = iota
GroupReplicationAdmin Permission = iota
GroupReplicationStream Permission = iota
Index Permission = iota
InnodbRedoLogArchive Permission = iota
InnodbRedoLogEnable Permission = iota
Insert Permission = iota
LockingTables Permission = iota
MaskingDictionariesAdmin Permission = iota
NdbStoredUser Permission = iota
PasswordlessUserAdmin Permission = iota
PersistRoVariablesAdmin Permission = iota
Process Permission = iota
Proxy Permission = iota
References Permission = iota
Reload Permission = iota
ReplicationApplier Permission = iota
ReplicationClient Permission = iota
ReplicationSlave Permission = iota
ReplicationSlaveAdmin Permission = iota
ResourceGroupAdmin Permission = iota
ResourceGroupUser Permission = iota
RoleAdmin Permission = iota
Select Permission = iota
SensitiveVariablesObserver Permission = iota
ServiceConnectionAdmin Permission = iota
SessionVariablesAdmin Permission = iota
SetAnyDefiner Permission = iota
SetUserId Permission = iota
ShowDatabases Permission = iota
ShowRoutine Permission = iota
ShowView Permission = iota
Shutdown Permission = iota
SkipQueryRewrite Permission = iota
Super Permission = iota
SystemUser Permission = iota
SystemVariablesAdmin Permission = iota
TableEncryptionAdmin Permission = iota
TelemetryLogAdmin Permission = iota
TpConnectionAdmin Permission = iota
TransactionGtidTag Permission = iota
Trigger Permission = iota
Update Permission = iota
Usage Permission = iota
VersionTokenAdmin Permission = iota
XaRecoverAdmin Permission = iota
)
var (
PermissionStrings = map[Permission]string{
Alter: "alter",
AlterRoutine: "alter_routine",
AllowNonexistentDefiner: "allow_nonexistent_definer",
ApplicationPasswordAdmin: "application_password_admin",
AuditAbortExempt: "audit_abort_exempt",
AuditAdmin: "audit_admin",
AuthenticationPolicyAdmin: "authentication_policy_admin",
BackupAdmin: "backup_admin",
BinlogAdmin: "binlog_admin",
BinlogEncryptionAdmin: "binlog_encryption_admin",
CloneAdmin: "clone_admin",
ConnectionAdmin: "connection_admin",
Create: "create",
CreateRole: "create_role",
CreateRoutine: "create_routine",
CreateTablespace: "create_tablespace",
CreateTemporaryTables: "create_temporary_tables",
CreateUser: "create_user",
CreateView: "create_view",
Delete: "delete",
Drop: "drop",
DropRole: "drop_role",
EncryptionKeyAdmin: "encryption_key_admin",
Event: "event",
Execute: "execute",
File: "file",
FirewallAdmin: "firewall_admin",
FirewallExempt: "firewall_exempt",
FirewallUser: "firewall_user",
FlushOptimizerCosts: "flush_optimizer_costs",
FlushStatus: "flush_status",
FlushTables: "flush_tables",
FlushUserResources: "flush_user_resources",
GrantOption: "grant_option",
GroupReplicationAdmin: "group_replication_admin",
GroupReplicationStream: "group_replication_stream",
Index: "index",
InnodbRedoLogArchive: "innodb_redo_log_archive",
InnodbRedoLogEnable: "innodb_redo_log_enable",
Insert: "insert",
LockingTables: "locking_tables",
MaskingDictionariesAdmin: "masking_dictionaries_admin",
NdbStoredUser: "ndb_stored_user",
PasswordlessUserAdmin: "passwordless_user_admin",
PersistRoVariablesAdmin: "persist_ro_variables_admin",
Process: "process",
Proxy: "proxy",
References: "references",
Reload: "reload",
ReplicationApplier: "replication_applier",
ReplicationClient: "replication_client",
ReplicationSlave: "replication_slave",
ReplicationSlaveAdmin: "replication_slave_admin",
ResourceGroupAdmin: "resource_group_admin",
ResourceGroupUser: "resource_group_user",
RoleAdmin: "role_admin",
Select: "select",
SensitiveVariablesObserver: "sensitive_variables_observer",
ServiceConnectionAdmin: "service_connection_admin",
SessionVariablesAdmin: "session_variables_admin",
SetAnyDefiner: "set_any_definer",
SetUserId: "set_user_id",
ShowDatabases: "show_databases",
ShowRoutine: "show_routine",
ShowView: "show_view",
Shutdown: "shutdown",
SkipQueryRewrite: "skip_query_rewrite",
Super: "super",
SystemUser: "system_user",
SystemVariablesAdmin: "system_variables_admin",
TableEncryptionAdmin: "table_encryption_admin",
TelemetryLogAdmin: "telemetry_log_admin",
TpConnectionAdmin: "tp_connection_admin",
TransactionGtidTag: "transaction_gtid_tag",
Trigger: "trigger",
Update: "update",
Usage: "usage",
VersionTokenAdmin: "version_token_admin",
XaRecoverAdmin: "xa_recover_admin",
}
StringToPermission = map[string]Permission{
"alter": Alter,
"alter_routine": AlterRoutine,
"allow_nonexistent_definer": AllowNonexistentDefiner,
"application_password_admin": ApplicationPasswordAdmin,
"audit_abort_exempt": AuditAbortExempt,
"audit_admin": AuditAdmin,
"authentication_policy_admin": AuthenticationPolicyAdmin,
"backup_admin": BackupAdmin,
"binlog_admin": BinlogAdmin,
"binlog_encryption_admin": BinlogEncryptionAdmin,
"clone_admin": CloneAdmin,
"connection_admin": ConnectionAdmin,
"create": Create,
"create_role": CreateRole,
"create_routine": CreateRoutine,
"create_tablespace": CreateTablespace,
"create_temporary_tables": CreateTemporaryTables,
"create_user": CreateUser,
"create_view": CreateView,
"delete": Delete,
"drop": Drop,
"drop_role": DropRole,
"encryption_key_admin": EncryptionKeyAdmin,
"event": Event,
"execute": Execute,
"file": File,
"firewall_admin": FirewallAdmin,
"firewall_exempt": FirewallExempt,
"firewall_user": FirewallUser,
"flush_optimizer_costs": FlushOptimizerCosts,
"flush_status": FlushStatus,
"flush_tables": FlushTables,
"flush_user_resources": FlushUserResources,
"grant_option": GrantOption,
"group_replication_admin": GroupReplicationAdmin,
"group_replication_stream": GroupReplicationStream,
"index": Index,
"innodb_redo_log_archive": InnodbRedoLogArchive,
"innodb_redo_log_enable": InnodbRedoLogEnable,
"insert": Insert,
"locking_tables": LockingTables,
"masking_dictionaries_admin": MaskingDictionariesAdmin,
"ndb_stored_user": NdbStoredUser,
"passwordless_user_admin": PasswordlessUserAdmin,
"persist_ro_variables_admin": PersistRoVariablesAdmin,
"process": Process,
"proxy": Proxy,
"references": References,
"reload": Reload,
"replication_applier": ReplicationApplier,
"replication_client": ReplicationClient,
"replication_slave": ReplicationSlave,
"replication_slave_admin": ReplicationSlaveAdmin,
"resource_group_admin": ResourceGroupAdmin,
"resource_group_user": ResourceGroupUser,
"role_admin": RoleAdmin,
"select": Select,
"sensitive_variables_observer": SensitiveVariablesObserver,
"service_connection_admin": ServiceConnectionAdmin,
"session_variables_admin": SessionVariablesAdmin,
"set_any_definer": SetAnyDefiner,
"set_user_id": SetUserId,
"show_databases": ShowDatabases,
"show_routine": ShowRoutine,
"show_view": ShowView,
"shutdown": Shutdown,
"skip_query_rewrite": SkipQueryRewrite,
"super": Super,
"system_user": SystemUser,
"system_variables_admin": SystemVariablesAdmin,
"table_encryption_admin": TableEncryptionAdmin,
"telemetry_log_admin": TelemetryLogAdmin,
"tp_connection_admin": TpConnectionAdmin,
"transaction_gtid_tag": TransactionGtidTag,
"trigger": Trigger,
"update": Update,
"usage": Usage,
"version_token_admin": VersionTokenAdmin,
"xa_recover_admin": XaRecoverAdmin,
}
PermissionIDs = map[Permission]int{
Alter: 1,
AlterRoutine: 2,
AllowNonexistentDefiner: 3,
ApplicationPasswordAdmin: 4,
AuditAbortExempt: 5,
AuditAdmin: 6,
AuthenticationPolicyAdmin: 7,
BackupAdmin: 8,
BinlogAdmin: 9,
BinlogEncryptionAdmin: 10,
CloneAdmin: 11,
ConnectionAdmin: 12,
Create: 13,
CreateRole: 14,
CreateRoutine: 15,
CreateTablespace: 16,
CreateTemporaryTables: 17,
CreateUser: 18,
CreateView: 19,
Delete: 20,
Drop: 21,
DropRole: 22,
EncryptionKeyAdmin: 23,
Event: 24,
Execute: 25,
File: 26,
FirewallAdmin: 27,
FirewallExempt: 28,
FirewallUser: 29,
FlushOptimizerCosts: 30,
FlushStatus: 31,
FlushTables: 32,
FlushUserResources: 33,
GrantOption: 34,
GroupReplicationAdmin: 35,
GroupReplicationStream: 36,
Index: 37,
InnodbRedoLogArchive: 38,
InnodbRedoLogEnable: 39,
Insert: 40,
LockingTables: 41,
MaskingDictionariesAdmin: 42,
NdbStoredUser: 43,
PasswordlessUserAdmin: 44,
PersistRoVariablesAdmin: 45,
Process: 46,
Proxy: 47,
References: 48,
Reload: 49,
ReplicationApplier: 50,
ReplicationClient: 51,
ReplicationSlave: 52,
ReplicationSlaveAdmin: 53,
ResourceGroupAdmin: 54,
ResourceGroupUser: 55,
RoleAdmin: 56,
Select: 57,
SensitiveVariablesObserver: 58,
ServiceConnectionAdmin: 59,
SessionVariablesAdmin: 60,
SetAnyDefiner: 61,
SetUserId: 62,
ShowDatabases: 63,
ShowRoutine: 64,
ShowView: 65,
Shutdown: 66,
SkipQueryRewrite: 67,
Super: 68,
SystemUser: 69,
SystemVariablesAdmin: 70,
TableEncryptionAdmin: 71,
TelemetryLogAdmin: 72,
TpConnectionAdmin: 73,
TransactionGtidTag: 74,
Trigger: 75,
Update: 76,
Usage: 77,
VersionTokenAdmin: 78,
XaRecoverAdmin: 79,
}
IdToPermission = map[int]Permission{
1: Alter,
2: AlterRoutine,
3: AllowNonexistentDefiner,
4: ApplicationPasswordAdmin,
5: AuditAbortExempt,
6: AuditAdmin,
7: AuthenticationPolicyAdmin,
8: BackupAdmin,
9: BinlogAdmin,
10: BinlogEncryptionAdmin,
11: CloneAdmin,
12: ConnectionAdmin,
13: Create,
14: CreateRole,
15: CreateRoutine,
16: CreateTablespace,
17: CreateTemporaryTables,
18: CreateUser,
19: CreateView,
20: Delete,
21: Drop,
22: DropRole,
23: EncryptionKeyAdmin,
24: Event,
25: Execute,
26: File,
27: FirewallAdmin,
28: FirewallExempt,
29: FirewallUser,
30: FlushOptimizerCosts,
31: FlushStatus,
32: FlushTables,
33: FlushUserResources,
34: GrantOption,
35: GroupReplicationAdmin,
36: GroupReplicationStream,
37: Index,
38: InnodbRedoLogArchive,
39: InnodbRedoLogEnable,
40: Insert,
41: LockingTables,
42: MaskingDictionariesAdmin,
43: NdbStoredUser,
44: PasswordlessUserAdmin,
45: PersistRoVariablesAdmin,
46: Process,
47: Proxy,
48: References,
49: Reload,
50: ReplicationApplier,
51: ReplicationClient,
52: ReplicationSlave,
53: ReplicationSlaveAdmin,
54: ResourceGroupAdmin,
55: ResourceGroupUser,
56: RoleAdmin,
57: Select,
58: SensitiveVariablesObserver,
59: ServiceConnectionAdmin,
60: SessionVariablesAdmin,
61: SetAnyDefiner,
62: SetUserId,
63: ShowDatabases,
64: ShowRoutine,
65: ShowView,
66: Shutdown,
67: SkipQueryRewrite,
68: Super,
69: SystemUser,
70: SystemVariablesAdmin,
71: TableEncryptionAdmin,
72: TelemetryLogAdmin,
73: TpConnectionAdmin,
74: TransactionGtidTag,
75: Trigger,
76: Update,
77: Usage,
78: VersionTokenAdmin,
79: XaRecoverAdmin,
}
)
// ToString converts a Permission enum to its string representation
func (p Permission) ToString() (string, error) {
if str, ok := PermissionStrings[p]; ok {
return str, nil
}
return "", errors.New("invalid permission")
}
// ToID converts a Permission enum to its ID
func (p Permission) ToID() (int, error) {
if id, ok := PermissionIDs[p]; ok {
return id, nil
}
return 0, errors.New("invalid permission")
}
// PermissionFromString converts a string representation to its Permission enum
func PermissionFromString(s string) (Permission, error) {
if p, ok := StringToPermission[s]; ok {
return p, nil
}
return 0, errors.New("invalid permission string")
}
// PermissionFromID converts an ID to its Permission enum
func PermissionFromID(id int) (Permission, error) {
if p, ok := IdToPermission[id]; ok {
return p, nil
}
return 0, errors.New("invalid permission ID")
}


@ -0,0 +1,80 @@
permissions:
- alter
- alter_routine
- allow_nonexistent_definer
- application_password_admin
- audit_abort_exempt
- audit_admin
- authentication_policy_admin
- backup_admin
- binlog_admin
- binlog_encryption_admin
- clone_admin
- connection_admin
- create
- create_role
- create_routine
- create_tablespace
- create_temporary_tables
- create_user
- create_view
- delete
- drop
- drop_role
- encryption_key_admin
- event
- execute
- file
- firewall_admin
- firewall_exempt
- firewall_user
- flush_optimizer_costs
- flush_status
- flush_tables
- flush_user_resources
- grant_option
- group_replication_admin
- group_replication_stream
- index
- innodb_redo_log_archive
- innodb_redo_log_enable
- insert
- locking_tables
- masking_dictionaries_admin
- ndb_stored_user
- passwordless_user_admin
- persist_ro_variables_admin
- process
- proxy
- references
- reload
- replication_applier
- replication_client
- replication_slave
- replication_slave_admin
- resource_group_admin
- resource_group_user
- role_admin
- select
- sensitive_variables_observer
- service_connection_admin
- session_variables_admin
- set_any_definer
- set_user_id
- show_databases
- show_routine
- show_view
- shutdown
- skip_query_rewrite
- super
- system_user
- system_variables_admin
- table_encryption_admin
- telemetry_log_admin
- tp_connection_admin
- transaction_gtid_tag
- trigger
- update
- usage
- version_token_admin
- xa_recover_admin


@ -0,0 +1,177 @@
{
"AnalyzerType": 15,
"Bindings": [
{
"Resource": {
"Name": "Analytics",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com/Analytics",
"Type": "category",
"Metadata": null,
"Parent": {
"Name": "My Store",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com",
"Type": "shop",
"Metadata": {
"created_at": "2024-08-16T17:16:17+05:00"
},
"Parent": null
}
},
"Permission": {
"Value": "read",
"Parent": null
}
},
{
"Resource": {
"Name": "Applications",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com/Applications",
"Type": "category",
"Metadata": null,
"Parent": {
"Name": "My Store",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com",
"Type": "shop",
"Metadata": {
"created_at": "2024-08-16T17:16:17+05:00"
},
"Parent": null
}
},
"Permission": {
"Value": "read",
"Parent": null
}
},
{
"Resource": {
"Name": "Assigned fulfillment orders",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com/Assigned fulfillment orders",
"Type": "category",
"Metadata": null,
"Parent": {
"Name": "My Store",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com",
"Type": "shop",
"Metadata": {
"created_at": "2024-08-16T17:16:17+05:00"
},
"Parent": null
}
},
"Permission": {
"Value": "full_access",
"Parent": null
}
},
{
"Resource": {
"Name": "Customers",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com/Customers",
"Type": "category",
"Metadata": null,
"Parent": {
"Name": "My Store",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com",
"Type": "shop",
"Metadata": {
"created_at": "2024-08-16T17:16:17+05:00"
},
"Parent": null
}
},
"Permission": {
"Value": "full_access",
"Parent": null
}
},
{
"Resource": {
"Name": "Discovery",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com/Discovery",
"Type": "category",
"Metadata": null,
"Parent": {
"Name": "My Store",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com",
"Type": "shop",
"Metadata": {
"created_at": "2024-08-16T17:16:17+05:00"
},
"Parent": null
}
},
"Permission": {
"Value": "full_access",
"Parent": null
}
},
{
"Resource": {
"Name": "Merchant-managed fulfillment orders",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com/Merchant-managed fulfillment orders",
"Type": "category",
"Metadata": null,
"Parent": {
"Name": "My Store",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com",
"Type": "shop",
"Metadata": {
"created_at": "2024-08-16T17:16:17+05:00"
},
"Parent": null
}
},
"Permission": {
"Value": "full_access",
"Parent": null
}
},
{
"Resource": {
"Name": "Reports",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com/Reports",
"Type": "category",
"Metadata": null,
"Parent": {
"Name": "My Store",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com",
"Type": "shop",
"Metadata": {
"created_at": "2024-08-16T17:16:17+05:00"
},
"Parent": null
}
},
"Permission": {
"Value": "full_access",
"Parent": null
}
},
{
"Resource": {
"Name": "cart_transforms",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com/cart_transforms",
"Type": "category",
"Metadata": null,
"Parent": {
"Name": "My Store",
"FullyQualifiedName": "727f01-d6.myshopify.com/detectors@trufflesec.com",
"Type": "shop",
"Metadata": {
"created_at": "2024-08-16T17:16:17+05:00"
},
"Parent": null
}
},
"Permission": {
"Value": "full_access",
"Parent": null
}
}
],
"UnboundedResources": null,
"Metadata": {
"status_code": 200
}
}


@ -0,0 +1,71 @@
// Code generated by go generate; DO NOT EDIT.
package shopify
import "errors"
type Permission int
const (
Invalid Permission = iota
Read Permission = iota
Write Permission = iota
FullAccess Permission = iota
)
var (
PermissionStrings = map[Permission]string{
Read: "read",
Write: "write",
FullAccess: "full_access",
}
StringToPermission = map[string]Permission{
"read": Read,
"write": Write,
"full_access": FullAccess,
}
PermissionIDs = map[Permission]int{
Read: 1,
Write: 2,
FullAccess: 3,
}
IdToPermission = map[int]Permission{
1: Read,
2: Write,
3: FullAccess,
}
)
// ToString converts a Permission enum to its string representation
func (p Permission) ToString() (string, error) {
if str, ok := PermissionStrings[p]; ok {
return str, nil
}
return "", errors.New("invalid permission")
}
// ToID converts a Permission enum to its ID
func (p Permission) ToID() (int, error) {
if id, ok := PermissionIDs[p]; ok {
return id, nil
}
return 0, errors.New("invalid permission")
}
// PermissionFromString converts a string representation to its Permission enum
func PermissionFromString(s string) (Permission, error) {
if p, ok := StringToPermission[s]; ok {
return p, nil
}
return 0, errors.New("invalid permission string")
}
// PermissionFromID converts an ID to its Permission enum
func PermissionFromID(id int) (Permission, error) {
if p, ok := IdToPermission[id]; ok {
return p, nil
}
return 0, errors.New("invalid permission ID")
}


@ -0,0 +1,4 @@
permissions:
- read
- write
- full_access


@ -1,8 +1,11 @@
//go:generate generate_permissions permissions.yaml permissions.go shopify
package shopify
import (
_ "embed"
"encoding/json"
"errors"
"fmt"
"net/http"
"os"
@ -12,8 +15,100 @@ import (
"github.com/jedib0t/go-pretty/table"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/analyzers"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/config"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/pb/analyzerpb"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
)
var _ analyzers.Analyzer = (*Analyzer)(nil)
type Analyzer struct {
Cfg *config.Config
}
var (
// order the categories
categoryOrder = []string{"Analytics", "Applications", "Assigned fulfillment orders", "Browsing behavior", "Custom pixels", "Customers", "Discounts", "Discovery", "Draft orders", "Files", "Fulfillment services", "Gift cards", "Inventory", "Legal policies", "Locations", "Marketing events", "Merchant-managed fulfillment orders", "Metaobject definitions", "Metaobject entries", "Online Store navigation", "Online Store pages", "Order editing", "Orders", "Packing slip management", "Payment customizations", "Payment terms", "Pixels", "Price rules", "Product feeds", "Product listings", "Products", "Publications", "Purchase options", "Reports", "Resource feedback", "Returns", "Sales channels", "Script tags", "Shipping", "Shop locales", "Shopify Markets", "Shopify Payments accounts", "Shopify Payments bank accounts", "Shopify Payments disputes", "Shopify Payments payouts", "Store content", "Store credit account transactions", "Store credit accounts", "Themes", "Third-party fulfillment orders", "Translations", "all_cart_transforms", "all_checkout_completion_target_customizations", "cart_transforms", "cash_tracking", "companies", "custom_fulfillment_services", "customer_data_erasure", "customer_merge", "delivery_customizations", "delivery_option_generators", "discounts_allocator_functions", "fulfillment_constraint_rules", "gates", "order_submission_rules", "privacy_settings", "shopify_payments_provider_accounts_sensitive", "validations"}
)
func (Analyzer) Type() analyzerpb.AnalyzerType { return analyzerpb.AnalyzerType_Shopify }
func (a Analyzer) Analyze(_ context.Context, credInfo map[string]string) (*analyzers.AnalyzerResult, error) {
key, ok := credInfo["key"]
if !ok {
return nil, errors.New("key not found in credentialInfo")
}
storeUrl, ok := credInfo["store_url"]
if !ok {
return nil, errors.New("store_url not found in credentialInfo")
}
info, err := AnalyzePermissions(a.Cfg, key, storeUrl)
if err != nil {
return nil, err
}
return secretInfoToAnalyzerResult(info), nil
}
func secretInfoToAnalyzerResult(info *SecretInfo) *analyzers.AnalyzerResult {
if info == nil {
return nil
}
result := analyzers.AnalyzerResult{
AnalyzerType: analyzerpb.AnalyzerType_Shopify,
Metadata: map[string]any{
"status_code": info.StatusCode,
},
}
resource := &analyzers.Resource{
Name: info.ShopInfo.Shop.Name,
FullyQualifiedName: info.ShopInfo.Shop.Domain + "/" + info.ShopInfo.Shop.Email,
Type: "shop",
Metadata: map[string]any{
"created_at": info.ShopInfo.Shop.CreatedAt,
},
Parent: nil,
}
result.Bindings = make([]analyzers.Binding, 0)
for _, category := range categoryOrder {
if val, ok := info.Scopes[category]; ok {
categoryResource := &analyzers.Resource{
Name: category,
FullyQualifiedName: resource.FullyQualifiedName + "/" + category, // shop.domain/shop.email/category
Type: "category",
Parent: resource,
}
if sliceContains(val.Scopes, "Read") && sliceContains(val.Scopes, "Write") {
result.Bindings = append(result.Bindings, analyzers.Binding{
Resource: *categoryResource,
Permission: analyzers.Permission{
Value: PermissionStrings[FullAccess],
},
})
continue
}
for _, scope := range val.Scopes {
lowerScope := strings.ToLower(scope)
if _, ok := StringToPermission[lowerScope]; !ok { // skip unknown scopes/permission
continue
}
result.Bindings = append(result.Bindings, analyzers.Binding{
Resource: *categoryResource,
Permission: analyzers.Permission{
Value: lowerScope,
},
})
}
}
}
return &result
}
//go:embed scopes.json
var scopesConfig []byte
@ -90,6 +185,7 @@ func determineScopes(data ScopeDataJSON, input string) map[string]OutputScopes {
type ShopInfoJSON struct {
Shop struct {
Domain string `json:"domain"`
Name string `json:"name"`
Email string `json:"email"`
CreatedAt string `json:"created_at"`
@ -224,9 +320,6 @@ func printAccessScopes(accessScopes map[string]OutputScopes) {
t.SetOutputMirror(os.Stdout)
t.AppendHeader(table.Row{"Scope", "Description", "Access"})
// order the categories
categoryOrder := []string{"Analytics", "Applications", "Assigned fulfillment orders", "Browsing behavior", "Custom pixels", "Customers", "Discounts", "Discovery", "Draft orders", "Files", "Fulfillment services", "Gift cards", "Inventory", "Legal policies", "Locations", "Marketing events", "Merchant-managed fulfillment orders", "Metaobject definitions", "Metaobject entries", "Online Store navigation", "Online Store pages", "Order editing", "Orders", "Packing slip management", "Payment customizations", "Payment terms", "Pixels", "Price rules", "Product feeds", "Product listings", "Products", "Publications", "Purchase options", "Reports", "Resource feedback", "Returns", "Sales channels", "Script tags", "Shipping", "Shop locales", "Shopify Markets", "Shopify Payments accounts", "Shopify Payments bank accounts", "Shopify Payments disputes", "Shopify Payments payouts", "Store content", "Store credit account transactions", "Store credit accounts", "Themes", "Third-party fulfillment orders", "Translations", "all_cart_transforms", "all_checkout_completion_target_customizations", "cart_transforms", "cash_tracking", "companies", "custom_fulfillment_services", "customer_data_erasure", "customer_merge", "delivery_customizations", "delivery_option_generators", "discounts_allocator_functions", "fulfillment_constraint_rules", "gates", "order_submission_rules", "privacy_settings", "shopify_payments_provider_accounts_sensitive", "validations"}
for _, category := range categoryOrder {
if val, ok := accessScopes[category]; ok {
t.AppendRow([]interface{}{color.GreenString(category), color.GreenString(val.Description), color.GreenString(val.PrintScopes())})


@ -0,0 +1,88 @@
package shopify
import (
_ "embed"
"encoding/json"
"testing"
"time"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/analyzers"
"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/config"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
)
//go:embed expected_output.json
var expectedOutput []byte
func TestAnalyzer_Analyze(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
defer cancel()
testSecrets, err := common.GetSecret(ctx, "trufflehog-testing", "detectors4")
if err != nil {
t.Fatalf("could not get test secrets from GCP: %s", err)
}
secret := testSecrets.MustGetField("SHOPIFY_ADMIN_SECRET")
domain := testSecrets.MustGetField("SHOPIFY_DOMAIN")
tests := []struct {
name string
key string
storeUrl string
want string
wantErr bool
}{
{
name: "valid Shopify key",
key: secret,
storeUrl: domain,
want: string(expectedOutput),
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a := Analyzer{Cfg: &config.Config{}}
got, err := a.Analyze(ctx, map[string]string{"key": tt.key, "store_url": tt.storeUrl})
if (err != nil) != tt.wantErr {
t.Errorf("Analyzer.Analyze() error = %v, wantErr %v", err, tt.wantErr)
return
}
// Marshal the actual result to JSON
gotJSON, err := json.Marshal(got)
if err != nil {
t.Fatalf("could not marshal got to JSON: %s", err)
}
// Parse the expected JSON string
var wantObj analyzers.AnalyzerResult
if err := json.Unmarshal([]byte(tt.want), &wantObj); err != nil {
t.Fatalf("could not unmarshal want JSON string: %s", err)
}
// Marshal the expected result to JSON (to normalize)
wantJSON, err := json.Marshal(wantObj)
if err != nil {
t.Fatalf("could not marshal want to JSON: %s", err)
}
// Compare the JSON strings
if string(gotJSON) != string(wantJSON) {
// Pretty-print both JSON strings for easier comparison
var gotIndented, wantIndented []byte
gotIndented, err = json.MarshalIndent(got, "", " ")
if err != nil {
t.Fatalf("could not marshal got to indented JSON: %s", err)
}
wantIndented, err = json.MarshalIndent(wantObj, "", " ")
if err != nil {
t.Fatalf("could not marshal want to indented JSON: %s", err)
}
t.Errorf("Analyzer.Analyze() = %s, want %s", gotIndented, wantIndented)
}
})
}
}


@ -27,6 +27,10 @@ func init() {
}
}
func (d *Base64) Type() detectorspb.DecoderType {
return detectorspb.DecoderType_BASE64
}
func (d *Base64) FromChunk(chunk *sources.Chunk) *DecodableChunk {
decodableChunk := &DecodableChunk{Chunk: chunk, DecoderType: d.Type()}
encodedSubstrings := getSubstringsOfCharacterSet(chunk.Data, 20, b64CharsetMapping, b64EndChars)
@ -67,10 +71,6 @@ func (d *Base64) FromChunk(chunk *sources.Chunk) *DecodableChunk {
return nil
}
func (d *Base64) Type() detectorspb.DecoderType {
return detectorspb.DecoderType_BASE64
}
func isASCII(b []byte) bool {
for i := 0; i < len(b); i++ {
if b[i] > unicode.MaxASCII {


@ -24,6 +24,10 @@ var (
escapePat = regexp.MustCompile(`(?i:\\{1,2}u)([a-fA-F0-9]{4})`)
)
func (d *EscapedUnicode) Type() detectorspb.DecoderType {
return detectorspb.DecoderType_ESCAPED_UNICODE
}
func (d *EscapedUnicode) FromChunk(chunk *sources.Chunk) *DecodableChunk {
if chunk == nil || len(chunk.Data) == 0 {
return nil
@ -94,10 +98,6 @@ func decodeCodePoint(input []byte) []byte {
return input
}
func (d *EscapedUnicode) Type() detectorspb.DecoderType {
return detectorspb.DecoderType_ESCAPED_UNICODE
}
func decodeEscaped(input []byte) []byte {
// Find all Unicode escape sequences in the input byte slice
indices := escapePat.FindAllSubmatchIndex(input, -1)


@ -11,6 +11,10 @@ import (
type UTF16 struct{}
func (d *UTF16) Type() detectorspb.DecoderType {
return detectorspb.DecoderType_UTF16
}
func (d *UTF16) FromChunk(chunk *sources.Chunk) *DecodableChunk {
if chunk == nil || len(chunk.Data) == 0 {
return nil
@ -28,10 +32,6 @@ func (d *UTF16) FromChunk(chunk *sources.Chunk) *DecodableChunk {
return nil
}
func (d *UTF16) Type() detectorspb.DecoderType {
return detectorspb.DecoderType_UTF16
}
// utf16ToUTF8 converts a byte slice containing UTF-16 encoded data to a UTF-8 encoded byte slice.
func utf16ToUTF8(b []byte) ([]byte, error) {
var bufBE, bufLE bytes.Buffer


@ -10,6 +10,10 @@ import (
type UTF8 struct{}
func (d *UTF8) Type() detectorspb.DecoderType {
return detectorspb.DecoderType_PLAIN
}
func (d *UTF8) FromChunk(chunk *sources.Chunk) *DecodableChunk {
if chunk == nil || len(chunk.Data) == 0 {
return nil
@ -25,10 +29,6 @@ func (d *UTF8) FromChunk(chunk *sources.Chunk) *DecodableChunk {
return decodableChunk
}
func (d *UTF8) Type() detectorspb.DecoderType {
return detectorspb.DecoderType_PLAIN
}
// extractSubstrings performs similarly to the strings binutil,
// extracting contiguous portions of printable characters that we care
// about from some bytes


@ -31,7 +31,7 @@ var (
// Keywords are used for efficiently pre-filtering chunks.
// Use identifiers in the secret preferably, or the provider name.
func (s Scanner) Keywords() []string {
return []string{"aha"}
return []string{"aha.io"}
}
func (s Scanner) getClient() *http.Client {


@ -97,6 +97,9 @@ matchLoop:
err = pingRes.err
s.SetVerificationError(err, jdbcConn)
}
s.AnalysisInfo = map[string]string{
"connection_string": jdbcConn,
}
// TODO: specialized redaction
}
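Not part of this hunk, but for context: the AnalysisInfo map set here uses the same "connection_string" key that the new MySQL analyzer's Analyze method reads, so a verified JDBC result could, in principle, be handed straight to it. A hedged sketch; the helper name, package name, and import paths are assumptions:

package example

import (
	"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/analyzers"
	"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/analyzers/mysql"
	"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/config"
	"github.com/trufflesecurity/trufflehog/v3/pkg/analyzer/pb/analyzerpb"
	"github.com/trufflesecurity/trufflehog/v3/pkg/context"
	"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
)

// analyzeVerifiedResult is a hypothetical helper, shown only to illustrate how a
// verified detector result's AnalysisInfo could feed the analyzer introduced above.
func analyzeVerifiedResult(ctx context.Context, res detectors.Result) (*analyzers.AnalyzerResult, error) {
	if !res.Verified || res.AnalysisInfo == nil {
		return nil, nil // nothing to analyze
	}
	a := mysql.Analyzer{Cfg: &config.Config{}}
	_ = analyzerpb.AnalyzerType_MySQL          // the result, if any, carries this analyzer type
	return a.Analyze(ctx, res.AnalysisInfo)    // reads credInfo["connection_string"]
}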


@ -134,6 +134,7 @@ func TestJdbc_FromChunk(t *testing.T) {
t.Fatal("no raw secret present")
}
got[i].Raw = nil
got[i].AnalysisInfo = nil
}
if diff := pretty.Compare(got, tt.want); diff != "" {
t.Errorf("Jdbc.FromData() %s diff: (-got +want)\n%s", tt.name, diff)


@ -31,7 +31,7 @@ var (
// Tokens created after Jan 18 2023 use a variable length
tokenPat = regexp.MustCompile(detectors.PrefixRegex([]string{"jira"}) + `\b([A-Za-z0-9+/=_-]+=[A-Za-z0-9]{8})\b`)
domainPat = regexp.MustCompile(detectors.PrefixRegex([]string{"jira"}) + `\b([a-zA-Z-0-9]{5,24}\.[a-zA-Z-0-9]{3,16}\.[a-zA-Z-0-9]{3,16})\b`)
emailPat = regexp.MustCompile(detectors.PrefixRegex([]string{"jira"}) + `\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b`)
emailPat = regexp.MustCompile(detectors.PrefixRegex([]string{"jira"}) + `\b([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,})\b`)
)
const (
@ -54,11 +54,11 @@ func (s Scanner) FromData(ctx context.Context, verify bool, data []byte) (result
emails := emailPat.FindAllStringSubmatch(dataStr, -1)
for _, email := range emails {
email = strings.Split(email[0], " ")
if len(email) != 2 {
continue
}
resEmail := strings.TrimSpace(email[1])
for _, token := range tokens {
if len(token) != 2 {
continue


@ -3,16 +3,17 @@ package mailgun
import (
"context"
"fmt"
regexp "github.com/wasilibs/go-re2"
"net/http"
"strings"
regexp "github.com/wasilibs/go-re2"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/detectorspb"
)
type Scanner struct{
type Scanner struct {
detectors.DefaultMultiPartCredentialProvider
}
@ -72,6 +73,7 @@ func (s Scanner) FromData(ctx context.Context, verify bool, data []byte) (result
s1.Verified = true
}
}
s1.AnalysisInfo = map[string]string{"key": resMatch}
}


@ -129,6 +129,7 @@ func TestMailgun_FromChunk(t *testing.T) {
t.Fatalf("no raw secret present: \n %+v", got[i])
}
got[i].Raw = nil
got[i].AnalysisInfo = nil
}
if diff := pretty.Compare(got, tt.want); diff != "" {
t.Errorf("Mailgun.FromData() %s diff: (-got +want)\n%s", tt.name, diff)


@ -0,0 +1,104 @@
package nvapi
import (
"context"
"fmt"
"io"
"net/http"
"net/url"
"strings"
regexp "github.com/wasilibs/go-re2"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/detectorspb"
)
type Scanner struct {
client *http.Client
}
// Ensure the Scanner satisfies the interface at compile time.
var _ detectors.Detector = (*Scanner)(nil)
var (
defaultClient = common.SaneHttpClient()
// Make sure that your group is surrounded in boundary characters such as below to reduce false positives.
keyPat = regexp.MustCompile(`\b(nvapi-[a-zA-Z0-9_-]{64})\b`)
)
// Keywords are used for efficiently pre-filtering chunks.
// Use identifiers in the secret preferably, or the provider name.
func (s Scanner) Keywords() []string {
return []string{"nvapi-"}
}
// FromData will find and optionally verify Nvapi secrets in a given set of bytes.
func (s Scanner) FromData(ctx context.Context, verify bool, data []byte) (results []detectors.Result, err error) {
dataStr := string(data)
uniqueMatches := make(map[string]struct{})
for _, match := range keyPat.FindAllStringSubmatch(dataStr, -1) {
uniqueMatches[match[1]] = struct{}{}
}
for match := range uniqueMatches {
s1 := detectors.Result{
DetectorType: detectorspb.DetectorType_NVAPI,
Raw: []byte(match),
}
if verify {
client := s.client
if client == nil {
client = defaultClient
}
isVerified, extraData, verificationErr := verifyMatch(ctx, client, match)
s1.Verified = isVerified
s1.ExtraData = extraData
s1.SetVerificationError(verificationErr, match)
}
results = append(results, s1)
}
return
}
func verifyMatch(ctx context.Context, client *http.Client, token string) (bool, map[string]string, error) {
data := url.Values{}
data.Set("credentials", token)
req, err := http.NewRequestWithContext(ctx, http.MethodPost, "https://api.ngc.nvidia.com/v3/keys/get-caller-info", strings.NewReader(data.Encode()))
if err != nil {
return false, nil, nil
}
req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
res, err := client.Do(req)
if err != nil {
return false, nil, err
}
defer func() {
_, _ = io.Copy(io.Discard, res.Body)
_ = res.Body.Close()
}()
switch res.StatusCode {
case http.StatusOK:
// If the endpoint returns useful information, we can return it as a map.
return true, nil, nil
case http.StatusUnauthorized:
// The secret is determinately not verified (nothing to do)
return false, nil, nil
default:
return false, nil, fmt.Errorf("unexpected HTTP response status %d", res.StatusCode)
}
}
func (s Scanner) Type() detectorspb.DetectorType {
return detectorspb.DetectorType_NVAPI
}


@ -0,0 +1,220 @@
//go:build detectors
// +build detectors
package nvapi
import (
"context"
"fmt"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
"github.com/trufflesecurity/trufflehog/v3/pkg/engine/ahocorasick"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/detectorspb"
)
func TestNvapi_Pattern(t *testing.T) {
d := Scanner{}
ahoCorasickCore := ahocorasick.NewAhoCorasickCore([]detectors.Detector{d})
tests := []struct {
name string
input string
want []string
}{
{
name: "typical pattern",
input: "nvapi_token = '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'",
want: []string{"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"},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
matchedDetectors := ahoCorasickCore.FindDetectorMatches([]byte(test.input))
if len(matchedDetectors) == 0 {
t.Errorf("keywords '%v' not matched by: %s", d.Keywords(), test.input)
return
}
results, err := d.FromData(context.Background(), false, []byte(test.input))
if err != nil {
t.Errorf("error = %v", err)
return
}
if len(results) != len(test.want) {
if len(results) == 0 {
t.Errorf("did not receive result")
} else {
t.Errorf("expected %d results, only received %d", len(test.want), len(results))
}
return
}
actual := make(map[string]struct{}, len(results))
for _, r := range results {
if len(r.RawV2) > 0 {
actual[string(r.RawV2)] = struct{}{}
} else {
actual[string(r.Raw)] = struct{}{}
}
}
expected := make(map[string]struct{}, len(test.want))
for _, v := range test.want {
expected[v] = struct{}{}
}
if diff := cmp.Diff(expected, actual); diff != "" {
t.Errorf("%s diff: (-want +got)\n%s", test.name, diff)
}
})
}
}
func TestNvapi_FromChunk(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
defer cancel()
testSecrets, err := common.GetSecret(ctx, "trufflehog-testing", "detectors5")
if err != nil {
t.Fatalf("could not get test secrets from GCP: %s", err)
}
secret := testSecrets.MustGetField("NVAPI")
inactiveSecret := testSecrets.MustGetField("NVAPI_INACTIVE")
type args struct {
ctx context.Context
data []byte
verify bool
}
tests := []struct {
name string
s Scanner
args args
want []detectors.Result
wantErr bool
wantVerificationErr bool
}{
{
name: "found, verified",
s: Scanner{},
args: args{
ctx: context.Background(),
data: []byte(fmt.Sprintf("You can find a nvapi secret %s within", secret)),
verify: true,
},
want: []detectors.Result{
{
DetectorType: detectorspb.DetectorType_NVAPI,
Verified: true,
},
},
wantErr: false,
wantVerificationErr: false,
},
{
name: "found, unverified",
s: Scanner{},
args: args{
ctx: context.Background(),
data: []byte(fmt.Sprintf("You can find a nvapi secret %s within but not valid", inactiveSecret)), // the secret would satisfy the regex but not pass validation
verify: true,
},
want: []detectors.Result{
{
DetectorType: detectorspb.DetectorType_NVAPI,
Verified: false,
},
},
wantErr: false,
wantVerificationErr: false,
},
{
name: "not found",
s: Scanner{},
args: args{
ctx: context.Background(),
data: []byte("You cannot find the secret within"),
verify: true,
},
want: nil,
wantErr: false,
wantVerificationErr: false,
},
{
name: "found, would be verified if not for timeout",
s: Scanner{client: common.SaneHttpClientTimeOut(1 * time.Microsecond)},
args: args{
ctx: context.Background(),
data: []byte(fmt.Sprintf("You can find a nvapi secret %s within", secret)),
verify: true,
},
want: []detectors.Result{
{
DetectorType: detectorspb.DetectorType_NVAPI,
Verified: false,
},
},
wantErr: false,
wantVerificationErr: true,
},
{
name: "found, verified but unexpected api surface",
s: Scanner{client: common.ConstantResponseHttpClient(404, "")},
args: args{
ctx: context.Background(),
data: []byte(fmt.Sprintf("You can find a nvapi secret %s within", secret)),
verify: true,
},
want: []detectors.Result{
{
DetectorType: detectorspb.DetectorType_NVAPI,
Verified: false,
},
},
wantErr: false,
wantVerificationErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := tt.s.FromData(tt.args.ctx, tt.args.verify, tt.args.data)
if (err != nil) != tt.wantErr {
t.Errorf("Nvapi.FromData() error = %v, wantErr %v", err, tt.wantErr)
return
}
for i := range got {
if len(got[i].Raw) == 0 {
t.Fatalf("no raw secret present: \n %+v", got[i])
}
if (got[i].VerificationError() != nil) != tt.wantVerificationErr {
t.Fatalf("wantVerificationError = %v, verification error = %v", tt.wantVerificationErr, got[i].VerificationError())
}
}
ignoreOpts := cmpopts.IgnoreFields(detectors.Result{}, "Raw", "verificationError")
if diff := cmp.Diff(got, tt.want, ignoreOpts); diff != "" {
t.Errorf("Nvapi.FromData() %s diff: (-got +want)\n%s", tt.name, diff)
}
})
}
}
func BenchmarkFromData(benchmark *testing.B) {
ctx := context.Background()
s := Scanner{}
for name, data := range detectors.MustGetBenchmarkData() {
benchmark.Run(name, func(b *testing.B) {
b.ResetTimer()
for n := 0; n < b.N; n++ {
_, err := s.FromData(ctx, false, data)
if err != nil {
b.Fatal(err)
}
}
})
}
}
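Note that this test file is gated behind the detectors build tag (the //go:build detectors directive at the top), so a plain go test run skips it; assuming the usual trufflehog test workflow, it is opted into explicitly, for example with go test -tags detectors ./pkg/detectors/nvapi/... The same applies to the pypi tests further down.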

125
pkg/detectors/pypi/pypi.go Normal file
View file

@ -0,0 +1,125 @@
package pypi
import (
"bytes"
"context"
"fmt"
"io"
"mime/multipart"
"net/http"
regexp "github.com/wasilibs/go-re2"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/detectorspb"
)
type Scanner struct {
client *http.Client
}
// Ensure the Scanner satisfies the interface at compile time.
var _ detectors.Detector = (*Scanner)(nil)
var (
defaultClient = common.SaneHttpClient()
// Make sure that your group is surrounded in boundary characters such as below to reduce false positives.
keyPat = regexp.MustCompile("(pypi-AgEIcHlwaS5vcmcCJ[a-zA-Z0-9-_]{150,157})")
)
// Keywords are used for efficiently pre-filtering chunks.
// Use identifiers in the secret preferably, or the provider name.
func (s Scanner) Keywords() []string {
return []string{"pypi-AgEIcHlwaS5vcmcCJ"}
}
// FromData will find and optionally verify Pypi secrets in a given set of bytes.
func (s Scanner) FromData(ctx context.Context, verify bool, data []byte) (results []detectors.Result, err error) {
dataStr := string(data)
uniqueMatches := make(map[string]struct{})
for _, match := range keyPat.FindAllStringSubmatch(dataStr, -1) {
uniqueMatches[match[1]] = struct{}{}
}
for match := range uniqueMatches {
s1 := detectors.Result{
DetectorType: detectorspb.DetectorType_PyPI,
Raw: []byte(match),
}
if verify {
client := s.client
if client == nil {
client = defaultClient
}
isVerified, extraData, verificationErr := verifyMatch(ctx, client, match)
s1.Verified = isVerified
s1.ExtraData = extraData
s1.SetVerificationError(verificationErr, match)
}
results = append(results, s1)
}
return
}
func verifyMatch(ctx context.Context, client *http.Client, token string) (bool, map[string]string, error) {
// Create a buffer to hold the multipart form data
var body bytes.Buffer
writer := multipart.NewWriter(&body)
// Add the form fields like in the curl request
_ = writer.WriteField(":action", "file_upload")
_ = writer.WriteField("name", "dummy-package")
_ = writer.WriteField("version", "0.0.1")
_ = writer.WriteField("content", "dummy-content")
// Close the writer to finalize the form
writer.Close()
// Create a new POST request to the PyPI legacy upload URL
req, err := http.NewRequestWithContext(ctx, http.MethodPost, "https://upload.pypi.org/legacy/", &body)
if err != nil {
return false, nil, err
}
// Add the Authorization header with the PyPI API token
req.Header.Add("Authorization", "token "+token)
// Set the Content-Type to the multipart form boundary
req.Header.Set("Content-Type", writer.FormDataContentType())
// Execute the HTTP request
res, err := client.Do(req)
if err != nil {
return false, nil, err
}
defer func() {
_, _ = io.Copy(io.Discard, res.Body)
_ = res.Body.Close()
}()
// Check for expected status codes for verification
if res.StatusCode == http.StatusBadRequest {
verified, err := common.ResponseContainsSubstring(res.Body, "Include at least one message digest.")
if err != nil {
return false, nil, err
}
if verified {
return true, nil, nil
}
} else if res.StatusCode == http.StatusForbidden {
// If we get a 403 status, the key is invalid
return false, nil, nil
}
// For all other status codes, return an error
return false, nil, fmt.Errorf("unexpected HTTP response status %d", res.StatusCode)
}
func (s Scanner) Type() detectorspb.DetectorType {
return detectorspb.DetectorType_PyPI
}
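As a companion to the verification flow above (again, not part of the commit), a hedged sketch of what a caller sees from the new PyPI detector; the token is the synthetic sample from the pattern test below, so with verification enabled it would only ever come back unverified.
package main

import (
	"context"
	"fmt"

	"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/pypi"
)

func main() {
	s := pypi.Scanner{}
	// Synthetic token from the pattern test below; it fits keyPat but is not a real credential.
	data := []byte("pypi_token = 'pypi-AgEIcHlwaS5vcmcCJDQyM2M0Yjg4LWUyNDnnnnhhMy1hNigyLWI2ZWUyMTMwYzI2MgACKlszLCJhOWQwMWE0MS01Nzk4LTQyOWYtOTk4MS1lYzE5NTJhM2E3YzgiXQAABiBeGtDnnnnnV32VpiyeU-YUDKplSv0E5ngmwsnHaV2jGg'")
	results, err := s.FromData(context.Background(), false, data)
	if err != nil {
		panic(err)
	}
	fmt.Printf("pypi matches found: %d\n", len(results))
}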

View file

@ -0,0 +1,220 @@
//go:build detectors
// +build detectors
package pypi
import (
"context"
"fmt"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors"
"github.com/trufflesecurity/trufflehog/v3/pkg/engine/ahocorasick"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/detectorspb"
)
func TestPypi_Pattern(t *testing.T) {
d := Scanner{}
ahoCorasickCore := ahocorasick.NewAhoCorasickCore([]detectors.Detector{d})
tests := []struct {
name string
input string
want []string
}{
{
name: "typical pattern",
input: "pypi_token = 'pypi-AgEIcHlwaS5vcmcCJDQyM2M0Yjg4LWUyNDnnnnhhMy1hNigyLWI2ZWUyMTMwYzI2MgACKlszLCJhOWQwMWE0MS01Nzk4LTQyOWYtOTk4MS1lYzE5NTJhM2E3YzgiXQAABiBeGtDnnnnnV32VpiyeU-YUDKplSv0E5ngmwsnHaV2jGg'",
want: []string{"pypi-AgEIcHlwaS5vcmcCJDQyM2M0Yjg4LWUyNDnnnnhhMy1hNigyLWI2ZWUyMTMwYzI2MgACKlszLCJhOWQwMWE0MS01Nzk4LTQyOWYtOTk4MS1lYzE5NTJhM2E3YzgiXQAABiBeGtDnnnnnV32VpiyeU-YUDKplSv0E5ngmwsnHaV2jGg"},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
matchedDetectors := ahoCorasickCore.FindDetectorMatches([]byte(test.input))
if len(matchedDetectors) == 0 {
t.Errorf("keywords '%v' not matched by: %s", d.Keywords(), test.input)
return
}
results, err := d.FromData(context.Background(), false, []byte(test.input))
if err != nil {
t.Errorf("error = %v", err)
return
}
if len(results) != len(test.want) {
if len(results) == 0 {
t.Errorf("did not receive result")
} else {
t.Errorf("expected %d results, only received %d", len(test.want), len(results))
}
return
}
actual := make(map[string]struct{}, len(results))
for _, r := range results {
if len(r.RawV2) > 0 {
actual[string(r.RawV2)] = struct{}{}
} else {
actual[string(r.Raw)] = struct{}{}
}
}
expected := make(map[string]struct{}, len(test.want))
for _, v := range test.want {
expected[v] = struct{}{}
}
if diff := cmp.Diff(expected, actual); diff != "" {
t.Errorf("%s diff: (-want +got)\n%s", test.name, diff)
}
})
}
}
func TestPypi_FromChunk(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
defer cancel()
testSecrets, err := common.GetSecret(ctx, "trufflehog-testing", "detectors5")
if err != nil {
t.Fatalf("could not get test secrets from GCP: %s", err)
}
secret := testSecrets.MustGetField("PYPI")
inactiveSecret := testSecrets.MustGetField("PYPI_INACTIVE")
type args struct {
ctx context.Context
data []byte
verify bool
}
tests := []struct {
name string
s Scanner
args args
want []detectors.Result
wantErr bool
wantVerificationErr bool
}{
{
name: "found, verified",
s: Scanner{},
args: args{
ctx: context.Background(),
data: []byte(fmt.Sprintf("You can find a pypi secret %s within", secret)),
verify: true,
},
want: []detectors.Result{
{
DetectorType: detectorspb.DetectorType_PyPI,
Verified: true,
},
},
wantErr: false,
wantVerificationErr: false,
},
{
name: "found, unverified",
s: Scanner{},
args: args{
ctx: context.Background(),
data: []byte(fmt.Sprintf("You can find a pypi secret %s within but not valid", inactiveSecret)), // the secret would satisfy the regex but not pass validation
verify: true,
},
want: []detectors.Result{
{
DetectorType: detectorspb.DetectorType_PyPI,
Verified: false,
},
},
wantErr: false,
wantVerificationErr: false,
},
{
name: "not found",
s: Scanner{},
args: args{
ctx: context.Background(),
data: []byte("You cannot find the secret within"),
verify: true,
},
want: nil,
wantErr: false,
wantVerificationErr: false,
},
{
name: "found, would be verified if not for timeout",
s: Scanner{client: common.SaneHttpClientTimeOut(1 * time.Microsecond)},
args: args{
ctx: context.Background(),
data: []byte(fmt.Sprintf("You can find a pypi secret %s within", secret)),
verify: true,
},
want: []detectors.Result{
{
DetectorType: detectorspb.DetectorType_PyPI,
Verified: false,
},
},
wantErr: false,
wantVerificationErr: true,
},
{
name: "found, verified but unexpected api surface",
s: Scanner{client: common.ConstantResponseHttpClient(404, "")},
args: args{
ctx: context.Background(),
data: []byte(fmt.Sprintf("You can find a pypi secret %s within", secret)),
verify: true,
},
want: []detectors.Result{
{
DetectorType: detectorspb.DetectorType_PyPI,
Verified: false,
},
},
wantErr: false,
wantVerificationErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := tt.s.FromData(tt.args.ctx, tt.args.verify, tt.args.data)
if (err != nil) != tt.wantErr {
t.Errorf("Pypi.FromData() error = %v, wantErr %v", err, tt.wantErr)
return
}
for i := range got {
if len(got[i].Raw) == 0 {
t.Fatalf("no raw secret present: \n %+v", got[i])
}
if (got[i].VerificationError() != nil) != tt.wantVerificationErr {
t.Fatalf("wantVerificationError = %v, verification error = %v", tt.wantVerificationErr, got[i].VerificationError())
}
}
ignoreOpts := cmpopts.IgnoreFields(detectors.Result{}, "Raw", "verificationError")
if diff := cmp.Diff(got, tt.want, ignoreOpts); diff != "" {
t.Errorf("Pypi.FromData() %s diff: (-got +want)\n%s", tt.name, diff)
}
})
}
}
func BenchmarkFromData(benchmark *testing.B) {
ctx := context.Background()
s := Scanner{}
for name, data := range detectors.MustGetBenchmarkData() {
benchmark.Run(name, func(b *testing.B) {
b.ResetTimer()
for n := 0; n < b.N; n++ {
_, err := s.FromData(ctx, false, data)
if err != nil {
b.Fatal(err)
}
}
})
}
}

View file

@ -74,6 +74,10 @@ func (s Scanner) FromData(ctx context.Context, verify bool, data []byte) (result
s1.ExtraData = map[string]string{
"access_scopes": strings.Join(handleArray, ","),
}
s1.AnalysisInfo = map[string]string{
"key": key,
"store_url": domainRes,
}
}
res.Body.Close()
}
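The two new AnalysisInfo entries attach the verified key and its store URL to the Shopify result, presumably so the analyzer subsystem can pick the credential up later. A hedged fragment of how a downstream consumer might read them, assuming result is a detectors.Result returned by FromData (the surrounding wiring is not part of this hunk):
if info := result.AnalysisInfo; info != nil {
	key, storeURL := info["key"], info["store_url"]
	_ = key      // e.g. hand the key to the Shopify analyzer
	_ = storeURL // together with the store it belongs to
}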

View file

@ -99,6 +99,7 @@ func TestShopify_FromChunk(t *testing.T) {
t.Fatalf("no raw secret present: \n %+v", got[i])
}
got[i].Raw = nil
got[i].AnalysisInfo = nil
}
if diff := pretty.Compare(got, tt.want); diff != "" {
t.Errorf("Shopify.FromData() %s diff: (-got +want)\n%s", tt.name, diff)

View file

@ -472,6 +472,7 @@ import (
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/nugetapikey"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/numverify"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/nutritionix"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/nvapi"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/nylas"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/oanda"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/okta"
@ -550,6 +551,7 @@ import (
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/purestake"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/pushbulletapikey"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/pusherchannelkey"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/pypi"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/qase"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/qualaroo"
"github.com/trufflesecurity/trufflehog/v3/pkg/detectors/qubole"
@ -805,6 +807,7 @@ import (
func DefaultDetectors() []detectors.Detector {
return []detectors.Detector{
&heroku.Scanner{},
&pypi.Scanner{},
&linearapi.Scanner{},
&alibaba.Scanner{},
aws.New(),
@ -1628,6 +1631,7 @@ func DefaultDetectors() []detectors.Detector {
atlassianv2.Scanner{},
netsuite.Scanner{},
robinhoodcrypto.Scanner{},
nvapi.Scanner{},
}
}

View file

@ -771,7 +771,7 @@ func (e *Engine) scannerWorker(ctx context.Context) {
decodeLatency.WithLabelValues(decoder.Type().String(), chunk.SourceName).Observe(float64(decodeTime))
if decoded == nil {
ctx.Logger().V(4).Info("no decoder found for chunk", "chunk", chunk)
ctx.Logger().V(4).Info("decoder not applicable for chunk", "decoder", decoder.Type().String(), "chunk", chunk)
continue
}
@ -797,7 +797,6 @@ func (e *Engine) scannerWorker(ctx context.Context) {
wgDoneFn: wgDetect.Done,
}
}
continue
}
dataSize := float64(len(chunk.Data))

View file

@ -3,6 +3,7 @@ package feature
import "sync/atomic"
var (
ForceSkipBinaries = atomic.Bool{}
ForceSkipArchives = atomic.Bool{}
SkipAdditionalRefs = atomic.Bool{}
)
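SkipAdditionalRefs follows the same pattern as the existing flags: a package-level atomic.Bool that is flipped once at startup and polled by readers. A minimal hedged sketch, assuming the package is imported as feature and that the call sites below are hypothetical:
// during CLI/option handling (hypothetical call site)
feature.SkipAdditionalRefs.Store(true)

// later, before scanning extra git refs (hypothetical reader)
if feature.SkipAdditionalRefs.Load() {
	// skip fetching and scanning additional refs
}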

View file

@ -1098,6 +1098,8 @@ const (
DetectorType_ElevenLabs DetectorType = 994
DetectorType_Netsuite DetectorType = 995
DetectorType_RobinhoodCrypto DetectorType = 996
DetectorType_NVAPI DetectorType = 997
DetectorType_PyPI DetectorType = 998
)
// Enum value maps for DetectorType.
@ -2096,6 +2098,8 @@ var (
994: "ElevenLabs",
995: "Netsuite",
996: "RobinhoodCrypto",
997: "NVAPI",
998: "PyPI",
}
DetectorType_value = map[string]int32{
"Alibaba": 0,
@ -3091,6 +3095,8 @@ var (
"ElevenLabs": 994,
"Netsuite": 995,
"RobinhoodCrypto": 996,
"NVAPI": 997,
"PyPI": 998,
}
)
@ -3544,7 +3550,7 @@ var file_detectors_proto_rawDesc = []byte{
0x4c, 0x41, 0x49, 0x4e, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x42, 0x41, 0x53, 0x45, 0x36, 0x34,
0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x55, 0x54, 0x46, 0x31, 0x36, 0x10, 0x03, 0x12, 0x13, 0x0a,
0x0f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x44, 0x5f, 0x55, 0x4e, 0x49, 0x43, 0x4f, 0x44, 0x45,
0x10, 0x04, 0x2a, 0xae, 0x7f, 0x0a, 0x0c, 0x44, 0x65, 0x74, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x54,
0x10, 0x04, 0x2a, 0xc5, 0x7f, 0x0a, 0x0c, 0x44, 0x65, 0x74, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x54,
0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x41, 0x6c, 0x69, 0x62, 0x61, 0x62, 0x61, 0x10, 0x00,
0x12, 0x08, 0x0a, 0x04, 0x41, 0x4d, 0x51, 0x50, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x57,
0x53, 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x7a, 0x75, 0x72, 0x65, 0x10, 0x03, 0x12, 0x0a,
@ -4563,11 +4569,13 @@ var file_detectors_proto_rawDesc = []byte{
0x0a, 0x45, 0x6c, 0x65, 0x76, 0x65, 0x6e, 0x4c, 0x61, 0x62, 0x73, 0x10, 0xe2, 0x07, 0x12, 0x0d,
0x0a, 0x08, 0x4e, 0x65, 0x74, 0x73, 0x75, 0x69, 0x74, 0x65, 0x10, 0xe3, 0x07, 0x12, 0x14, 0x0a,
0x0f, 0x52, 0x6f, 0x62, 0x69, 0x6e, 0x68, 0x6f, 0x6f, 0x64, 0x43, 0x72, 0x79, 0x70, 0x74, 0x6f,
0x10, 0xe4, 0x07, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f,
0x6d, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74,
0x79, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33, 0x2f,
0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73,
0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x10, 0xe4, 0x07, 0x12, 0x0a, 0x0a, 0x05, 0x4e, 0x56, 0x41, 0x50, 0x49, 0x10, 0xe5, 0x07, 0x12,
0x09, 0x0a, 0x04, 0x50, 0x79, 0x50, 0x49, 0x10, 0xe6, 0x07, 0x42, 0x3d, 0x5a, 0x3b, 0x67, 0x69,
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65,
0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65,
0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x64, 0x65,
0x74, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x33,
}
var (

View file

@ -3117,6 +3117,117 @@ func (x *Elasticsearch) GetTimestamp() string {
return ""
}
type Sentry struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
EventId string `protobuf:"bytes,1,opt,name=event_id,json=eventId,proto3" json:"event_id,omitempty"`
OrganizationId string `protobuf:"bytes,2,opt,name=organization_id,json=organizationId,proto3" json:"organization_id,omitempty"`
OrganizationSlug string `protobuf:"bytes,3,opt,name=organization_slug,json=organizationSlug,proto3" json:"organization_slug,omitempty"`
OrganizationDateCreated string `protobuf:"bytes,4,opt,name=organization_date_created,json=organizationDateCreated,proto3" json:"organization_date_created,omitempty"`
ProjectId string `protobuf:"bytes,5,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
ProjectSlug string `protobuf:"bytes,6,opt,name=project_slug,json=projectSlug,proto3" json:"project_slug,omitempty"`
IssueId string `protobuf:"bytes,7,opt,name=issue_id,json=issueId,proto3" json:"issue_id,omitempty"`
DateCreated string `protobuf:"bytes,8,opt,name=date_created,json=dateCreated,proto3" json:"date_created,omitempty"`
Link string `protobuf:"bytes,9,opt,name=link,proto3" json:"link,omitempty"`
}
func (x *Sentry) Reset() {
*x = Sentry{}
if protoimpl.UnsafeEnabled {
mi := &file_source_metadata_proto_msgTypes[33]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Sentry) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Sentry) ProtoMessage() {}
func (x *Sentry) ProtoReflect() protoreflect.Message {
mi := &file_source_metadata_proto_msgTypes[33]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Sentry.ProtoReflect.Descriptor instead.
func (*Sentry) Descriptor() ([]byte, []int) {
return file_source_metadata_proto_rawDescGZIP(), []int{33}
}
func (x *Sentry) GetEventId() string {
if x != nil {
return x.EventId
}
return ""
}
func (x *Sentry) GetOrganizationId() string {
if x != nil {
return x.OrganizationId
}
return ""
}
func (x *Sentry) GetOrganizationSlug() string {
if x != nil {
return x.OrganizationSlug
}
return ""
}
func (x *Sentry) GetOrganizationDateCreated() string {
if x != nil {
return x.OrganizationDateCreated
}
return ""
}
func (x *Sentry) GetProjectId() string {
if x != nil {
return x.ProjectId
}
return ""
}
func (x *Sentry) GetProjectSlug() string {
if x != nil {
return x.ProjectSlug
}
return ""
}
func (x *Sentry) GetIssueId() string {
if x != nil {
return x.IssueId
}
return ""
}
func (x *Sentry) GetDateCreated() string {
if x != nil {
return x.DateCreated
}
return ""
}
func (x *Sentry) GetLink() string {
if x != nil {
return x.Link
}
return ""
}
type MetaData struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
@ -3156,13 +3267,14 @@ type MetaData struct {
// *MetaData_Webhook
// *MetaData_Elasticsearch
// *MetaData_Huggingface
// *MetaData_Sentry
Data isMetaData_Data `protobuf_oneof:"data"`
}
func (x *MetaData) Reset() {
*x = MetaData{}
if protoimpl.UnsafeEnabled {
mi := &file_source_metadata_proto_msgTypes[33]
mi := &file_source_metadata_proto_msgTypes[34]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@ -3175,7 +3287,7 @@ func (x *MetaData) String() string {
func (*MetaData) ProtoMessage() {}
func (x *MetaData) ProtoReflect() protoreflect.Message {
mi := &file_source_metadata_proto_msgTypes[33]
mi := &file_source_metadata_proto_msgTypes[34]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@ -3188,7 +3300,7 @@ func (x *MetaData) ProtoReflect() protoreflect.Message {
// Deprecated: Use MetaData.ProtoReflect.Descriptor instead.
func (*MetaData) Descriptor() ([]byte, []int) {
return file_source_metadata_proto_rawDescGZIP(), []int{33}
return file_source_metadata_proto_rawDescGZIP(), []int{34}
}
func (m *MetaData) GetData() isMetaData_Data {
@ -3422,6 +3534,13 @@ func (x *MetaData) GetHuggingface() *Huggingface {
return nil
}
func (x *MetaData) GetSentry() *Sentry {
if x, ok := x.GetData().(*MetaData_Sentry); ok {
return x.Sentry
}
return nil
}
type isMetaData_Data interface {
isMetaData_Data()
}
@ -3554,6 +3673,10 @@ type MetaData_Huggingface struct {
Huggingface *Huggingface `protobuf:"bytes,32,opt,name=huggingface,proto3,oneof"`
}
type MetaData_Sentry struct {
Sentry *Sentry `protobuf:"bytes,33,opt,name=sentry,proto3,oneof"`
}
func (*MetaData_Azure) isMetaData_Data() {}
func (*MetaData_Bitbucket) isMetaData_Data() {}
@ -3618,6 +3741,8 @@ func (*MetaData_Elasticsearch) isMetaData_Data() {}
func (*MetaData_Huggingface) isMetaData_Data() {}
func (*MetaData_Sentry) isMetaData_Data() {}
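With the oneof wiring in place, Sentry source metadata can be attached like any other member. A hedged fragment, assuming the generated package is imported as source_metadatapb (per the go_package path in the raw descriptor) and using made-up field values:
metadata := &source_metadatapb.MetaData{
	Data: &source_metadatapb.MetaData_Sentry{
		Sentry: &source_metadatapb.Sentry{
			OrganizationSlug: "example-org",     // hypothetical
			ProjectSlug:      "example-project", // hypothetical
			IssueId:          "12345",           // hypothetical
			Link:             "https://sentry.io/organizations/example-org/issues/12345/", // hypothetical
		},
	},
}
_ = metadata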
var File_source_metadata_proto protoreflect.FileDescriptor
var file_source_metadata_proto_rawDesc = []byte{
@ -4009,125 +4134,148 @@ var file_source_metadata_proto_rawDesc = []byte{
0x1f, 0x0a, 0x0b, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02,
0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64,
0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20,
0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xc3,
0x0d, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x44, 0x61, 0x74, 0x61, 0x12, 0x2e, 0x0a, 0x05, 0x61,
0x7a, 0x75, 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x7a, 0x75,
0x72, 0x65, 0x48, 0x00, 0x52, 0x05, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x12, 0x3a, 0x0a, 0x09, 0x62,
0x69, 0x74, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a,
0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
0x2e, 0x42, 0x69, 0x74, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x48, 0x00, 0x52, 0x09, 0x62, 0x69,
0x74, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x37, 0x0a, 0x08, 0x63, 0x69, 0x72, 0x63, 0x6c,
0x65, 0x63, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x43, 0x69, 0x72, 0x63,
0x6c, 0x65, 0x43, 0x49, 0x48, 0x00, 0x52, 0x08, 0x63, 0x69, 0x72, 0x63, 0x6c, 0x65, 0x63, 0x69,
0x12, 0x3d, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x04,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65,
0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63,
0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x12,
0x31, 0x0a, 0x06, 0x64, 0x6f, 0x63, 0x6b, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0x2e, 0x44, 0x6f, 0x63, 0x6b, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x64, 0x6f, 0x63, 0x6b,
0x65, 0x72, 0x12, 0x28, 0x0a, 0x03, 0x65, 0x63, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x01, 0x28, 0x09, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xc9,
0x02, 0x0a, 0x06, 0x53, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x76, 0x65,
0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65,
0x6e, 0x74, 0x49, 0x64, 0x12, 0x27, 0x0a, 0x0f, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x6f,
0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x2b, 0x0a,
0x11, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x6c,
0x75, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69,
0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x6c, 0x75, 0x67, 0x12, 0x3a, 0x0a, 0x19, 0x6f, 0x72,
0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x64, 0x61, 0x74, 0x65, 0x5f,
0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x6f,
0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x65, 0x43,
0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63,
0x74, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x6a,
0x65, 0x63, 0x74, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74,
0x5f, 0x73, 0x6c, 0x75, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x72, 0x6f,
0x6a, 0x65, 0x63, 0x74, 0x53, 0x6c, 0x75, 0x67, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x73, 0x75,
0x65, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x69, 0x73, 0x73, 0x75,
0x65, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x72, 0x65, 0x61,
0x74, 0x65, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x61, 0x74, 0x65, 0x43,
0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x09,
0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0xf6, 0x0d, 0x0a, 0x08, 0x4d,
0x65, 0x74, 0x61, 0x44, 0x61, 0x74, 0x61, 0x12, 0x2e, 0x0a, 0x05, 0x61, 0x7a, 0x75, 0x72, 0x65,
0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x7a, 0x75, 0x72, 0x65, 0x48, 0x00,
0x52, 0x05, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x12, 0x3a, 0x0a, 0x09, 0x62, 0x69, 0x74, 0x62, 0x75,
0x63, 0x6b, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x42, 0x69, 0x74,
0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x48, 0x00, 0x52, 0x09, 0x62, 0x69, 0x74, 0x62, 0x75, 0x63,
0x6b, 0x65, 0x74, 0x12, 0x37, 0x0a, 0x08, 0x63, 0x69, 0x72, 0x63, 0x6c, 0x65, 0x63, 0x69, 0x18,
0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x43, 0x69, 0x72, 0x63, 0x6c, 0x65, 0x43, 0x49,
0x48, 0x00, 0x52, 0x08, 0x63, 0x69, 0x72, 0x63, 0x6c, 0x65, 0x63, 0x69, 0x12, 0x3d, 0x0a, 0x0a,
0x63, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61,
0x74, 0x61, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52,
0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x64,
0x6f, 0x63, 0x6b, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x44, 0x6f,
0x63, 0x6b, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x64, 0x6f, 0x63, 0x6b, 0x65, 0x72, 0x12, 0x28,
0x0a, 0x03, 0x65, 0x63, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x45, 0x43,
0x52, 0x48, 0x00, 0x52, 0x03, 0x65, 0x63, 0x72, 0x12, 0x28, 0x0a, 0x03, 0x67, 0x63, 0x73, 0x18,
0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x43, 0x53, 0x48, 0x00, 0x52, 0x03, 0x67,
0x63, 0x73, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x18, 0x08, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x69, 0x74, 0x68, 0x75, 0x62, 0x48, 0x00, 0x52, 0x06, 0x67,
0x69, 0x74, 0x68, 0x75, 0x62, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x18,
0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x48, 0x00,
0x52, 0x06, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x12, 0x2b, 0x0a, 0x04, 0x6a, 0x69, 0x72, 0x61,
0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4a, 0x69, 0x72, 0x61, 0x48, 0x00, 0x52,
0x04, 0x6a, 0x69, 0x72, 0x61, 0x12, 0x28, 0x0a, 0x03, 0x6e, 0x70, 0x6d, 0x18, 0x0b, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0x2e, 0x4e, 0x50, 0x4d, 0x48, 0x00, 0x52, 0x03, 0x6e, 0x70, 0x6d, 0x12,
0x2b, 0x0a, 0x04, 0x70, 0x79, 0x70, 0x69, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x50, 0x79, 0x50, 0x69, 0x48, 0x00, 0x52, 0x04, 0x70, 0x79, 0x70, 0x69, 0x12, 0x25, 0x0a, 0x02,
0x73, 0x33, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x33, 0x48, 0x00, 0x52,
0x02, 0x73, 0x33, 0x12, 0x2e, 0x0a, 0x05, 0x73, 0x6c, 0x61, 0x63, 0x6b, 0x18, 0x0e, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x6c, 0x61, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x05, 0x73, 0x6c,
0x61, 0x63, 0x6b, 0x12, 0x3d, 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x79, 0x73, 0x74, 0x65,
0x6d, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x79,
0x73, 0x74, 0x65, 0x6d, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x79, 0x73, 0x74,
0x65, 0x6d, 0x12, 0x28, 0x0a, 0x03, 0x67, 0x69, 0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0x2e, 0x45, 0x43, 0x52, 0x48, 0x00, 0x52, 0x03, 0x65, 0x63, 0x72, 0x12, 0x28, 0x0a, 0x03,
0x67, 0x63, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x43, 0x53, 0x48,
0x00, 0x52, 0x03, 0x67, 0x63, 0x73, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x69, 0x74, 0x68, 0x75, 0x62, 0x48,
0x00, 0x52, 0x06, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x69, 0x74,
0x6c, 0x61, 0x62, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x69, 0x74, 0x6c,
0x61, 0x62, 0x48, 0x00, 0x52, 0x06, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x12, 0x2b, 0x0a, 0x04,
0x6a, 0x69, 0x72, 0x61, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4a, 0x69, 0x72,
0x61, 0x48, 0x00, 0x52, 0x04, 0x6a, 0x69, 0x72, 0x61, 0x12, 0x28, 0x0a, 0x03, 0x6e, 0x70, 0x6d,
0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4e, 0x50, 0x4d, 0x48, 0x00, 0x52, 0x03,
0x6e, 0x70, 0x6d, 0x12, 0x2b, 0x0a, 0x04, 0x70, 0x79, 0x70, 0x69, 0x18, 0x0c, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64,
0x61, 0x74, 0x61, 0x2e, 0x50, 0x79, 0x50, 0x69, 0x48, 0x00, 0x52, 0x04, 0x70, 0x79, 0x70, 0x69,
0x12, 0x25, 0x0a, 0x02, 0x73, 0x33, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73,
0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53,
0x33, 0x48, 0x00, 0x52, 0x02, 0x73, 0x33, 0x12, 0x2e, 0x0a, 0x05, 0x73, 0x6c, 0x61, 0x63, 0x6b,
0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x6c, 0x61, 0x63, 0x6b, 0x48, 0x00,
0x52, 0x05, 0x73, 0x6c, 0x61, 0x63, 0x6b, 0x12, 0x3d, 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x73,
0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x46, 0x69,
0x6c, 0x65, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65,
0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x28, 0x0a, 0x03, 0x67, 0x69, 0x74, 0x18, 0x10, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x69, 0x74, 0x48, 0x00, 0x52, 0x03, 0x67, 0x69, 0x74,
0x12, 0x2b, 0x0a, 0x04, 0x74, 0x65, 0x73, 0x74, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15,
0x61, 0x2e, 0x47, 0x69, 0x74, 0x48, 0x00, 0x52, 0x03, 0x67, 0x69, 0x74, 0x12, 0x2b, 0x0a, 0x04,
0x74, 0x65, 0x73, 0x74, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x65, 0x73,
0x74, 0x48, 0x00, 0x52, 0x04, 0x74, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x09, 0x62, 0x75, 0x69,
0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73,
0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x42,
0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65, 0x48, 0x00, 0x52, 0x09, 0x62, 0x75, 0x69, 0x6c,
0x64, 0x6b, 0x69, 0x74, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x65, 0x72, 0x72, 0x69, 0x74, 0x18,
0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x65, 0x72, 0x72, 0x69, 0x74, 0x48, 0x00,
0x52, 0x06, 0x67, 0x65, 0x72, 0x72, 0x69, 0x74, 0x12, 0x34, 0x0a, 0x07, 0x6a, 0x65, 0x6e, 0x6b,
0x69, 0x6e, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4a, 0x65, 0x6e, 0x6b,
0x69, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x07, 0x6a, 0x65, 0x6e, 0x6b, 0x69, 0x6e, 0x73, 0x12, 0x2e,
0x0a, 0x05, 0x74, 0x65, 0x61, 0x6d, 0x73, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x54, 0x65, 0x61, 0x6d, 0x73, 0x48, 0x00, 0x52, 0x05, 0x74, 0x65, 0x61, 0x6d, 0x73, 0x12, 0x40,
0x0a, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x16, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72,
0x79, 0x48, 0x00, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79,
0x12, 0x31, 0x0a, 0x06, 0x73, 0x79, 0x73, 0x6c, 0x6f, 0x67, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61,
0x74, 0x61, 0x2e, 0x53, 0x79, 0x73, 0x6c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x06, 0x73, 0x79, 0x73,
0x6c, 0x6f, 0x67, 0x12, 0x34, 0x0a, 0x07, 0x66, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x18, 0x18,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65,
0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x46, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x48, 0x00,
0x52, 0x07, 0x66, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x0a, 0x73, 0x68, 0x61,
0x72, 0x65, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x53, 0x68, 0x61, 0x72, 0x65, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x68,
0x61, 0x72, 0x65, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x40, 0x0a, 0x0b, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x12, 0x3d, 0x0a, 0x0a, 0x61, 0x7a,
0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x18, 0x1b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b,
0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
0x2e, 0x54, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x74, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a,
0x09, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x1a, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61,
0x74, 0x61, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65, 0x48, 0x00, 0x52, 0x09,
0x62, 0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x65, 0x72,
0x72, 0x69, 0x74, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x65, 0x72, 0x72,
0x69, 0x74, 0x48, 0x00, 0x52, 0x06, 0x67, 0x65, 0x72, 0x72, 0x69, 0x74, 0x12, 0x34, 0x0a, 0x07,
0x6a, 0x65, 0x6e, 0x6b, 0x69, 0x6e, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x4a, 0x65, 0x6e, 0x6b, 0x69, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x07, 0x6a, 0x65, 0x6e, 0x6b, 0x69,
0x6e, 0x73, 0x12, 0x2e, 0x0a, 0x05, 0x74, 0x65, 0x61, 0x6d, 0x73, 0x18, 0x15, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64,
0x61, 0x74, 0x61, 0x2e, 0x54, 0x65, 0x61, 0x6d, 0x73, 0x48, 0x00, 0x52, 0x05, 0x74, 0x65, 0x61,
0x6d, 0x73, 0x12, 0x40, 0x0a, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72,
0x79, 0x18, 0x16, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61,
0x63, 0x74, 0x6f, 0x72, 0x79, 0x48, 0x00, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63,
0x74, 0x6f, 0x72, 0x79, 0x12, 0x31, 0x0a, 0x06, 0x73, 0x79, 0x73, 0x6c, 0x6f, 0x67, 0x18, 0x17,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65,
0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x79, 0x73, 0x6c, 0x6f, 0x67, 0x48, 0x00, 0x52,
0x06, 0x73, 0x79, 0x73, 0x6c, 0x6f, 0x67, 0x12, 0x34, 0x0a, 0x07, 0x66, 0x6f, 0x72, 0x61, 0x67,
0x65, 0x72, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x46, 0x6f, 0x72, 0x61, 0x67,
0x65, 0x72, 0x48, 0x00, 0x52, 0x07, 0x66, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x12, 0x3d, 0x0a,
0x0a, 0x73, 0x68, 0x61, 0x72, 0x65, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x19, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64,
0x61, 0x74, 0x61, 0x2e, 0x53, 0x68, 0x61, 0x72, 0x65, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x48, 0x00,
0x52, 0x0a, 0x73, 0x68, 0x61, 0x72, 0x65, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x40, 0x0a, 0x0b,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x18, 0x1a, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64,
0x61, 0x74, 0x61, 0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x48,
0x00, 0x52, 0x0b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x12, 0x3d,
0x0a, 0x0a, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x18, 0x1b, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x7a, 0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x48,
0x00, 0x52, 0x0a, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x12, 0x37, 0x0a,
0x08, 0x74, 0x72, 0x61, 0x76, 0x69, 0x73, 0x43, 0x49, 0x18, 0x1c, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x19, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0x2e, 0x54, 0x72, 0x61, 0x76, 0x69, 0x73, 0x43, 0x49, 0x48, 0x00, 0x52, 0x08, 0x74, 0x72,
0x61, 0x76, 0x69, 0x73, 0x43, 0x49, 0x12, 0x34, 0x0a, 0x07, 0x70, 0x6f, 0x73, 0x74, 0x6d, 0x61,
0x6e, 0x18, 0x1d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x6d, 0x61,
0x6e, 0x48, 0x00, 0x52, 0x07, 0x70, 0x6f, 0x73, 0x74, 0x6d, 0x61, 0x6e, 0x12, 0x34, 0x0a, 0x07,
0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x57, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x48, 0x00, 0x52, 0x07, 0x77, 0x65, 0x62, 0x68, 0x6f,
0x6f, 0x6b, 0x12, 0x46, 0x0a, 0x0d, 0x65, 0x6c, 0x61, 0x73, 0x74, 0x69, 0x63, 0x73, 0x65, 0x61,
0x72, 0x63, 0x68, 0x18, 0x1f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x45, 0x6c, 0x61, 0x73,
0x74, 0x69, 0x63, 0x73, 0x65, 0x61, 0x72, 0x63, 0x68, 0x48, 0x00, 0x52, 0x0d, 0x65, 0x6c, 0x61,
0x73, 0x74, 0x69, 0x63, 0x73, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x40, 0x0a, 0x0b, 0x68, 0x75,
0x67, 0x67, 0x69, 0x6e, 0x67, 0x66, 0x61, 0x63, 0x65, 0x18, 0x20, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x1c, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0x2e, 0x48, 0x75, 0x67, 0x67, 0x69, 0x6e, 0x67, 0x66, 0x61, 0x63, 0x65, 0x48, 0x00, 0x52,
0x0b, 0x68, 0x75, 0x67, 0x67, 0x69, 0x6e, 0x67, 0x66, 0x61, 0x63, 0x65, 0x42, 0x06, 0x0a, 0x04,
0x64, 0x61, 0x74, 0x61, 0x2a, 0x3e, 0x0a, 0x0a, 0x56, 0x69, 0x73, 0x69, 0x62, 0x69, 0x6c, 0x69,
0x74, 0x79, 0x12, 0x0a, 0x0a, 0x06, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x10, 0x00, 0x12, 0x0b,
0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x73,
0x68, 0x61, 0x72, 0x65, 0x64, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x75, 0x6e, 0x6b, 0x6e, 0x6f,
0x77, 0x6e, 0x10, 0x03, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63,
0x6f, 0x6d, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69,
0x74, 0x79, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33,
0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x33,
0x2e, 0x41, 0x7a, 0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x61,
0x7a, 0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x12, 0x37, 0x0a, 0x08, 0x74, 0x72, 0x61,
0x76, 0x69, 0x73, 0x43, 0x49, 0x18, 0x1c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x72,
0x61, 0x76, 0x69, 0x73, 0x43, 0x49, 0x48, 0x00, 0x52, 0x08, 0x74, 0x72, 0x61, 0x76, 0x69, 0x73,
0x43, 0x49, 0x12, 0x34, 0x0a, 0x07, 0x70, 0x6f, 0x73, 0x74, 0x6d, 0x61, 0x6e, 0x18, 0x1d, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x6d, 0x61, 0x6e, 0x48, 0x00, 0x52,
0x07, 0x70, 0x6f, 0x73, 0x74, 0x6d, 0x61, 0x6e, 0x12, 0x34, 0x0a, 0x07, 0x77, 0x65, 0x62, 0x68,
0x6f, 0x6f, 0x6b, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x57, 0x65, 0x62, 0x68,
0x6f, 0x6f, 0x6b, 0x48, 0x00, 0x52, 0x07, 0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x12, 0x46,
0x0a, 0x0d, 0x65, 0x6c, 0x61, 0x73, 0x74, 0x69, 0x63, 0x73, 0x65, 0x61, 0x72, 0x63, 0x68, 0x18,
0x1f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x45, 0x6c, 0x61, 0x73, 0x74, 0x69, 0x63, 0x73,
0x65, 0x61, 0x72, 0x63, 0x68, 0x48, 0x00, 0x52, 0x0d, 0x65, 0x6c, 0x61, 0x73, 0x74, 0x69, 0x63,
0x73, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x40, 0x0a, 0x0b, 0x68, 0x75, 0x67, 0x67, 0x69, 0x6e,
0x67, 0x66, 0x61, 0x63, 0x65, 0x18, 0x20, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x48, 0x75,
0x67, 0x67, 0x69, 0x6e, 0x67, 0x66, 0x61, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x68, 0x75, 0x67,
0x67, 0x69, 0x6e, 0x67, 0x66, 0x61, 0x63, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x73, 0x65, 0x6e, 0x74,
0x72, 0x79, 0x18, 0x21, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x65, 0x6e, 0x74, 0x72,
0x79, 0x48, 0x00, 0x52, 0x06, 0x73, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x42, 0x06, 0x0a, 0x04, 0x64,
0x61, 0x74, 0x61, 0x2a, 0x3e, 0x0a, 0x0a, 0x56, 0x69, 0x73, 0x69, 0x62, 0x69, 0x6c, 0x69, 0x74,
0x79, 0x12, 0x0a, 0x0a, 0x06, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x10, 0x00, 0x12, 0x0b, 0x0a,
0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x73, 0x68,
0x61, 0x72, 0x65, 0x64, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x75, 0x6e, 0x6b, 0x6e, 0x6f, 0x77,
0x6e, 0x10, 0x03, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f,
0x6d, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74,
0x79, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33, 0x2f,
0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65,
0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@ -4143,7 +4291,7 @@ func file_source_metadata_proto_rawDescGZIP() []byte {
}
var file_source_metadata_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_source_metadata_proto_msgTypes = make([]protoimpl.MessageInfo, 34)
var file_source_metadata_proto_msgTypes = make([]protoimpl.MessageInfo, 35)
var file_source_metadata_proto_goTypes = []interface{}{
(Visibility)(0), // 0: source_metadata.Visibility
(*Azure)(nil), // 1: source_metadata.Azure
@ -4179,8 +4327,9 @@ var file_source_metadata_proto_goTypes = []interface{}{
(*Vector)(nil), // 31: source_metadata.Vector
(*Webhook)(nil), // 32: source_metadata.Webhook
(*Elasticsearch)(nil), // 33: source_metadata.Elasticsearch
(*MetaData)(nil), // 34: source_metadata.MetaData
(*timestamppb.Timestamp)(nil), // 35: google.protobuf.Timestamp
(*Sentry)(nil), // 34: source_metadata.Sentry
(*MetaData)(nil), // 35: source_metadata.MetaData
(*timestamppb.Timestamp)(nil), // 36: google.protobuf.Timestamp
}
var file_source_metadata_proto_depIdxs = []int32{
0, // 0: source_metadata.Github.visibility:type_name -> source_metadata.Visibility
@ -4190,7 +4339,7 @@ var file_source_metadata_proto_depIdxs = []int32{
16, // 4: source_metadata.Forager.npm:type_name -> source_metadata.NPM
17, // 5: source_metadata.Forager.pypi:type_name -> source_metadata.PyPi
0, // 6: source_metadata.AzureRepos.visibility:type_name -> source_metadata.Visibility
35, // 7: source_metadata.Vector.timestamp:type_name -> google.protobuf.Timestamp
36, // 7: source_metadata.Vector.timestamp:type_name -> google.protobuf.Timestamp
31, // 8: source_metadata.Webhook.vector:type_name -> source_metadata.Vector
1, // 9: source_metadata.MetaData.azure:type_name -> source_metadata.Azure
2, // 10: source_metadata.MetaData.bitbucket:type_name -> source_metadata.Bitbucket
@ -4224,11 +4373,12 @@ var file_source_metadata_proto_depIdxs = []int32{
32, // 38: source_metadata.MetaData.webhook:type_name -> source_metadata.Webhook
33, // 39: source_metadata.MetaData.elasticsearch:type_name -> source_metadata.Elasticsearch
14, // 40: source_metadata.MetaData.huggingface:type_name -> source_metadata.Huggingface
41, // [41:41] is the sub-list for method output_type
41, // [41:41] is the sub-list for method input_type
41, // [41:41] is the sub-list for extension type_name
41, // [41:41] is the sub-list for extension extendee
0, // [0:41] is the sub-list for field type_name
34, // 41: source_metadata.MetaData.sentry:type_name -> source_metadata.Sentry
42, // [42:42] is the sub-list for method output_type
42, // [42:42] is the sub-list for method input_type
42, // [42:42] is the sub-list for extension type_name
42, // [42:42] is the sub-list for extension extendee
0, // [0:42] is the sub-list for field type_name
}
func init() { file_source_metadata_proto_init() }
@ -4634,6 +4784,18 @@ func file_source_metadata_proto_init() {
}
}
file_source_metadata_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Sentry); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_source_metadata_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MetaData); i {
case 0:
return &v.state
@ -4654,7 +4816,7 @@ func file_source_metadata_proto_init() {
file_source_metadata_proto_msgTypes[31].OneofWrappers = []interface{}{
(*Webhook_Vector)(nil),
}
file_source_metadata_proto_msgTypes[33].OneofWrappers = []interface{}{
file_source_metadata_proto_msgTypes[34].OneofWrappers = []interface{}{
(*MetaData_Azure)(nil),
(*MetaData_Bitbucket)(nil),
(*MetaData_Circleci)(nil),
@ -4687,6 +4849,7 @@ func file_source_metadata_proto_init() {
(*MetaData_Webhook)(nil),
(*MetaData_Elasticsearch)(nil),
(*MetaData_Huggingface)(nil),
(*MetaData_Sentry)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
@ -4694,7 +4857,7 @@ func file_source_metadata_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_source_metadata_proto_rawDesc,
NumEnums: 1,
NumMessages: 34,
NumMessages: 35,
NumExtensions: 0,
NumServices: 0,
},

View file

@ -3899,6 +3899,122 @@ var _ interface {
ErrorName() string
} = ElasticsearchValidationError{}
// Validate checks the field values on Sentry with the rules defined in the
// proto definition for this message. If any rules are violated, the first
// error encountered is returned, or nil if there are no violations.
func (m *Sentry) Validate() error {
return m.validate(false)
}
// ValidateAll checks the field values on Sentry with the rules defined in the
// proto definition for this message. If any rules are violated, the result is
// a list of violation errors wrapped in SentryMultiError, or nil if none found.
func (m *Sentry) ValidateAll() error {
return m.validate(true)
}
func (m *Sentry) validate(all bool) error {
if m == nil {
return nil
}
var errors []error
// no validation rules for EventId
// no validation rules for OrganizationId
// no validation rules for OrganizationSlug
// no validation rules for OrganizationDateCreated
// no validation rules for ProjectId
// no validation rules for ProjectSlug
// no validation rules for IssueId
// no validation rules for DateCreated
// no validation rules for Link
if len(errors) > 0 {
return SentryMultiError(errors)
}
return nil
}
// SentryMultiError is an error wrapping multiple validation errors returned by
// Sentry.ValidateAll() if the designated constraints aren't met.
type SentryMultiError []error
// Error returns a concatenation of all the error messages it wraps.
func (m SentryMultiError) Error() string {
var msgs []string
for _, err := range m {
msgs = append(msgs, err.Error())
}
return strings.Join(msgs, "; ")
}
// AllErrors returns a list of validation violation errors.
func (m SentryMultiError) AllErrors() []error { return m }
// SentryValidationError is the validation error returned by Sentry.Validate if
// the designated constraints aren't met.
type SentryValidationError struct {
field string
reason string
cause error
key bool
}
// Field function returns field value.
func (e SentryValidationError) Field() string { return e.field }
// Reason function returns reason value.
func (e SentryValidationError) Reason() string { return e.reason }
// Cause function returns cause value.
func (e SentryValidationError) Cause() error { return e.cause }
// Key function returns key value.
func (e SentryValidationError) Key() bool { return e.key }
// ErrorName returns error name.
func (e SentryValidationError) ErrorName() string { return "SentryValidationError" }
// Error satisfies the builtin error interface
func (e SentryValidationError) Error() string {
cause := ""
if e.cause != nil {
cause = fmt.Sprintf(" | caused by: %v", e.cause)
}
key := ""
if e.key {
key = "key for "
}
return fmt.Sprintf(
"invalid %sSentry.%s: %s%s",
key,
e.field,
e.reason,
cause)
}
var _ error = SentryValidationError{}
var _ interface {
Field() string
Reason() string
Key() bool
Cause() error
ErrorName() string
} = SentryValidationError{}
// Validate checks the field values on MetaData with the rules defined in the
// proto definition for this message. If any rules are violated, the first
// error encountered is returned, or nil if there are no violations.
@ -5234,6 +5350,47 @@ func (m *MetaData) validate(all bool) error {
}
}
case *MetaData_Sentry:
if v == nil {
err := MetaDataValidationError{
field: "Data",
reason: "oneof value cannot be a typed-nil",
}
if !all {
return err
}
errors = append(errors, err)
}
if all {
switch v := interface{}(m.GetSentry()).(type) {
case interface{ ValidateAll() error }:
if err := v.ValidateAll(); err != nil {
errors = append(errors, MetaDataValidationError{
field: "Sentry",
reason: "embedded message failed validation",
cause: err,
})
}
case interface{ Validate() error }:
if err := v.Validate(); err != nil {
errors = append(errors, MetaDataValidationError{
field: "Sentry",
reason: "embedded message failed validation",
cause: err,
})
}
}
} else if v, ok := interface{}(m.GetSentry()).(interface{ Validate() error }); ok {
if err := v.Validate(); err != nil {
return MetaDataValidationError{
field: "Sentry",
reason: "embedded message failed validation",
cause: err,
}
}
}
default:
_ = v // ensures v is used
}

View file

@ -66,6 +66,7 @@ const (
SourceType_SOURCE_TYPE_ELASTICSEARCH SourceType = 35
SourceType_SOURCE_TYPE_HUGGINGFACE SourceType = 36
SourceType_SOURCE_TYPE_GITHUB_EXPERIMENTAL SourceType = 37
SourceType_SOURCE_TYPE_SENTRY SourceType = 38
)
// Enum value maps for SourceType.
@ -109,6 +110,7 @@ var (
35: "SOURCE_TYPE_ELASTICSEARCH",
36: "SOURCE_TYPE_HUGGINGFACE",
37: "SOURCE_TYPE_GITHUB_EXPERIMENTAL",
38: "SOURCE_TYPE_SENTRY",
}
SourceType_value = map[string]int32{
"SOURCE_TYPE_AZURE_STORAGE": 0,
@ -149,6 +151,7 @@ var (
"SOURCE_TYPE_ELASTICSEARCH": 35,
"SOURCE_TYPE_HUGGINGFACE": 36,
"SOURCE_TYPE_GITHUB_EXPERIMENTAL": 37,
"SOURCE_TYPE_SENTRY": 38,
}
)
@ -4218,6 +4221,125 @@ func (x *Elasticsearch) GetBestEffortScan() bool {
return false
}
type Sentry struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Endpoint string `protobuf:"bytes,1,opt,name=endpoint,proto3" json:"endpoint,omitempty"`
// Types that are assignable to Credential:
//
// *Sentry_AuthToken
// *Sentry_DsnKey
// *Sentry_ApiKey
Credential isSentry_Credential `protobuf_oneof:"credential"`
InsecureSkipVerifyTls bool `protobuf:"varint,5,opt,name=insecure_skip_verify_tls,json=insecureSkipVerifyTls,proto3" json:"insecure_skip_verify_tls,omitempty"`
Projects string `protobuf:"bytes,6,opt,name=projects,proto3" json:"projects,omitempty"`
}
func (x *Sentry) Reset() {
*x = Sentry{}
if protoimpl.UnsafeEnabled {
mi := &file_sources_proto_msgTypes[35]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Sentry) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Sentry) ProtoMessage() {}
func (x *Sentry) ProtoReflect() protoreflect.Message {
mi := &file_sources_proto_msgTypes[35]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Sentry.ProtoReflect.Descriptor instead.
func (*Sentry) Descriptor() ([]byte, []int) {
return file_sources_proto_rawDescGZIP(), []int{35}
}
func (x *Sentry) GetEndpoint() string {
if x != nil {
return x.Endpoint
}
return ""
}
func (m *Sentry) GetCredential() isSentry_Credential {
if m != nil {
return m.Credential
}
return nil
}
func (x *Sentry) GetAuthToken() string {
if x, ok := x.GetCredential().(*Sentry_AuthToken); ok {
return x.AuthToken
}
return ""
}
func (x *Sentry) GetDsnKey() string {
if x, ok := x.GetCredential().(*Sentry_DsnKey); ok {
return x.DsnKey
}
return ""
}
func (x *Sentry) GetApiKey() string {
if x, ok := x.GetCredential().(*Sentry_ApiKey); ok {
return x.ApiKey
}
return ""
}
func (x *Sentry) GetInsecureSkipVerifyTls() bool {
if x != nil {
return x.InsecureSkipVerifyTls
}
return false
}
func (x *Sentry) GetProjects() string {
if x != nil {
return x.Projects
}
return ""
}
type isSentry_Credential interface {
isSentry_Credential()
}
type Sentry_AuthToken struct {
AuthToken string `protobuf:"bytes,2,opt,name=auth_token,json=authToken,proto3,oneof"`
}
type Sentry_DsnKey struct {
DsnKey string `protobuf:"bytes,3,opt,name=dsn_key,json=dsnKey,proto3,oneof"`
}
type Sentry_ApiKey struct {
ApiKey string `protobuf:"bytes,4,opt,name=api_key,json=apiKey,proto3,oneof"`
}
func (*Sentry_AuthToken) isSentry_Credential() {}
func (*Sentry_DsnKey) isSentry_Credential() {}
func (*Sentry_ApiKey) isSentry_Credential() {}
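For orientation, a minimal sketch (illustrative only, not part of the generated file) of how a caller might populate the new Sentry connection message; the endpoint, token, and project values are placeholders, and sourcespb is assumed to be the import alias for this generated package:

    conn := &sourcespb.Sentry{
        Endpoint: "https://sentry.example.com",
        Credential: &sourcespb.Sentry_AuthToken{
            AuthToken: "example-auth-token", // placeholder; the DsnKey or ApiKey wrappers work the same way
        },
        Projects: "example-project",
    }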
var File_sources_proto protoreflect.FileDescriptor
var file_sources_proto_rawDesc = []byte{
@ -4863,78 +4985,94 @@ var file_sources_proto_rawDesc = []byte{
0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x28, 0x0a, 0x10, 0x62, 0x65, 0x73, 0x74, 0x5f,
0x65, 0x66, 0x66, 0x6f, 0x72, 0x74, 0x5f, 0x73, 0x63, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28,
0x08, 0x52, 0x0e, 0x62, 0x65, 0x73, 0x74, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, 0x53, 0x63, 0x61,
0x6e, 0x2a, 0xb1, 0x08, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65,
0x12, 0x1d, 0x0a, 0x19, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f,
0x41, 0x5a, 0x55, 0x52, 0x45, 0x5f, 0x53, 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, 0x10, 0x00, 0x12,
0x19, 0x0a, 0x15, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x42,
0x49, 0x54, 0x42, 0x55, 0x43, 0x4b, 0x45, 0x54, 0x10, 0x01, 0x12, 0x18, 0x0a, 0x14, 0x53, 0x4f,
0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x49, 0x52, 0x43, 0x4c, 0x45,
0x43, 0x49, 0x10, 0x02, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54,
0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4e, 0x46, 0x4c, 0x55, 0x45, 0x4e, 0x43, 0x45, 0x10, 0x03,
0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f,
0x44, 0x4f, 0x43, 0x4b, 0x45, 0x52, 0x10, 0x04, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x4f, 0x55, 0x52,
0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x43, 0x52, 0x10, 0x05, 0x12, 0x13, 0x0a,
0x0f, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x43, 0x53,
0x10, 0x06, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50,
0x45, 0x5f, 0x47, 0x49, 0x54, 0x48, 0x55, 0x42, 0x10, 0x07, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x4f,
0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43,
0x5f, 0x47, 0x49, 0x54, 0x10, 0x08, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45,
0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x49, 0x54, 0x4c, 0x41, 0x42, 0x10, 0x09, 0x12, 0x14,
0x0a, 0x10, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4a, 0x49,
0x52, 0x41, 0x10, 0x0a, 0x12, 0x24, 0x0a, 0x20, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54,
0x59, 0x50, 0x45, 0x5f, 0x4e, 0x50, 0x4d, 0x5f, 0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x44, 0x5f,
0x50, 0x41, 0x43, 0x4b, 0x41, 0x47, 0x45, 0x53, 0x10, 0x0b, 0x12, 0x25, 0x0a, 0x21, 0x53, 0x4f,
0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x59, 0x50, 0x49, 0x5f, 0x55,
0x4e, 0x41, 0x55, 0x54, 0x48, 0x44, 0x5f, 0x50, 0x41, 0x43, 0x4b, 0x41, 0x47, 0x45, 0x53, 0x10,
0x0c, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45,
0x5f, 0x53, 0x33, 0x10, 0x0d, 0x12, 0x15, 0x0a, 0x11, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f,
0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x4c, 0x41, 0x43, 0x4b, 0x10, 0x0e, 0x12, 0x1a, 0x0a, 0x16,
0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45,
0x53, 0x59, 0x53, 0x54, 0x45, 0x4d, 0x10, 0x0f, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x4f, 0x55, 0x52,
0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x49, 0x54, 0x10, 0x10, 0x12, 0x14, 0x0a,
0x10, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x45, 0x53,
0x54, 0x10, 0x11, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59,
0x50, 0x45, 0x5f, 0x53, 0x33, 0x5f, 0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x45, 0x44, 0x10, 0x12,
0x12, 0x2a, 0x0a, 0x26, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f,
0x47, 0x49, 0x54, 0x48, 0x55, 0x42, 0x5f, 0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54,
0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x5f, 0x4f, 0x52, 0x47, 0x10, 0x13, 0x12, 0x19, 0x0a, 0x15,
0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x42, 0x55, 0x49, 0x4c,
0x44, 0x4b, 0x49, 0x54, 0x45, 0x10, 0x14, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43,
0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x45, 0x52, 0x52, 0x49, 0x54, 0x10, 0x15, 0x12,
0x17, 0x0a, 0x13, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4a,
0x45, 0x4e, 0x4b, 0x49, 0x4e, 0x53, 0x10, 0x16, 0x12, 0x15, 0x0a, 0x11, 0x53, 0x4f, 0x55, 0x52,
0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x45, 0x41, 0x4d, 0x53, 0x10, 0x17, 0x12,
0x21, 0x0a, 0x1d, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4a,
0x46, 0x52, 0x4f, 0x47, 0x5f, 0x41, 0x52, 0x54, 0x49, 0x46, 0x41, 0x43, 0x54, 0x4f, 0x52, 0x59,
0x10, 0x18, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50,
0x45, 0x5f, 0x53, 0x59, 0x53, 0x4c, 0x4f, 0x47, 0x10, 0x19, 0x12, 0x27, 0x0a, 0x23, 0x53, 0x4f,
0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43,
0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x4d, 0x4f, 0x4e, 0x49, 0x54, 0x4f, 0x52, 0x49, 0x4e,
0x47, 0x10, 0x1a, 0x12, 0x1e, 0x0a, 0x1a, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59,
0x50, 0x45, 0x5f, 0x53, 0x4c, 0x41, 0x43, 0x4b, 0x5f, 0x52, 0x45, 0x41, 0x4c, 0x54, 0x49, 0x4d,
0x45, 0x10, 0x1b, 0x12, 0x1c, 0x0a, 0x18, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59,
0x50, 0x45, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c, 0x45, 0x5f, 0x44, 0x52, 0x49, 0x56, 0x45, 0x10,
0x1c, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45,
0x5f, 0x53, 0x48, 0x41, 0x52, 0x45, 0x50, 0x4f, 0x49, 0x4e, 0x54, 0x10, 0x1d, 0x12, 0x1c, 0x0a,
0x18, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x43, 0x53,
0x5f, 0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x45, 0x44, 0x10, 0x1e, 0x12, 0x1b, 0x0a, 0x17, 0x53,
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x41, 0x5a, 0x55, 0x52, 0x45,
0x5f, 0x52, 0x45, 0x50, 0x4f, 0x53, 0x10, 0x1f, 0x12, 0x18, 0x0a, 0x14, 0x53, 0x4f, 0x55, 0x52,
0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x52, 0x41, 0x56, 0x49, 0x53, 0x43, 0x49,
0x10, 0x20, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50,
0x45, 0x5f, 0x50, 0x4f, 0x53, 0x54, 0x4d, 0x41, 0x4e, 0x10, 0x21, 0x12, 0x17, 0x0a, 0x13, 0x53,
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x57, 0x45, 0x42, 0x48, 0x4f,
0x4f, 0x4b, 0x10, 0x22, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54,
0x59, 0x50, 0x45, 0x5f, 0x45, 0x4c, 0x41, 0x53, 0x54, 0x49, 0x43, 0x53, 0x45, 0x41, 0x52, 0x43,
0x48, 0x10, 0x23, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59,
0x50, 0x45, 0x5f, 0x48, 0x55, 0x47, 0x47, 0x49, 0x4e, 0x47, 0x46, 0x41, 0x43, 0x45, 0x10, 0x24,
0x12, 0x23, 0x0a, 0x1f, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f,
0x47, 0x49, 0x54, 0x48, 0x55, 0x42, 0x5f, 0x45, 0x58, 0x50, 0x45, 0x52, 0x49, 0x4d, 0x45, 0x4e,
0x54, 0x41, 0x4c, 0x10, 0x25, 0x42, 0x3b, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e,
0x63, 0x6f, 0x6d, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x73, 0x65, 0x63, 0x75, 0x72,
0x69, 0x74, 0x79, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65, 0x68, 0x6f, 0x67, 0x2f, 0x76,
0x33, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73,
0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x6e, 0x22, 0xde, 0x01, 0x0a, 0x06, 0x53, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x1a, 0x0a, 0x08,
0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x1f, 0x0a, 0x0a, 0x61, 0x75, 0x74, 0x68,
0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x09,
0x61, 0x75, 0x74, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x19, 0x0a, 0x07, 0x64, 0x73, 0x6e,
0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x64, 0x73,
0x6e, 0x4b, 0x65, 0x79, 0x12, 0x19, 0x0a, 0x07, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x18,
0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x61, 0x70, 0x69, 0x4b, 0x65, 0x79, 0x12,
0x37, 0x0a, 0x18, 0x69, 0x6e, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x6b, 0x69, 0x70,
0x5f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x5f, 0x74, 0x6c, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28,
0x08, 0x52, 0x15, 0x69, 0x6e, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x53, 0x6b, 0x69, 0x70, 0x56,
0x65, 0x72, 0x69, 0x66, 0x79, 0x54, 0x6c, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x6a,
0x65, 0x63, 0x74, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x6a,
0x65, 0x63, 0x74, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69,
0x61, 0x6c, 0x2a, 0xc9, 0x08, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70,
0x65, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45,
0x5f, 0x41, 0x5a, 0x55, 0x52, 0x45, 0x5f, 0x53, 0x54, 0x4f, 0x52, 0x41, 0x47, 0x45, 0x10, 0x00,
0x12, 0x19, 0x0a, 0x15, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f,
0x42, 0x49, 0x54, 0x42, 0x55, 0x43, 0x4b, 0x45, 0x54, 0x10, 0x01, 0x12, 0x18, 0x0a, 0x14, 0x53,
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x49, 0x52, 0x43, 0x4c,
0x45, 0x43, 0x49, 0x10, 0x02, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f,
0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4e, 0x46, 0x4c, 0x55, 0x45, 0x4e, 0x43, 0x45, 0x10,
0x03, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45,
0x5f, 0x44, 0x4f, 0x43, 0x4b, 0x45, 0x52, 0x10, 0x04, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x4f, 0x55,
0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x43, 0x52, 0x10, 0x05, 0x12, 0x13,
0x0a, 0x0f, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x43,
0x53, 0x10, 0x06, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59,
0x50, 0x45, 0x5f, 0x47, 0x49, 0x54, 0x48, 0x55, 0x42, 0x10, 0x07, 0x12, 0x1a, 0x0a, 0x16, 0x53,
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x55, 0x42, 0x4c, 0x49,
0x43, 0x5f, 0x47, 0x49, 0x54, 0x10, 0x08, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43,
0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x49, 0x54, 0x4c, 0x41, 0x42, 0x10, 0x09, 0x12,
0x14, 0x0a, 0x10, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4a,
0x49, 0x52, 0x41, 0x10, 0x0a, 0x12, 0x24, 0x0a, 0x20, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f,
0x54, 0x59, 0x50, 0x45, 0x5f, 0x4e, 0x50, 0x4d, 0x5f, 0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x44,
0x5f, 0x50, 0x41, 0x43, 0x4b, 0x41, 0x47, 0x45, 0x53, 0x10, 0x0b, 0x12, 0x25, 0x0a, 0x21, 0x53,
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x59, 0x50, 0x49, 0x5f,
0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x44, 0x5f, 0x50, 0x41, 0x43, 0x4b, 0x41, 0x47, 0x45, 0x53,
0x10, 0x0c, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50,
0x45, 0x5f, 0x53, 0x33, 0x10, 0x0d, 0x12, 0x15, 0x0a, 0x11, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45,
0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x4c, 0x41, 0x43, 0x4b, 0x10, 0x0e, 0x12, 0x1a, 0x0a,
0x16, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x49, 0x4c,
0x45, 0x53, 0x59, 0x53, 0x54, 0x45, 0x4d, 0x10, 0x0f, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x4f, 0x55,
0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x49, 0x54, 0x10, 0x10, 0x12, 0x14,
0x0a, 0x10, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x45,
0x53, 0x54, 0x10, 0x11, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54,
0x59, 0x50, 0x45, 0x5f, 0x53, 0x33, 0x5f, 0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x45, 0x44, 0x10,
0x12, 0x12, 0x2a, 0x0a, 0x26, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45,
0x5f, 0x47, 0x49, 0x54, 0x48, 0x55, 0x42, 0x5f, 0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e,
0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x5f, 0x4f, 0x52, 0x47, 0x10, 0x13, 0x12, 0x19, 0x0a,
0x15, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x42, 0x55, 0x49,
0x4c, 0x44, 0x4b, 0x49, 0x54, 0x45, 0x10, 0x14, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52,
0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x45, 0x52, 0x52, 0x49, 0x54, 0x10, 0x15,
0x12, 0x17, 0x0a, 0x13, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f,
0x4a, 0x45, 0x4e, 0x4b, 0x49, 0x4e, 0x53, 0x10, 0x16, 0x12, 0x15, 0x0a, 0x11, 0x53, 0x4f, 0x55,
0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x45, 0x41, 0x4d, 0x53, 0x10, 0x17,
0x12, 0x21, 0x0a, 0x1d, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f,
0x4a, 0x46, 0x52, 0x4f, 0x47, 0x5f, 0x41, 0x52, 0x54, 0x49, 0x46, 0x41, 0x43, 0x54, 0x4f, 0x52,
0x59, 0x10, 0x18, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59,
0x50, 0x45, 0x5f, 0x53, 0x59, 0x53, 0x4c, 0x4f, 0x47, 0x10, 0x19, 0x12, 0x27, 0x0a, 0x23, 0x53,
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x55, 0x42, 0x4c, 0x49,
0x43, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x4d, 0x4f, 0x4e, 0x49, 0x54, 0x4f, 0x52, 0x49,
0x4e, 0x47, 0x10, 0x1a, 0x12, 0x1e, 0x0a, 0x1a, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54,
0x59, 0x50, 0x45, 0x5f, 0x53, 0x4c, 0x41, 0x43, 0x4b, 0x5f, 0x52, 0x45, 0x41, 0x4c, 0x54, 0x49,
0x4d, 0x45, 0x10, 0x1b, 0x12, 0x1c, 0x0a, 0x18, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54,
0x59, 0x50, 0x45, 0x5f, 0x47, 0x4f, 0x4f, 0x47, 0x4c, 0x45, 0x5f, 0x44, 0x52, 0x49, 0x56, 0x45,
0x10, 0x1c, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50,
0x45, 0x5f, 0x53, 0x48, 0x41, 0x52, 0x45, 0x50, 0x4f, 0x49, 0x4e, 0x54, 0x10, 0x1d, 0x12, 0x1c,
0x0a, 0x18, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x43,
0x53, 0x5f, 0x55, 0x4e, 0x41, 0x55, 0x54, 0x48, 0x45, 0x44, 0x10, 0x1e, 0x12, 0x1b, 0x0a, 0x17,
0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x41, 0x5a, 0x55, 0x52,
0x45, 0x5f, 0x52, 0x45, 0x50, 0x4f, 0x53, 0x10, 0x1f, 0x12, 0x18, 0x0a, 0x14, 0x53, 0x4f, 0x55,
0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x52, 0x41, 0x56, 0x49, 0x53, 0x43,
0x49, 0x10, 0x20, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59,
0x50, 0x45, 0x5f, 0x50, 0x4f, 0x53, 0x54, 0x4d, 0x41, 0x4e, 0x10, 0x21, 0x12, 0x17, 0x0a, 0x13,
0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x57, 0x45, 0x42, 0x48,
0x4f, 0x4f, 0x4b, 0x10, 0x22, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f,
0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x4c, 0x41, 0x53, 0x54, 0x49, 0x43, 0x53, 0x45, 0x41, 0x52,
0x43, 0x48, 0x10, 0x23, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54,
0x59, 0x50, 0x45, 0x5f, 0x48, 0x55, 0x47, 0x47, 0x49, 0x4e, 0x47, 0x46, 0x41, 0x43, 0x45, 0x10,
0x24, 0x12, 0x23, 0x0a, 0x1f, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45,
0x5f, 0x47, 0x49, 0x54, 0x48, 0x55, 0x42, 0x5f, 0x45, 0x58, 0x50, 0x45, 0x52, 0x49, 0x4d, 0x45,
0x4e, 0x54, 0x41, 0x4c, 0x10, 0x25, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45,
0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x45, 0x4e, 0x54, 0x52, 0x59, 0x10, 0x26, 0x42, 0x3b,
0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x74, 0x72, 0x75,
0x66, 0x66, 0x6c, 0x65, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, 0x72, 0x75,
0x66, 0x66, 0x6c, 0x65, 0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70,
0x62, 0x2f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x33,
}
var (
@ -4950,7 +5088,7 @@ func file_sources_proto_rawDescGZIP() []byte {
}
var file_sources_proto_enumTypes = make([]protoimpl.EnumInfo, 2)
var file_sources_proto_msgTypes = make([]protoimpl.MessageInfo, 35)
var file_sources_proto_msgTypes = make([]protoimpl.MessageInfo, 36)
var file_sources_proto_goTypes = []interface{}{
(SourceType)(0), // 0: sources.SourceType
(Confluence_GetAllSpacesScope)(0), // 1: sources.Confluence.GetAllSpacesScope
@ -4989,72 +5127,73 @@ var file_sources_proto_goTypes = []interface{}{
(*Postman)(nil), // 34: sources.Postman
(*Webhook)(nil), // 35: sources.Webhook
(*Elasticsearch)(nil), // 36: sources.Elasticsearch
(*durationpb.Duration)(nil), // 37: google.protobuf.Duration
(*anypb.Any)(nil), // 38: google.protobuf.Any
(*credentialspb.BasicAuth)(nil), // 39: credentials.BasicAuth
(*credentialspb.Unauthenticated)(nil), // 40: credentials.Unauthenticated
(*credentialspb.Oauth2)(nil), // 41: credentials.Oauth2
(*credentialspb.KeySecret)(nil), // 42: credentials.KeySecret
(*credentialspb.CloudEnvironment)(nil), // 43: credentials.CloudEnvironment
(*credentialspb.SSHAuth)(nil), // 44: credentials.SSHAuth
(*credentialspb.GitHubApp)(nil), // 45: credentials.GitHubApp
(*credentialspb.AWSSessionTokenSecret)(nil), // 46: credentials.AWSSessionTokenSecret
(*credentialspb.SlackTokens)(nil), // 47: credentials.SlackTokens
(*credentialspb.Header)(nil), // 48: credentials.Header
(*credentialspb.ClientCredentials)(nil), // 49: credentials.ClientCredentials
(*timestamppb.Timestamp)(nil), // 50: google.protobuf.Timestamp
(*Sentry)(nil), // 37: sources.Sentry
(*durationpb.Duration)(nil), // 38: google.protobuf.Duration
(*anypb.Any)(nil), // 39: google.protobuf.Any
(*credentialspb.BasicAuth)(nil), // 40: credentials.BasicAuth
(*credentialspb.Unauthenticated)(nil), // 41: credentials.Unauthenticated
(*credentialspb.Oauth2)(nil), // 42: credentials.Oauth2
(*credentialspb.KeySecret)(nil), // 43: credentials.KeySecret
(*credentialspb.CloudEnvironment)(nil), // 44: credentials.CloudEnvironment
(*credentialspb.SSHAuth)(nil), // 45: credentials.SSHAuth
(*credentialspb.GitHubApp)(nil), // 46: credentials.GitHubApp
(*credentialspb.AWSSessionTokenSecret)(nil), // 47: credentials.AWSSessionTokenSecret
(*credentialspb.SlackTokens)(nil), // 48: credentials.SlackTokens
(*credentialspb.Header)(nil), // 49: credentials.Header
(*credentialspb.ClientCredentials)(nil), // 50: credentials.ClientCredentials
(*timestamppb.Timestamp)(nil), // 51: google.protobuf.Timestamp
}
var file_sources_proto_depIdxs = []int32{
37, // 0: sources.LocalSource.scan_interval:type_name -> google.protobuf.Duration
38, // 1: sources.LocalSource.connection:type_name -> google.protobuf.Any
39, // 2: sources.Artifactory.basic_auth:type_name -> credentials.BasicAuth
40, // 3: sources.Artifactory.unauthenticated:type_name -> credentials.Unauthenticated
39, // 4: sources.AzureStorage.basic_auth:type_name -> credentials.BasicAuth
40, // 5: sources.AzureStorage.unauthenticated:type_name -> credentials.Unauthenticated
41, // 6: sources.Bitbucket.oauth:type_name -> credentials.Oauth2
39, // 7: sources.Bitbucket.basic_auth:type_name -> credentials.BasicAuth
40, // 8: sources.Confluence.unauthenticated:type_name -> credentials.Unauthenticated
39, // 9: sources.Confluence.basic_auth:type_name -> credentials.BasicAuth
38, // 0: sources.LocalSource.scan_interval:type_name -> google.protobuf.Duration
39, // 1: sources.LocalSource.connection:type_name -> google.protobuf.Any
40, // 2: sources.Artifactory.basic_auth:type_name -> credentials.BasicAuth
41, // 3: sources.Artifactory.unauthenticated:type_name -> credentials.Unauthenticated
40, // 4: sources.AzureStorage.basic_auth:type_name -> credentials.BasicAuth
41, // 5: sources.AzureStorage.unauthenticated:type_name -> credentials.Unauthenticated
42, // 6: sources.Bitbucket.oauth:type_name -> credentials.Oauth2
40, // 7: sources.Bitbucket.basic_auth:type_name -> credentials.BasicAuth
41, // 8: sources.Confluence.unauthenticated:type_name -> credentials.Unauthenticated
40, // 9: sources.Confluence.basic_auth:type_name -> credentials.BasicAuth
1, // 10: sources.Confluence.spaces_scope:type_name -> sources.Confluence.GetAllSpacesScope
40, // 11: sources.Docker.unauthenticated:type_name -> credentials.Unauthenticated
39, // 12: sources.Docker.basic_auth:type_name -> credentials.BasicAuth
42, // 13: sources.ECR.access_key:type_name -> credentials.KeySecret
40, // 14: sources.GCS.unauthenticated:type_name -> credentials.Unauthenticated
43, // 15: sources.GCS.adc:type_name -> credentials.CloudEnvironment
41, // 16: sources.GCS.oauth:type_name -> credentials.Oauth2
39, // 17: sources.Git.basic_auth:type_name -> credentials.BasicAuth
40, // 18: sources.Git.unauthenticated:type_name -> credentials.Unauthenticated
44, // 19: sources.Git.ssh_auth:type_name -> credentials.SSHAuth
41, // 20: sources.GitLab.oauth:type_name -> credentials.Oauth2
39, // 21: sources.GitLab.basic_auth:type_name -> credentials.BasicAuth
45, // 22: sources.GitHub.github_app:type_name -> credentials.GitHubApp
40, // 23: sources.GitHub.unauthenticated:type_name -> credentials.Unauthenticated
39, // 24: sources.GitHub.basic_auth:type_name -> credentials.BasicAuth
40, // 25: sources.Huggingface.unauthenticated:type_name -> credentials.Unauthenticated
39, // 26: sources.JIRA.basic_auth:type_name -> credentials.BasicAuth
40, // 27: sources.JIRA.unauthenticated:type_name -> credentials.Unauthenticated
41, // 28: sources.JIRA.oauth:type_name -> credentials.Oauth2
40, // 29: sources.NPMUnauthenticatedPackage.unauthenticated:type_name -> credentials.Unauthenticated
40, // 30: sources.PyPIUnauthenticatedPackage.unauthenticated:type_name -> credentials.Unauthenticated
42, // 31: sources.S3.access_key:type_name -> credentials.KeySecret
40, // 32: sources.S3.unauthenticated:type_name -> credentials.Unauthenticated
43, // 33: sources.S3.cloud_environment:type_name -> credentials.CloudEnvironment
46, // 34: sources.S3.session_token:type_name -> credentials.AWSSessionTokenSecret
47, // 35: sources.Slack.tokens:type_name -> credentials.SlackTokens
39, // 36: sources.Gerrit.basic_auth:type_name -> credentials.BasicAuth
40, // 37: sources.Gerrit.unauthenticated:type_name -> credentials.Unauthenticated
39, // 38: sources.Jenkins.basic_auth:type_name -> credentials.BasicAuth
48, // 39: sources.Jenkins.header:type_name -> credentials.Header
40, // 40: sources.Jenkins.unauthenticated:type_name -> credentials.Unauthenticated
49, // 41: sources.Teams.authenticated:type_name -> credentials.ClientCredentials
41, // 42: sources.Teams.oauth:type_name -> credentials.Oauth2
40, // 43: sources.Forager.unauthenticated:type_name -> credentials.Unauthenticated
50, // 44: sources.Forager.since:type_name -> google.protobuf.Timestamp
47, // 45: sources.SlackRealtime.tokens:type_name -> credentials.SlackTokens
41, // 46: sources.Sharepoint.oauth:type_name -> credentials.Oauth2
41, // 47: sources.AzureRepos.oauth:type_name -> credentials.Oauth2
40, // 48: sources.Postman.unauthenticated:type_name -> credentials.Unauthenticated
48, // 49: sources.Webhook.header:type_name -> credentials.Header
41, // 11: sources.Docker.unauthenticated:type_name -> credentials.Unauthenticated
40, // 12: sources.Docker.basic_auth:type_name -> credentials.BasicAuth
43, // 13: sources.ECR.access_key:type_name -> credentials.KeySecret
41, // 14: sources.GCS.unauthenticated:type_name -> credentials.Unauthenticated
44, // 15: sources.GCS.adc:type_name -> credentials.CloudEnvironment
42, // 16: sources.GCS.oauth:type_name -> credentials.Oauth2
40, // 17: sources.Git.basic_auth:type_name -> credentials.BasicAuth
41, // 18: sources.Git.unauthenticated:type_name -> credentials.Unauthenticated
45, // 19: sources.Git.ssh_auth:type_name -> credentials.SSHAuth
42, // 20: sources.GitLab.oauth:type_name -> credentials.Oauth2
40, // 21: sources.GitLab.basic_auth:type_name -> credentials.BasicAuth
46, // 22: sources.GitHub.github_app:type_name -> credentials.GitHubApp
41, // 23: sources.GitHub.unauthenticated:type_name -> credentials.Unauthenticated
40, // 24: sources.GitHub.basic_auth:type_name -> credentials.BasicAuth
41, // 25: sources.Huggingface.unauthenticated:type_name -> credentials.Unauthenticated
40, // 26: sources.JIRA.basic_auth:type_name -> credentials.BasicAuth
41, // 27: sources.JIRA.unauthenticated:type_name -> credentials.Unauthenticated
42, // 28: sources.JIRA.oauth:type_name -> credentials.Oauth2
41, // 29: sources.NPMUnauthenticatedPackage.unauthenticated:type_name -> credentials.Unauthenticated
41, // 30: sources.PyPIUnauthenticatedPackage.unauthenticated:type_name -> credentials.Unauthenticated
43, // 31: sources.S3.access_key:type_name -> credentials.KeySecret
41, // 32: sources.S3.unauthenticated:type_name -> credentials.Unauthenticated
44, // 33: sources.S3.cloud_environment:type_name -> credentials.CloudEnvironment
47, // 34: sources.S3.session_token:type_name -> credentials.AWSSessionTokenSecret
48, // 35: sources.Slack.tokens:type_name -> credentials.SlackTokens
40, // 36: sources.Gerrit.basic_auth:type_name -> credentials.BasicAuth
41, // 37: sources.Gerrit.unauthenticated:type_name -> credentials.Unauthenticated
40, // 38: sources.Jenkins.basic_auth:type_name -> credentials.BasicAuth
49, // 39: sources.Jenkins.header:type_name -> credentials.Header
41, // 40: sources.Jenkins.unauthenticated:type_name -> credentials.Unauthenticated
50, // 41: sources.Teams.authenticated:type_name -> credentials.ClientCredentials
42, // 42: sources.Teams.oauth:type_name -> credentials.Oauth2
41, // 43: sources.Forager.unauthenticated:type_name -> credentials.Unauthenticated
51, // 44: sources.Forager.since:type_name -> google.protobuf.Timestamp
48, // 45: sources.SlackRealtime.tokens:type_name -> credentials.SlackTokens
42, // 46: sources.Sharepoint.oauth:type_name -> credentials.Oauth2
42, // 47: sources.AzureRepos.oauth:type_name -> credentials.Oauth2
41, // 48: sources.Postman.unauthenticated:type_name -> credentials.Unauthenticated
49, // 49: sources.Webhook.header:type_name -> credentials.Header
50, // [50:50] is the sub-list for method output_type
50, // [50:50] is the sub-list for method input_type
50, // [50:50] is the sub-list for extension type_name
@ -5488,6 +5627,18 @@ func file_sources_proto_init() {
return nil
}
}
file_sources_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Sentry); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_sources_proto_msgTypes[1].OneofWrappers = []interface{}{
(*Artifactory_BasicAuth)(nil),
@ -5618,13 +5769,18 @@ func file_sources_proto_init() {
file_sources_proto_msgTypes[33].OneofWrappers = []interface{}{
(*Webhook_Header)(nil),
}
file_sources_proto_msgTypes[35].OneofWrappers = []interface{}{
(*Sentry_AuthToken)(nil),
(*Sentry_DsnKey)(nil),
(*Sentry_ApiKey)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_sources_proto_rawDesc,
NumEnums: 2,
NumMessages: 35,
NumMessages: 36,
NumExtensions: 0,
NumServices: 0,
},


@ -6240,3 +6240,148 @@ var _ interface {
Cause() error
ErrorName() string
} = ElasticsearchValidationError{}
// Validate checks the field values on Sentry with the rules defined in the
// proto definition for this message. If any rules are violated, the first
// error encountered is returned, or nil if there are no violations.
func (m *Sentry) Validate() error {
return m.validate(false)
}
// ValidateAll checks the field values on Sentry with the rules defined in the
// proto definition for this message. If any rules are violated, the result is
// a list of violation errors wrapped in SentryMultiError, or nil if none found.
func (m *Sentry) ValidateAll() error {
return m.validate(true)
}
func (m *Sentry) validate(all bool) error {
if m == nil {
return nil
}
var errors []error
// no validation rules for Endpoint
// no validation rules for InsecureSkipVerifyTls
// no validation rules for Projects
switch v := m.Credential.(type) {
case *Sentry_AuthToken:
if v == nil {
err := SentryValidationError{
field: "Credential",
reason: "oneof value cannot be a typed-nil",
}
if !all {
return err
}
errors = append(errors, err)
}
// no validation rules for AuthToken
case *Sentry_DsnKey:
if v == nil {
err := SentryValidationError{
field: "Credential",
reason: "oneof value cannot be a typed-nil",
}
if !all {
return err
}
errors = append(errors, err)
}
// no validation rules for DsnKey
case *Sentry_ApiKey:
if v == nil {
err := SentryValidationError{
field: "Credential",
reason: "oneof value cannot be a typed-nil",
}
if !all {
return err
}
errors = append(errors, err)
}
// no validation rules for ApiKey
default:
_ = v // ensures v is used
}
if len(errors) > 0 {
return SentryMultiError(errors)
}
return nil
}
// SentryMultiError is an error wrapping multiple validation errors returned by
// Sentry.ValidateAll() if the designated constraints aren't met.
type SentryMultiError []error
// Error returns a concatenation of all the error messages it wraps.
func (m SentryMultiError) Error() string {
var msgs []string
for _, err := range m {
msgs = append(msgs, err.Error())
}
return strings.Join(msgs, "; ")
}
// AllErrors returns a list of validation violation errors.
func (m SentryMultiError) AllErrors() []error { return m }
// SentryValidationError is the validation error returned by Sentry.Validate if
// the designated constraints aren't met.
type SentryValidationError struct {
field string
reason string
cause error
key bool
}
// Field function returns field value.
func (e SentryValidationError) Field() string { return e.field }
// Reason function returns reason value.
func (e SentryValidationError) Reason() string { return e.reason }
// Cause function returns cause value.
func (e SentryValidationError) Cause() error { return e.cause }
// Key function returns key value.
func (e SentryValidationError) Key() bool { return e.key }
// ErrorName returns error name.
func (e SentryValidationError) ErrorName() string { return "SentryValidationError" }
// Error satisfies the builtin error interface
func (e SentryValidationError) Error() string {
cause := ""
if e.cause != nil {
cause = fmt.Sprintf(" | caused by: %v", e.cause)
}
key := ""
if e.key {
key = "key for "
}
return fmt.Sprintf(
"invalid %sSentry.%s: %s%s",
key,
e.field,
e.reason,
cause)
}
var _ error = SentryValidationError{}
var _ interface {
Field() string
Reason() string
Key() bool
Cause() error
ErrorName() string
} = SentryValidationError{}
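As a usage note, a hedged sketch of exercising the generated validators on that message (a fragment, assuming the conn value sketched earlier and a standard log import): Validate stops at the first violation, while ValidateAll aggregates every violation into a SentryMultiError.

    if err := conn.Validate(); err != nil {
        // First violation only.
        log.Fatalf("invalid Sentry config: %v", err)
    }
    if err := conn.ValidateAll(); err != nil {
        // All violations, wrapped in a SentryMultiError.
        for _, violation := range err.(sourcespb.SentryMultiError).AllErrors() {
            log.Println(violation)
        }
    }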


@ -420,10 +420,13 @@ func executeClone(ctx context.Context, params cloneParams) (*git.Repository, err
"clone",
cloneURL.String(),
params.clonePath,
"-c",
"remote.origin.fetch=+refs/*:refs/remotes/origin/*",
"--quiet", // https://git-scm.com/docs/git-clone#Documentation/git-clone.txt-code--quietcode
}
if !feature.SkipAdditionalRefs.Load() {
gitArgs = append(gitArgs,
"-c",
"remote.origin.fetch=+refs/*:refs/remotes/origin/*")
}
gitArgs = append(gitArgs, params.args...)
cloneCmd := exec.Command("git", gitArgs...)
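For reference (an illustration inferred from the snippet above, not part of the change): with SkipAdditionalRefs disabled, the resulting invocation is roughly git clone <cloneURL> <clonePath> --quiet -c remote.origin.fetch=+refs/*:refs/remotes/origin/* <extra args>; enabling the flag drops the -c option, so only git's default refspec is fetched.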
@ -969,17 +972,17 @@ func (s *Git) ScanRepo(ctx context.Context, repo *git.Repository, repoPath strin
// If either commit cannot be resolved, it returns early.
// If both are resolved, it finds and sets the merge base in scanOptions.
func normalizeConfig(scanOptions *ScanOptions, repo *git.Repository) error {
baseCommit, baseSet, err := resolveAndSetCommit(repo, &scanOptions.BaseHash)
baseCommit, err := resolveAndSetCommit(repo, &scanOptions.BaseHash)
if err != nil {
return err
}
headCommit, headSet, err := resolveAndSetCommit(repo, &scanOptions.HeadHash)
headCommit, err := resolveAndSetCommit(repo, &scanOptions.HeadHash)
if err != nil {
return err
}
if !(baseSet && headSet) {
if baseCommit == nil || headCommit == nil {
return nil
}
@ -998,32 +1001,31 @@ func normalizeConfig(scanOptions *ScanOptions, repo *git.Repository) error {
}
// resolveAndSetCommit resolves a Git reference to a commit object and updates the reference if it was not a direct hash.
// Returns the commit object, a boolean indicating if the commit was successfully set, and any error encountered.
func resolveAndSetCommit(repo *git.Repository, ref *string) (*object.Commit, bool, error) {
// Returns the commit object and any error encountered.
func resolveAndSetCommit(repo *git.Repository, ref *string) (*object.Commit, error) {
if repo == nil || ref == nil {
return nil, false, fmt.Errorf("repo and ref must be non-nil")
return nil, fmt.Errorf("repo and ref must be non-nil")
}
if len(*ref) == 0 {
return nil, false, nil
return nil, nil
}
originalRef := *ref
resolvedRef, err := resolveHash(repo, originalRef)
if err != nil {
return nil, false, fmt.Errorf("unable to resolve ref: %w", err)
return nil, fmt.Errorf("unable to resolve ref: %w", err)
}
commit, err := repo.CommitObject(plumbing.NewHash(resolvedRef))
if err != nil {
return nil, false, fmt.Errorf("unable to resolve commit: %w", err)
return nil, fmt.Errorf("unable to resolve commit: %w", err)
}
wasSet := originalRef != resolvedRef
if wasSet {
if originalRef != resolvedRef {
*ref = resolvedRef
}
return commit, wasSet, nil
return commit, nil
}
func resolveHash(repo *git.Repository, ref string) (string, error) {


@ -17,14 +17,14 @@ type tokenConnector struct {
apiClient *github.Client
token string
isGitHubEnterprise bool
handleRateLimit func(error) bool
handleRateLimit func(context.Context, error) bool
user string
userMu sync.Mutex
}
var _ connector = (*tokenConnector)(nil)
func newTokenConnector(apiEndpoint string, token string, handleRateLimit func(error) bool) (*tokenConnector, error) {
func newTokenConnector(apiEndpoint string, token string, handleRateLimit func(context.Context, error) bool) (*tokenConnector, error) {
const httpTimeoutSeconds = 60
httpClient := common.RetryableHTTPClientTimeout(int64(httpTimeoutSeconds))
tokenSource := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token})
@ -68,7 +68,7 @@ func (c *tokenConnector) getUser(ctx context.Context) (string, error) {
)
for {
user, _, err = c.apiClient.Users.Get(ctx, "")
if c.handleRateLimit(err) {
if c.handleRateLimit(ctx, err) {
continue
}
if err != nil {


@ -61,7 +61,6 @@ type Source struct {
scanOptMu sync.Mutex // protects the scanOptions
scanOptions *git.ScanOptions
log logr.Logger
conn *sourcespb.GitHub
jobPool *errgroup.Group
resumeInfoMutex sync.Mutex
@ -76,6 +75,32 @@ type Source struct {
sources.CommonSourceUnitUnmarshaller
}
// --------------------------------------------------------------------------------
// RepoUnit and GistUnit are implementations of SourceUnit used during
// enumeration. The different types aren't strictly necessary, but are a bit
// more explicit and allow type checking/safety.
var _ sources.SourceUnit = (*RepoUnit)(nil)
var _ sources.SourceUnit = (*GistUnit)(nil)
type RepoUnit struct {
name string
url string
}
func (r RepoUnit) SourceUnitID() (string, sources.SourceUnitKind) { return r.url, "repo" }
func (r RepoUnit) Display() string { return r.name }
type GistUnit struct {
name string
url string
}
func (g GistUnit) SourceUnitID() (string, sources.SourceUnitKind) { return g.url, "gist" }
func (g GistUnit) Display() string { return g.name }
// --------------------------------------------------------------------------------
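A brief sketch (illustrative only, with placeholder field values) of how these units are consumed: SourceUnitID identifies the unit to the scanning and resume machinery, while Display is meant for human-readable output.

    unit := RepoUnit{name: "trufflesecurity/trufflehog", url: "https://github.com/trufflesecurity/trufflehog.git"}
    id, kind := unit.SourceUnitID()
    fmt.Printf("scanning %s (id=%s, kind=%v)\n", unit.Display(), id, kind)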
// WithCustomContentWriter sets the useCustomContentWriter flag on the source.
func (s *Source) WithCustomContentWriter() { s.useCustomContentWriter = true }
@ -117,13 +142,13 @@ type filteredRepoCache struct {
include, exclude []glob.Glob
}
func (s *Source) newFilteredRepoCache(c cache.Cache[string], include, exclude []string) *filteredRepoCache {
func (s *Source) newFilteredRepoCache(ctx context.Context, c cache.Cache[string], include, exclude []string) *filteredRepoCache {
includeGlobs := make([]glob.Glob, 0, len(include))
excludeGlobs := make([]glob.Glob, 0, len(exclude))
for _, ig := range include {
g, err := glob.Compile(ig)
if err != nil {
s.log.V(1).Info("invalid include glob", "include_value", ig, "err", err)
ctx.Logger().V(1).Info("invalid include glob", "include_value", ig, "err", err)
continue
}
includeGlobs = append(includeGlobs, g)
@ -131,7 +156,7 @@ func (s *Source) newFilteredRepoCache(c cache.Cache[string], include, exclude []
for _, eg := range exclude {
g, err := glob.Compile(eg)
if err != nil {
s.log.V(1).Info("invalid exclude glob", "exclude_value", eg, "err", err)
ctx.Logger().V(1).Info("invalid exclude glob", "exclude_value", eg, "err", err)
continue
}
excludeGlobs = append(excludeGlobs, g)
@ -180,8 +205,6 @@ func (s *Source) Init(aCtx context.Context, name string, jobID sources.JobID, so
return err
}
s.log = aCtx.Logger()
s.name = name
s.sourceID = sourceID
s.jobID = jobID
@ -208,7 +231,8 @@ func (s *Source) Init(aCtx context.Context, name string, jobID sources.JobID, so
}
s.memberCache = make(map[string]struct{})
s.filteredRepoCache = s.newFilteredRepoCache(memory.New[string](),
s.filteredRepoCache = s.newFilteredRepoCache(aCtx,
memory.New[string](),
append(s.conn.GetRepositories(), s.conn.GetIncludeRepos()...),
s.conn.GetIgnoreRepos(),
)
@ -315,25 +339,30 @@ func (s *Source) Chunks(ctx context.Context, chunksChan chan *sources.Chunk, tar
}
func (s *Source) enumerate(ctx context.Context) error {
// Create a reporter that does nothing for now.
noopReporter := sources.VisitorReporter{
VisitUnit: func(ctx context.Context, su sources.SourceUnit) error {
return nil
},
}
// I'm not wild about switching on the connector type here (as opposed to dispatching to the connector itself) but
// this felt like a compromise that allowed me to isolate connection logic without rewriting the entire source.
switch c := s.connector.(type) {
case *appConnector:
if err := s.enumerateWithApp(ctx, c.InstallationClient()); err != nil {
if err := s.enumerateWithApp(ctx, c.InstallationClient(), noopReporter); err != nil {
return err
}
case *basicAuthConnector:
if err := s.enumerateBasicAuth(ctx); err != nil {
if err := s.enumerateBasicAuth(ctx, noopReporter); err != nil {
return err
}
case *tokenConnector:
if err := s.enumerateWithToken(ctx, c.IsGithubEnterprise()); err != nil {
if err := s.enumerateWithToken(ctx, c.IsGithubEnterprise(), noopReporter); err != nil {
return err
}
case *unauthenticatedConnector:
s.enumerateUnauthenticated(ctx)
s.enumerateUnauthenticated(ctx, noopReporter)
}
s.repos = make([]string, 0, s.filteredRepoCache.Count())
RepoLoop:
@ -360,7 +389,7 @@ RepoLoop:
// Normalize the URL to the Gist's pull URL.
// See https://github.com/trufflesecurity/trufflehog/pull/2625#issuecomment-2025507937
repo = gist.GetGitPullURL()
if s.handleRateLimit(err) {
if s.handleRateLimit(repoCtx, err) {
continue
}
if err != nil {
@ -374,7 +403,7 @@ RepoLoop:
// Cache repository info.
for {
ghRepo, _, err := s.connector.APIClient().Repositories.Get(repoCtx, urlParts[1], urlParts[2])
if s.handleRateLimit(err) {
if s.handleRateLimit(repoCtx, err) {
continue
}
if err != nil {
@ -389,22 +418,23 @@ RepoLoop:
s.repos = append(s.repos, repo)
}
githubReposEnumerated.WithLabelValues(s.name).Set(float64(len(s.repos)))
s.log.Info("Completed enumeration", "num_repos", len(s.repos), "num_orgs", s.orgsCache.Count(), "num_members", len(s.memberCache))
ctx.Logger().Info("Completed enumeration", "num_repos", len(s.repos), "num_orgs", s.orgsCache.Count(), "num_members", len(s.memberCache))
// We must sort the repos so we can resume later if necessary.
sort.Strings(s.repos)
return nil
}
func (s *Source) enumerateBasicAuth(ctx context.Context) error {
func (s *Source) enumerateBasicAuth(ctx context.Context, reporter sources.UnitReporter) error {
for _, org := range s.orgsCache.Keys() {
orgCtx := context.WithValue(ctx, "account", org)
userType, err := s.getReposByOrgOrUser(ctx, org)
userType, err := s.getReposByOrgOrUser(ctx, org, reporter)
if err != nil {
orgCtx.Logger().Error(err, "error fetching repos for org or user")
continue
}
// TODO: This modifies s.memberCache but it doesn't look like
// we do anything with it.
if userType == organization && s.conn.ScanUsers {
if err := s.addMembersByOrg(ctx, org); err != nil {
orgCtx.Logger().Error(err, "Unable to add members by org")
@ -415,14 +445,14 @@ func (s *Source) enumerateBasicAuth(ctx context.Context) error {
return nil
}
func (s *Source) enumerateUnauthenticated(ctx context.Context) {
func (s *Source) enumerateUnauthenticated(ctx context.Context, reporter sources.UnitReporter) {
if s.orgsCache.Count() > unauthGithubOrgRateLimt {
s.log.Info("You may experience rate limiting when using the unauthenticated GitHub api. Consider using an authenticated scan instead.")
ctx.Logger().Info("You may experience rate limiting when using the unauthenticated GitHub api. Consider using an authenticated scan instead.")
}
for _, org := range s.orgsCache.Keys() {
orgCtx := context.WithValue(ctx, "account", org)
userType, err := s.getReposByOrgOrUser(ctx, org)
userType, err := s.getReposByOrgOrUser(ctx, org, reporter)
if err != nil {
orgCtx.Logger().Error(err, "error fetching repos for org or user")
continue
@ -434,14 +464,14 @@ func (s *Source) enumerateUnauthenticated(ctx context.Context) {
}
}
func (s *Source) enumerateWithToken(ctx context.Context, isGithubEnterprise bool) error {
func (s *Source) enumerateWithToken(ctx context.Context, isGithubEnterprise bool, reporter sources.UnitReporter) error {
ctx.Logger().V(1).Info("Enumerating with token")
var ghUser *github.User
var err error
for {
ghUser, _, err = s.connector.APIClient().Users.Get(ctx, "")
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {
@ -453,11 +483,11 @@ func (s *Source) enumerateWithToken(ctx context.Context, isGithubEnterprise bool
specificScope := len(s.repos) > 0 || s.orgsCache.Count() > 0
if !specificScope {
// Enumerate the user's orgs and repos if none were specified.
if err := s.getReposByUser(ctx, ghUser.GetLogin()); err != nil {
s.log.Error(err, "Unable to fetch repos for the current user", "user", ghUser.GetLogin())
if err := s.getReposByUser(ctx, ghUser.GetLogin(), reporter); err != nil {
ctx.Logger().Error(err, "Unable to fetch repos for the current user", "user", ghUser.GetLogin())
}
if err := s.addUserGistsToCache(ctx, ghUser.GetLogin()); err != nil {
s.log.Error(err, "Unable to fetch gists for the current user", "user", ghUser.GetLogin())
if err := s.addUserGistsToCache(ctx, ghUser.GetLogin(), reporter); err != nil {
ctx.Logger().Error(err, "Unable to fetch gists for the current user", "user", ghUser.GetLogin())
}
if isGithubEnterprise {
@ -472,7 +502,7 @@ func (s *Source) enumerateWithToken(ctx context.Context, isGithubEnterprise bool
if len(s.orgsCache.Keys()) > 0 {
for _, org := range s.orgsCache.Keys() {
orgCtx := context.WithValue(ctx, "account", org)
userType, err := s.getReposByOrgOrUser(ctx, org)
userType, err := s.getReposByOrgOrUser(ctx, org, reporter)
if err != nil {
orgCtx.Logger().Error(err, "Unable to fetch repos for org or user")
continue
@ -486,18 +516,18 @@ func (s *Source) enumerateWithToken(ctx context.Context, isGithubEnterprise bool
}
if s.conn.ScanUsers && len(s.memberCache) > 0 {
s.log.Info("Fetching repos for org members", "org_count", s.orgsCache.Count(), "member_count", len(s.memberCache))
s.addReposForMembers(ctx)
ctx.Logger().Info("Fetching repos for org members", "org_count", s.orgsCache.Count(), "member_count", len(s.memberCache))
s.addReposForMembers(ctx, reporter)
}
}
return nil
}
func (s *Source) enumerateWithApp(ctx context.Context, installationClient *github.Client) error {
func (s *Source) enumerateWithApp(ctx context.Context, installationClient *github.Client, reporter sources.UnitReporter) error {
// If no repos were provided, enumerate them.
if len(s.repos) == 0 {
if err := s.getReposByApp(ctx); err != nil {
if err := s.getReposByApp(ctx, reporter); err != nil {
return err
}
@ -507,13 +537,14 @@ func (s *Source) enumerateWithApp(ctx context.Context, installationClient *githu
if err != nil {
return err
}
s.log.Info("Scanning repos", "org_members", len(s.memberCache))
ctx.Logger().Info("Scanning repos", "org_members", len(s.memberCache))
// TODO: Replace loop below with a call to s.addReposForMembers(ctx, reporter)
for member := range s.memberCache {
logger := s.log.WithValues("member", member)
if err := s.addUserGistsToCache(ctx, member); err != nil {
logger := ctx.Logger().WithValues("member", member)
if err := s.addUserGistsToCache(ctx, member, reporter); err != nil {
logger.Error(err, "error fetching gists by user")
}
if err := s.getReposByUser(ctx, member); err != nil {
if err := s.getReposByUser(ctx, member, reporter); err != nil {
logger.Error(err, "error fetching repos by user")
}
}
@ -536,7 +567,7 @@ func createGitHubClient(httpClient *http.Client, apiEndpoint string) (*github.Cl
func (s *Source) scan(ctx context.Context, chunksChan chan *sources.Chunk) error {
var scannedCount uint64 = 1
s.log.V(2).Info("Found repos to scan", "count", len(s.repos))
ctx.Logger().V(2).Info("Found repos to scan", "count", len(s.repos))
// If there is resume information available, limit this scan to only the repos that still need scanning.
reposToScan, progressIndexOffset := sources.FilterReposToResume(s.repos, s.GetProgress().EncodedResumeInfo)
@ -574,7 +605,7 @@ func (s *Source) scan(ctx context.Context, chunksChan chan *sources.Chunk) error
if !ok {
// This should never happen.
err := fmt.Errorf("no repoInfo for URL: %s", repoURL)
s.log.Error(err, "failed to scan repository")
ctx.Logger().Error(err, "failed to scan repository")
return nil
}
repoCtx := context.WithValues(ctx, "repo", repoURL)
@ -618,7 +649,7 @@ func (s *Source) scan(ctx context.Context, chunksChan chan *sources.Chunk) error
_ = s.jobPool.Wait()
if scanErrs.Count() > 0 {
s.log.V(0).Info("failed to scan some repositories", "error_count", scanErrs.Count(), "errors", scanErrs.String())
ctx.Logger().Info("failed to scan some repositories", "error_count", scanErrs.Count(), "errors", scanErrs.String())
}
s.SetProgressComplete(len(s.repos), len(s.repos), "Completed GitHub scan", "")
@ -666,7 +697,7 @@ var (
// Authenticated users have a rate limit of 5,000 requests per hour,
// however, certain actions are subject to a stricter "secondary" limit.
// https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api
func (s *Source) handleRateLimit(errIn error) bool {
func (s *Source) handleRateLimit(ctx context.Context, errIn error) bool {
if errIn == nil {
return false
}
@ -705,12 +736,12 @@ func (s *Source) handleRateLimit(errIn error) bool {
if retryAfter > 0 {
retryAfter = retryAfter + jitter
rateLimitResumeTime = now.Add(retryAfter)
s.log.V(0).Info(fmt.Sprintf("exceeded %s rate limit", limitType), "retry_after", retryAfter.String(), "resume_time", rateLimitResumeTime.Format(time.RFC3339))
ctx.Logger().Info(fmt.Sprintf("exceeded %s rate limit", limitType), "retry_after", retryAfter.String(), "resume_time", rateLimitResumeTime.Format(time.RFC3339))
} else {
retryAfter = (5 * time.Minute) + jitter
rateLimitResumeTime = now.Add(retryAfter)
// TODO: Use exponential backoff instead of static retry time.
s.log.V(0).Error(errIn, "unexpected rate limit error", "retry_after", retryAfter.String(), "resume_time", rateLimitResumeTime.Format(time.RFC3339))
ctx.Logger().Error(errIn, "unexpected rate limit error", "retry_after", retryAfter.String(), "resume_time", rateLimitResumeTime.Format(time.RFC3339))
}
rateLimitMu.Unlock()
@ -724,26 +755,27 @@ func (s *Source) handleRateLimit(errIn error) bool {
return true
}
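Every call site updated in this diff follows the same retry pattern; a condensed sketch of it, reusing the Gists.List call that appears later in this file (the pagination advance is part of the sketch, not this hunk):

    for {
        gists, res, err := s.connector.APIClient().Gists.List(ctx, user, gistOpts)
        if s.handleRateLimit(ctx, err) {
            continue // retry the same page once the rate limit has been handled
        }
        if err != nil {
            return err
        }
        // ... process gists ...
        if res == nil || res.NextPage == 0 {
            break
        }
        gistOpts.Page = res.NextPage
    }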
func (s *Source) addReposForMembers(ctx context.Context) {
s.log.Info("Fetching repos from members", "members", len(s.memberCache))
func (s *Source) addReposForMembers(ctx context.Context, reporter sources.UnitReporter) {
ctx.Logger().Info("Fetching repos from members", "members", len(s.memberCache))
for member := range s.memberCache {
if err := s.addUserGistsToCache(ctx, member); err != nil {
s.log.Info("Unable to fetch gists by user", "user", member, "error", err)
if err := s.addUserGistsToCache(ctx, member, reporter); err != nil {
ctx.Logger().Info("Unable to fetch gists by user", "user", member, "error", err)
}
if err := s.getReposByUser(ctx, member); err != nil {
s.log.Info("Unable to fetch repos by user", "user", member, "error", err)
if err := s.getReposByUser(ctx, member, reporter); err != nil {
ctx.Logger().Info("Unable to fetch repos by user", "user", member, "error", err)
}
}
}
// addUserGistsToCache collects all the gist urls for a given user,
// and adds them to the filteredRepoCache.
func (s *Source) addUserGistsToCache(ctx context.Context, user string) error {
func (s *Source) addUserGistsToCache(ctx context.Context, user string, reporter sources.UnitReporter) error {
gistOpts := &github.GistListOptions{}
logger := s.log.WithValues("user", user)
logger := ctx.Logger().WithValues("user", user)
for {
gists, res, err := s.connector.APIClient().Gists.List(ctx, user, gistOpts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {
@ -753,6 +785,9 @@ func (s *Source) addUserGistsToCache(ctx context.Context, user string) error {
for _, gist := range gists {
s.filteredRepoCache.Set(gist.GetID(), gist.GetGitPullURL())
s.cacheGistInfo(gist)
if err := reporter.UnitOk(ctx, GistUnit{name: gist.GetID(), url: gist.GetGitPullURL()}); err != nil {
return err
}
}
if res == nil || res.NextPage == 0 {
@ -788,7 +823,7 @@ func (s *Source) addMembersByApp(ctx context.Context, installationClient *github
}
func (s *Source) addAllVisibleOrgs(ctx context.Context) {
s.log.V(2).Info("enumerating all visible organizations on GHE")
ctx.Logger().V(2).Info("enumerating all visible organizations on GHE")
// Enumeration on this endpoint does not use pages; it uses a since ID.
// The endpoint will return organizations with an ID greater than the given since ID.
// Empty org response is our cue to break the enumeration loop.
@ -800,11 +835,11 @@ func (s *Source) addAllVisibleOrgs(ctx context.Context) {
}
for {
orgs, _, err := s.connector.APIClient().Organizations.ListAll(ctx, orgOpts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {
s.log.Error(err, "could not list all organizations")
ctx.Logger().Error(err, "could not list all organizations")
return
}
@ -813,7 +848,7 @@ func (s *Source) addAllVisibleOrgs(ctx context.Context) {
}
lastOrgID := *orgs[len(orgs)-1].ID
s.log.V(2).Info(fmt.Sprintf("listed organization IDs %d through %d", orgOpts.Since, lastOrgID))
ctx.Logger().V(2).Info(fmt.Sprintf("listed organization IDs %d through %d", orgOpts.Since, lastOrgID))
orgOpts.Since = lastOrgID
for _, org := range orgs {
@ -827,7 +862,7 @@ func (s *Source) addAllVisibleOrgs(ctx context.Context) {
continue
}
s.orgsCache.Set(name, name)
s.log.V(2).Info("adding organization for repository enumeration", "id", org.ID, "name", name)
ctx.Logger().V(2).Info("adding organization for repository enumeration", "id", org.ID, "name", name)
}
}
}
@ -836,10 +871,10 @@ func (s *Source) addOrgsByUser(ctx context.Context, user string) {
orgOpts := &github.ListOptions{
PerPage: defaultPagination,
}
logger := s.log.WithValues("user", user)
logger := ctx.Logger().WithValues("user", user)
for {
orgs, resp, err := s.connector.APIClient().Organizations.List(ctx, "", orgOpts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {
@ -869,10 +904,10 @@ func (s *Source) addMembersByOrg(ctx context.Context, org string) error {
},
}
logger := s.log.WithValues("org", org)
logger := ctx.Logger().WithValues("org", org)
for {
members, res, err := s.connector.APIClient().Organizations.ListMembers(ctx, org, opts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil || len(members) == 0 {
@ -994,7 +1029,7 @@ func (s *Source) processGistComments(ctx context.Context, gistURL string, urlPar
}
for {
comments, _, err := s.connector.APIClient().Gists.ListComments(ctx, gistID, options)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {
@ -1107,7 +1142,7 @@ func (s *Source) processIssues(ctx context.Context, repoInfo repoInfo, chunksCha
for {
issues, _, err := s.connector.APIClient().Issues.ListByRepo(ctx, repoInfo.owner, repoInfo.name, bodyTextsOpts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
@ -1179,7 +1214,7 @@ func (s *Source) processIssueComments(ctx context.Context, repoInfo repoInfo, ch
for {
issueComments, _, err := s.connector.APIClient().Issues.ListComments(ctx, repoInfo.owner, repoInfo.name, allComments, issueOpts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {
@ -1244,7 +1279,7 @@ func (s *Source) processPRs(ctx context.Context, repoInfo repoInfo, chunksChan c
for {
prs, _, err := s.connector.APIClient().PullRequests.List(ctx, repoInfo.owner, repoInfo.name, prOpts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {
@ -1276,7 +1311,7 @@ func (s *Source) processPRComments(ctx context.Context, repoInfo repoInfo, chunk
for {
prComments, _, err := s.connector.APIClient().PullRequests.ListComments(ctx, repoInfo.owner, repoInfo.name, allComments, prOpts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {


@ -9,7 +9,6 @@ import (
"testing"
"time"
"github.com/go-logr/logr"
"github.com/kylelemons/godebug/pretty"
"github.com/stretchr/testify/assert"
"google.golang.org/protobuf/types/known/anypb"
@ -58,12 +57,11 @@ func TestSource_Token(t *testing.T) {
s := Source{
conn: src,
log: logr.Discard(),
memberCache: map[string]struct{}{},
repoInfoCache: newRepoInfoCache(),
}
s.Init(ctx, "github integration test source", 0, 0, false, conn, 1)
s.filteredRepoCache = s.newFilteredRepoCache(memory.New[string](), nil, nil)
s.filteredRepoCache = s.newFilteredRepoCache(ctx, memory.New[string](), nil, nil)
err = s.enumerateWithApp(ctx, s.connector.(*appConnector).InstallationClient())
assert.NoError(t, err)


@ -15,7 +15,6 @@ import (
"testing"
"time"
"github.com/go-logr/logr"
"github.com/google/go-cmp/cmp"
"github.com/google/go-github/v63/github"
"github.com/stretchr/testify/assert"
@ -100,7 +99,7 @@ func TestAddReposByOrg(t *testing.T) {
Repositories: nil,
IgnoreRepos: []string{"secret/super-*-repo2"},
})
err := s.getReposByOrg(context.Background(), "super-secret-org")
err := s.getReposByOrg(context.Background(), "super-secret-org", noopReporter())
assert.Nil(t, err)
assert.Equal(t, 1, s.filteredRepoCache.Count())
ok := s.filteredRepoCache.Exists("super-secret-repo")
@ -128,7 +127,7 @@ func TestAddReposByOrg_IncludeRepos(t *testing.T) {
IncludeRepos: []string{"super-secret-org/super*"},
Organizations: []string{"super-secret-org"},
})
err := s.getReposByOrg(context.Background(), "super-secret-org")
err := s.getReposByOrg(context.Background(), "super-secret-org", noopReporter())
assert.Nil(t, err)
assert.Equal(t, 2, s.filteredRepoCache.Count())
ok := s.filteredRepoCache.Exists("super-secret-org/super-secret-repo")
@ -156,7 +155,7 @@ func TestAddReposByUser(t *testing.T) {
},
IgnoreRepos: []string{"super-secret-user/super-secret-repo2"},
})
err := s.getReposByUser(context.Background(), "super-secret-user")
err := s.getReposByUser(context.Background(), "super-secret-user", noopReporter())
assert.Nil(t, err)
assert.Equal(t, 1, s.filteredRepoCache.Count())
ok := s.filteredRepoCache.Exists("super-secret-user/super-secret-repo")
@ -174,7 +173,7 @@ func TestAddGistsByUser(t *testing.T) {
JSON([]map[string]string{{"id": "aa5a315d61ae9438b18d", "git_pull_url": "https://gist.github.com/aa5a315d61ae9438b18d.git"}})
s := initTestSource(&sourcespb.GitHub{Credential: &sourcespb.GitHub_Unauthenticated{}})
err := s.addUserGistsToCache(context.Background(), "super-secret-user")
err := s.addUserGistsToCache(context.Background(), "super-secret-user", noopReporter())
assert.Nil(t, err)
assert.Equal(t, 1, s.filteredRepoCache.Count())
ok := s.filteredRepoCache.Exists("aa5a315d61ae9438b18d")
@ -266,7 +265,7 @@ func TestAddReposByApp(t *testing.T) {
})
s := initTestSource(&sourcespb.GitHub{Credential: &sourcespb.GitHub_Unauthenticated{}})
err := s.getReposByApp(context.Background())
err := s.getReposByApp(context.Background(), noopReporter())
assert.Nil(t, err)
assert.Equal(t, 2, s.filteredRepoCache.Count())
ok := s.filteredRepoCache.Exists("ssr1")
@ -369,7 +368,8 @@ func TestNormalizeRepos(t *testing.T) {
func TestHandleRateLimit(t *testing.T) {
s := initTestSource(&sourcespb.GitHub{Credential: &sourcespb.GitHub_Unauthenticated{}})
assert.False(t, s.handleRateLimit(nil))
ctx := context.Background()
assert.False(t, s.handleRateLimit(ctx, nil))
// Request
reqUrl, _ := url.Parse("https://github.com/trufflesecurity/trufflehog")
@ -400,7 +400,7 @@ func TestHandleRateLimit(t *testing.T) {
Message: "Too Many Requests",
}
assert.True(t, s.handleRateLimit(err))
assert.True(t, s.handleRateLimit(ctx, err))
}
func TestEnumerateUnauthenticated(t *testing.T) {
@ -419,7 +419,7 @@ func TestEnumerateUnauthenticated(t *testing.T) {
s.orgsCache = memory.New[string]()
s.orgsCache.Set("super-secret-org", "super-secret-org")
//s.enumerateUnauthenticated(context.Background(), apiEndpoint)
s.enumerateUnauthenticated(context.Background())
s.enumerateUnauthenticated(context.Background(), noopReporter())
assert.Equal(t, 1, s.filteredRepoCache.Count())
ok := s.filteredRepoCache.Exists("super-secret-org/super-secret-repo")
assert.True(t, ok)
@ -458,7 +458,7 @@ func TestEnumerateWithToken(t *testing.T) {
Token: "token",
},
})
err := s.enumerateWithToken(context.Background(), false)
err := s.enumerateWithToken(context.Background(), false, noopReporter())
assert.Nil(t, err)
assert.Equal(t, 2, s.filteredRepoCache.Count())
ok := s.filteredRepoCache.Exists("super-secret-user/super-secret-repo")
@ -502,7 +502,7 @@ func BenchmarkEnumerateWithToken(b *testing.B) {
b.ResetTimer()
for i := 0; i < b.N; i++ {
_ = s.enumerateWithToken(context.Background(), false)
_ = s.enumerateWithToken(context.Background(), false, noopReporter())
}
}
@ -660,7 +660,7 @@ func TestEnumerateWithToken_IncludeRepos(t *testing.T) {
})
s.repos = []string{"some-special-repo"}
err := s.enumerateWithToken(context.Background(), false)
err := s.enumerateWithToken(context.Background(), false, noopReporter())
assert.Nil(t, err)
assert.Equal(t, 1, len(s.repos))
assert.Equal(t, []string{"some-special-repo"}, s.repos)
@ -693,7 +693,7 @@ func TestEnumerateWithApp(t *testing.T) {
},
},
})
err := s.enumerateWithApp(context.Background(), s.connector.(*appConnector).InstallationClient())
err := s.enumerateWithApp(context.Background(), s.connector.(*appConnector).InstallationClient(), noopReporter())
assert.Nil(t, err)
assert.Equal(t, 0, len(s.repos))
assert.False(t, gock.HasUnmatchedRequest())
@ -721,7 +721,6 @@ func Test_setProgressCompleteWithRepo_resumeInfo(t *testing.T) {
s := &Source{
repos: []string{},
log: logr.Discard(),
}
for _, tt := range tests {
@ -772,7 +771,6 @@ func Test_setProgressCompleteWithRepo_Progress(t *testing.T) {
for _, tt := range tests {
s := &Source{
repos: tt.repos,
log: logr.Discard(),
}
s.setProgressCompleteWithRepo(tt.index, tt.offset, "")
@ -910,3 +908,11 @@ func Test_ScanMultipleTargets_MultipleErrors(t *testing.T) {
assert.ElementsMatch(t, got, want)
}
}
func noopReporter() sources.UnitReporter {
return sources.VisitorReporter{
VisitUnit: func(context.Context, sources.SourceUnit) error {
return nil
},
}
}
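
For illustration only (not part of this change): because the new parameter is just a sources.UnitReporter, a test can swap in a reporter that records what it receives instead of discarding it. The countingReporter below is a hypothetical sketch; it relies only on the sources.VisitorReporter fields already used by noopReporter above.

func countingReporter(count *int) sources.UnitReporter {
	return sources.VisitorReporter{
		VisitUnit: func(ctx context.Context, unit sources.SourceUnit) error {
			*count++ // tally each repo unit reported during enumeration
			return nil
		},
	}
}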

View file

@ -14,6 +14,7 @@ import (
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
"github.com/trufflesecurity/trufflehog/v3/pkg/giturl"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/source_metadatapb"
"github.com/trufflesecurity/trufflehog/v3/pkg/sources"
)
type repoInfoCache struct {
@ -76,8 +77,8 @@ func (s *Source) appListReposWrapper(ctx context.Context, _ string, opts repoLis
return nil, res, err
}
func (s *Source) getReposByApp(ctx context.Context) error {
return s.processRepos(ctx, "", s.appListReposWrapper, &appListOptions{
func (s *Source) getReposByApp(ctx context.Context, reporter sources.UnitReporter) error {
return s.processRepos(ctx, "", reporter, s.appListReposWrapper, &appListOptions{
ListOptions: github.ListOptions{
PerPage: defaultPagination,
},
@ -96,8 +97,8 @@ func (s *Source) userListReposWrapper(ctx context.Context, user string, opts rep
return s.connector.APIClient().Repositories.ListByUser(ctx, user, &opts.(*userListOptions).RepositoryListByUserOptions)
}
func (s *Source) getReposByUser(ctx context.Context, user string) error {
return s.processRepos(ctx, user, s.userListReposWrapper, &userListOptions{
func (s *Source) getReposByUser(ctx context.Context, user string, reporter sources.UnitReporter) error {
return s.processRepos(ctx, user, reporter, s.userListReposWrapper, &userListOptions{
RepositoryListByUserOptions: github.RepositoryListByUserOptions{
ListOptions: github.ListOptions{
PerPage: defaultPagination,
@ -119,8 +120,8 @@ func (s *Source) orgListReposWrapper(ctx context.Context, org string, opts repoL
return s.connector.APIClient().Repositories.ListByOrg(ctx, org, &opts.(*orgListOptions).RepositoryListByOrgOptions)
}
func (s *Source) getReposByOrg(ctx context.Context, org string) error {
return s.processRepos(ctx, org, s.orgListReposWrapper, &orgListOptions{
func (s *Source) getReposByOrg(ctx context.Context, org string, reporter sources.UnitReporter) error {
return s.processRepos(ctx, org, reporter, s.orgListReposWrapper, &orgListOptions{
RepositoryListByOrgOptions: github.RepositoryListByOrgOptions{
ListOptions: github.ListOptions{
PerPage: defaultPagination,
@ -145,11 +146,11 @@ const (
organization
)
func (s *Source) getReposByOrgOrUser(ctx context.Context, name string) (userType, error) {
func (s *Source) getReposByOrgOrUser(ctx context.Context, name string, reporter sources.UnitReporter) (userType, error) {
var err error
// List repositories for the organization |name|.
err = s.getReposByOrg(ctx, name)
err = s.getReposByOrg(ctx, name, reporter)
if err == nil {
return organization, nil
} else if !isGitHub404Error(err) {
@ -157,9 +158,9 @@ func (s *Source) getReposByOrgOrUser(ctx context.Context, name string) (userType
}
// List repositories for the user |name|.
err = s.getReposByUser(ctx, name)
err = s.getReposByUser(ctx, name, reporter)
if err == nil {
if err := s.addUserGistsToCache(ctx, name); err != nil {
if err := s.addUserGistsToCache(ctx, name, reporter); err != nil {
ctx.Logger().Error(err, "Unable to add user to cache")
}
return user, nil
@ -180,8 +181,8 @@ func isGitHub404Error(err error) bool {
return ghErr.Response.StatusCode == http.StatusNotFound
}
func (s *Source) processRepos(ctx context.Context, target string, listRepos repoLister, listOpts repoListOptions) error {
logger := s.log.WithValues("target", target)
func (s *Source) processRepos(ctx context.Context, target string, reporter sources.UnitReporter, listRepos repoLister, listOpts repoListOptions) error {
logger := ctx.Logger().WithValues("target", target)
opts := listOpts.getListOptions()
var (
@ -191,14 +192,14 @@ func (s *Source) processRepos(ctx context.Context, target string, listRepos repo
for {
someRepos, res, err := listRepos(ctx, target, listOpts)
if s.handleRateLimit(err) {
if s.handleRateLimit(ctx, err) {
continue
}
if err != nil {
return err
}
s.log.V(2).Info("Listed repos", "page", opts.Page, "last_page", res.LastPage)
ctx.Logger().V(2).Info("Listed repos", "page", opts.Page, "last_page", res.LastPage)
for _, r := range someRepos {
if r.GetFork() {
if !s.conn.IncludeForks {
@ -215,8 +216,10 @@ func (s *Source) processRepos(ctx context.Context, target string, listRepos repo
repoName, repoURL := r.GetFullName(), r.GetCloneURL()
s.totalRepoSize += r.GetSize()
s.filteredRepoCache.Set(repoName, repoURL)
s.cacheRepoInfo(r)
if err := reporter.UnitOk(ctx, RepoUnit{name: repoName, url: repoURL}); err != nil {
return err
}
logger.V(3).Info("repo attributes", "name", repoName, "kb_size", r.GetSize(), "repo_url", repoURL)
}

View file

@ -118,17 +118,6 @@ func New(opts ...Option) *BufferedFileWriter {
return w
}
// NewFromReader creates a new instance of BufferedFileWriter and writes the content from the provided reader to the writer.
func NewFromReader(r io.Reader, opts ...Option) (*BufferedFileWriter, error) {
opts = append(opts, WithBufferSize(Large))
writer := New(opts...)
if _, err := io.Copy(writer, r); err != nil && !errors.Is(err, io.EOF) {
return nil, fmt.Errorf("error writing to buffered file writer: %w", err)
}
return writer, nil
}
// Len returns the number of bytes written to the buffer or file.
func (w *BufferedFileWriter) Len() int { return int(w.size) }
@ -291,14 +280,7 @@ func (w *BufferedFileWriter) CloseForWriting() error {
// If the content is stored in memory, it returns a custom reader that handles returning the buffer to the pool.
// The caller should call Close() on the returned io.Reader when done to ensure resources are properly released.
// This method can only be used when the BufferedFileWriter is in read-only mode.
func (w *BufferedFileWriter) ReadCloser() (io.ReadCloser, error) { return w.ReadSeekCloser() }
// ReadSeekCloser returns an io.ReadSeekCloser to read the written content.
// If the content is stored in a file, it opens the file and returns a file reader.
// If the content is stored in memory, it returns a custom reader that allows seeking and handles returning
// the buffer to the pool.
// This method can only be used when the BufferedFileWriter is in read-only mode.
func (w *BufferedFileWriter) ReadSeekCloser() (io.ReadSeekCloser, error) {
func (w *BufferedFileWriter) ReadCloser() (io.ReadCloser, error) {
if w.state != readOnly {
return nil, fmt.Errorf("BufferedFileWriter must be in read-only mode to read")
}
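
A minimal usage sketch, illustrative rather than part of this diff: with NewFromReader and ReadSeekCloser removed, a caller copies data in itself, switches the writer to read-only mode, and reads back through ReadCloser. It assumes the package is imported as bufferedfilewriter and that the writer implements io.Writer, which the removed io.Copy call in NewFromReader relied on.

w := bufferedfilewriter.New()
if _, err := io.Copy(w, strings.NewReader("example data")); err != nil {
	// handle the write error
}
if err := w.CloseForWriting(); err != nil {
	// handle the error switching the writer to read-only mode
}
rc, err := w.ReadCloser() // only valid once the writer is read-only
if err != nil {
	// handle the error
}
defer rc.Close()
contents, _ := io.ReadAll(rc) // contents holds everything written above
_ = contents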

View file

@ -2,11 +2,8 @@ package bufferedfilewriter
import (
"bytes"
"crypto/rand"
"fmt"
"io"
"os"
"strings"
"testing"
"time"
@ -498,103 +495,6 @@ func BenchmarkBufferedFileWriterWriteSmall(b *testing.B) {
}
}
// Create a custom reader that can simulate errors.
type errorReader struct{}
func (errorReader) Read([]byte) (n int, err error) { return 0, fmt.Errorf("error reading") }
func TestNewFromReader(t *testing.T) {
t.Parallel()
testCases := []struct {
name string
reader io.Reader
wantErr bool
wantData string
}{
{
name: "Success case",
reader: strings.NewReader("hello world"),
wantData: "hello world",
},
{
name: "Empty reader",
reader: strings.NewReader(""),
},
{
name: "Error reader",
reader: errorReader{},
wantErr: true,
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
bufWriter, err := NewFromReader(tc.reader)
if err != nil && tc.wantErr {
return
}
assert.NoError(t, err)
assert.NotNil(t, bufWriter)
err = bufWriter.CloseForWriting()
assert.NoError(t, err)
b := new(bytes.Buffer)
rdr, err := bufWriter.ReadCloser()
if err != nil && tc.wantErr {
return
}
assert.NoError(t, err)
if rdr == nil {
return
}
defer rdr.Close()
_, err = b.ReadFrom(rdr)
assert.NoError(t, err)
assert.Equal(t, tc.wantData, b.String())
})
}
}
func TestNewFromReaderThresholdExceeded(t *testing.T) {
t.Parallel()
// Create a large data buffer that exceeds the threshold.
largeData := make([]byte, 1024*1024) // 1 MB
_, err := rand.Read(largeData)
assert.NoError(t, err)
// Create a BufferedFileWriter with a smaller threshold.
threshold := uint64(1024) // 1 KB
bufWriter, err := NewFromReader(bytes.NewReader(largeData), WithThreshold(threshold))
assert.NoError(t, err)
err = bufWriter.CloseForWriting()
assert.NoError(t, err)
rdr, err := bufWriter.ReadCloser()
assert.NoError(t, err)
defer rdr.Close()
// Verify that the data was written to a file.
assert.NotEmpty(t, bufWriter.filename)
assert.NotNil(t, bufWriter.file)
// Read the data from the BufferedFileWriter.
readData, err := io.ReadAll(rdr)
assert.NoError(t, err)
assert.Equal(t, largeData, readData)
// Verify the size of the data written.
assert.Equal(t, uint64(len(largeData)), bufWriter.size)
}
func TestBufferWriterCloseForWritingWithFile(t *testing.T) {
bufPool := pool.NewBufferPool(defaultBufferSize)
@ -700,74 +600,3 @@ func TestBufferedFileWriter_ReadFrom(t *testing.T) {
})
}
}
// simpleReader wraps a string, allowing it to be read as an io.Reader without implementing io.WriterTo.
type simpleReader struct {
data []byte
offset int
}
func newSimpleReader(s string) *simpleReader { return &simpleReader{data: []byte(s)} }
// Read implements the io.Reader interface.
func (sr *simpleReader) Read(p []byte) (n int, err error) {
if sr.offset >= len(sr.data) {
return 0, io.EOF // no more data to read
}
n = copy(p, sr.data[sr.offset:]) // copy data to p
sr.offset += n // move offset for next read
return
}
func TestNewFromReaderThresholdExceededSimpleReader(t *testing.T) {
t.Parallel()
// Create a large data buffer that exceeds the threshold.
largeData := strings.Repeat("a", 1024*1024) // 1 MB
// Create a BufferedFileWriter with a smaller threshold.
threshold := uint64(1024) // 1 KB
bufWriter, err := NewFromReader(newSimpleReader(largeData), WithThreshold(threshold))
assert.NoError(t, err)
err = bufWriter.CloseForWriting()
assert.NoError(t, err)
rdr, err := bufWriter.ReadCloser()
assert.NoError(t, err)
defer rdr.Close()
// Verify that the data was written to a file.
assert.NotEmpty(t, bufWriter.filename)
assert.NotNil(t, bufWriter.file)
// Read the data from the BufferedFileWriter.
readData, err := io.ReadAll(rdr)
assert.NoError(t, err)
assert.Equal(t, largeData, string(readData))
// Verify the size of the data written.
assert.Equal(t, uint64(len(largeData)), bufWriter.size)
}
func BenchmarkNewFromReader(b *testing.B) {
largeData := strings.Repeat("a", 1024*1024) // 1 MB
b.ResetTimer()
for i := 0; i < b.N; i++ {
reader := newSimpleReader(largeData)
b.StartTimer()
bufWriter, err := NewFromReader(reader)
assert.NoError(b, err)
b.StopTimer()
err = bufWriter.CloseForWriting()
assert.NoError(b, err)
rdr, err := bufWriter.ReadCloser()
assert.NoError(b, err)
rdr.Close()
}
}

View file

@ -1006,6 +1006,8 @@ enum DetectorType {
ElevenLabs = 994;
Netsuite = 995;
RobinhoodCrypto = 996;
NVAPI = 997;
PyPI = 998;
}
message Result {