Elastic adapter (#2727)

* Add stub source and elastic API funcs

* Spawn workers and ship chunks

* Now successfully detects a credential

- Added tests
- Added some documentation comments
- Threaded the passed context through to all the API requests

* Linting fixes

* Add integration tests and resolve some bugs they uncovered

* Logstash -> Elasticsearch

* Add support for --index-pattern

* Add support for --query-json

* Use structs instead of string building to construct a search body

* Support --since-timestamp

* Implement additional authentication methods

* Fix some small bugs

* Refactoring to support --best-effort-scan

* Finish implementation of --best-effort-scan

* Implement scan catch-up

* Finish connecting support for nodes CLI arg

* Add some integration tests around the catchup mechanism

* go mod tidy

* Fix some linting issues

* Remove some debugging Prints

* Move off of _doc

* Remove informational Printf and add informational logging

* Remove debugging logging

* Copy the index from the outer loop as well

* Don't burn up the ES API with rapid requests if there's no work to do in subsequent scans

* No need to export UnitOfWork.AddSearch

* Use a better name for the range query variable when building the timestamp range clause in searches

* Replace some unlocking defers with explicit unlocks to make the synchronized part of the code clearer

* found -> ok

* Remove superfluous buildElasticClient method

---------

Co-authored-by: Charlie Gunyon <charlie@spectral.energy>
This commit is contained in:
Charlie Gunyon 2024-05-24 16:38:20 +02:00 committed by GitHub
parent 1441289d41
commit 311494e86e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
19 changed files with 2250 additions and 217 deletions

4
go.mod
View file

@ -28,6 +28,7 @@ require (
github.com/coinbase/waas-client-library-go v1.0.8
github.com/couchbase/gocb/v2 v2.8.1
github.com/crewjam/rfc5424 v0.1.0
github.com/elastic/go-elasticsearch/v8 v8.13.0
github.com/envoyproxy/protoc-gen-validate v1.0.4
github.com/fatih/color v1.17.0
github.com/felixge/fgprof v0.9.4
@ -75,6 +76,7 @@ require (
github.com/stretchr/testify v1.9.0
github.com/tailscale/depaware v0.0.0-20210622194025-720c4b409502
github.com/testcontainers/testcontainers-go v0.31.0
github.com/testcontainers/testcontainers-go/modules/elasticsearch v0.31.0
github.com/testcontainers/testcontainers-go/modules/mssql v0.31.0
github.com/testcontainers/testcontainers-go/modules/mysql v0.31.0
github.com/testcontainers/testcontainers-go/modules/postgres v0.31.0
@ -160,6 +162,7 @@ require (
github.com/docker/go-units v0.5.0 // indirect
github.com/dsnet/compress v0.0.1 // indirect
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
github.com/elastic/elastic-transport-go/v8 v8.5.0 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
@ -211,7 +214,6 @@ require (
github.com/launchdarkly/go-server-sdk-evaluation/v3 v3.0.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/magefile/mage v1.14.0 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect

20
go.sum
View file

@ -7,8 +7,6 @@ cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTj
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go v0.112.2 h1:ZaGT6LiG7dBzi6zNOvVZwacaXlmf3lRqnC4DQzqyRQw=
cloud.google.com/go v0.112.2/go.mod h1:iEqjp//KquGIJV/m+Pk3xecgKNhV+ry+vVTsy4TbDms=
cloud.google.com/go v0.113.0 h1:g3C70mn3lWfckKBiCVsAshabrDg01pQ0pnX1MNtnMkA=
cloud.google.com/go v0.113.0/go.mod h1:glEqlogERKYeePz6ZdkcLJ28Q2I6aERgDDErBg9GzO8=
cloud.google.com/go/auth v0.4.1 h1:Z7YNIhlWRtrnKlZke7z3GMqzvuYzdc2z98F9D1NV5Hg=
@ -119,10 +117,6 @@ github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPd
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aws/aws-sdk-go v1.53.3 h1:xv0iGCCLdf6ZtlLPMCBjm+tU9UBLP5hXnSqnbKFYmto=
github.com/aws/aws-sdk-go v1.53.3/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/aws/aws-sdk-go v1.53.5 h1:1OcVWMjGlwt7EU5OWmmEEXqaYfmX581EK317QJZXItM=
github.com/aws/aws-sdk-go v1.53.5/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/aws/aws-sdk-go v1.53.6 h1:1/MYh/VmxdJu7v2bwvDA2JS30UI7bg62QYgQ7KxMa/Q=
github.com/aws/aws-sdk-go v1.53.6/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/aws/smithy-go v1.20.1 h1:4SZlSlMr36UEqC7XOyRVb27XMeZubNcBNN+9IgEPIQw=
@ -235,6 +229,10 @@ github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5Jflh
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY=
github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
github.com/elastic/elastic-transport-go/v8 v8.5.0 h1:v5membAl7lvQgBTexPRDBO/RdnlQX+FM9fUVDyXxvH0=
github.com/elastic/elastic-transport-go/v8 v8.5.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
github.com/elastic/go-elasticsearch/v8 v8.13.0 h1:YXPAWpvbYX0mWSNG9tnEpvs4h1stgMy5JUeKZECYYB8=
github.com/elastic/go-elasticsearch/v8 v8.13.0/go.mod h1:DIn7HopJs4oZC/w0WoJR13uMUxtHeq92eI5bqv5CRfI=
github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a h1:mATvB/9r/3gvcejNsXKSkQ6lcIaNec2nyfOdlTBR2lU=
github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
@ -690,14 +688,14 @@ github.com/tailscale/depaware v0.0.0-20210622194025-720c4b409502 h1:34icjjmqJ2HP
github.com/tailscale/depaware v0.0.0-20210622194025-720c4b409502/go.mod h1:p9lPsd+cx33L3H9nNoecRRxPssFKUwwI50I3pZ0yT+8=
github.com/testcontainers/testcontainers-go v0.31.0 h1:W0VwIhcEVhRflwL9as3dhY6jXjVCA27AkmbnZ+UTh3U=
github.com/testcontainers/testcontainers-go v0.31.0/go.mod h1:D2lAoA0zUFiSY+eAflqK5mcUx/A5hrrORaEQrd0SefI=
github.com/testcontainers/testcontainers-go/modules/elasticsearch v0.31.0 h1:KBbU/rVL3RhrFYcrVGY+NDw3x3Ho2YlJUnjbIOsL6jk=
github.com/testcontainers/testcontainers-go/modules/elasticsearch v0.31.0/go.mod h1:7YQbgJUoNDztnXWAdCRtI+gUqBM+URd83JzwYlzwGhQ=
github.com/testcontainers/testcontainers-go/modules/mssql v0.31.0 h1:X4MRxswzZJov/X5a5FYGzNmMRAKlnErE+5euMoMJGzM=
github.com/testcontainers/testcontainers-go/modules/mssql v0.31.0/go.mod h1:GsGFz4tcxka1meZdBBHdqZCYdpHQaa/pORXW/ELWZV0=
github.com/testcontainers/testcontainers-go/modules/mysql v0.31.0 h1:790+S8ewZYCbG+o8IiFlZ8ZZ33XbNO6zV9qhU6xhlRk=
github.com/testcontainers/testcontainers-go/modules/mysql v0.31.0/go.mod h1:REFmO+lSG9S6uSBEwIMZCxeI36uhScjTwChYADeO3JA=
github.com/testcontainers/testcontainers-go/modules/postgres v0.31.0 h1:isAwFS3KNKRbJMbWv+wolWqOFUECmjYZ+sIRZCIBc/E=
github.com/testcontainers/testcontainers-go/modules/postgres v0.31.0/go.mod h1:ZNYY8vumNCEG9YI59A9d6/YaMY49uwRhmeU563EzFGw=
github.com/tetratelabs/wazero v1.7.0 h1:jg5qPydno59wqjpGrHph81lbtHzTrWzwwtD4cD88+hQ=
github.com/tetratelabs/wazero v1.7.0/go.mod h1:ytl6Zuh20R/eROuyDaGPkp82O9C/DJfXAwJfQ3X6/7Y=
github.com/tetratelabs/wazero v1.7.1 h1:QtSfd6KLc41DIMpDYlJdoMc6k7QTN246DM2+n2Y/Dx8=
github.com/tetratelabs/wazero v1.7.1/go.mod h1:ytl6Zuh20R/eROuyDaGPkp82O9C/DJfXAwJfQ3X6/7Y=
github.com/therootcompany/xz v1.0.1 h1:CmOtsn1CbtmyYiusbfmhmkpAAETj0wBIH6kCYaX+xzw=
@ -718,8 +716,6 @@ github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0o
github.com/urfave/cli v1.22.12/go.mod h1:sSBEIC79qR6OvcmsD4U3KABeOTxDqQtdDnaFuUN30b8=
github.com/vbatts/tar-split v0.11.3 h1:hLFqsOLQ1SsppQNTMpkpPXClLDfC2A3Zgy9OUU+RVck=
github.com/vbatts/tar-split v0.11.3/go.mod h1:9QlHN18E+fEH7RdG+QAJJcuya3rqT7eXSTY7wGrAokY=
github.com/wasilibs/go-re2 v1.5.2 h1:fDO2TJrRzRrv3jD0gzOvmZ2UM4Yt9YXOEdLrlNc/Ies=
github.com/wasilibs/go-re2 v1.5.2/go.mod h1:UqqxQ1O99boQUm1r61H/IYGiGQOS/P88K7hU5nLNkEg=
github.com/wasilibs/go-re2 v1.5.3 h1:wiuTcgDZdLhu8NG8oqF5sF5Q3yIU14lPAvXqeYzDK3g=
github.com/wasilibs/go-re2 v1.5.3/go.mod h1:PzpVPsBdFC7vM8QJbbEnOeTmwA0DGE783d/Gex8eCV8=
github.com/wasilibs/nottinygc v0.4.0 h1:h1TJMihMC4neN6Zq+WKpLxgd9xCFMw7O9ETLwY2exJQ=
@ -1025,8 +1021,6 @@ google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsb
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.180.0 h1:M2D87Yo0rGBPWpo1orwfCLehUUL6E7/TYe5gvMQWDh4=
google.golang.org/api v0.180.0/go.mod h1:51AiyoEg1MJPSZ9zvklA8VnRILPXxn1iVen9v25XHAE=
google.golang.org/api v0.181.0 h1:rPdjwnWgiPPOJx3IcSAQ2III5aX5tCer6wMpa/xmZi4=
google.golang.org/api v0.181.0/go.mod h1:MnQ+M0CFsfUwA5beZ+g/vCBCPXvtmZwRz2qzZk8ih1k=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
@ -1053,8 +1047,6 @@ google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda h1:wu/KJm9KJwpfHWh
google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda/go.mod h1:g2LLCvCeCSir/JJSWosk19BR4NVxGqHUC6rxIRsd7Aw=
google.golang.org/genproto/googleapis/api v0.0.0-20240506185236-b8a5c65736ae h1:AH34z6WAGVNkllnKs5raNq3yRq93VnjBG6rpfub/jYk=
google.golang.org/genproto/googleapis/api v0.0.0-20240506185236-b8a5c65736ae/go.mod h1:FfiGhwUm6CJviekPrc0oJ+7h29e+DmWU6UtjX0ZvI7Y=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6 h1:DujSIu+2tC9Ht0aPNA7jgj23Iq8Ewi5sgkQ++wdvonE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240513163218-0867130af1f8 h1:mxSlqyb8ZAHsYDCfiXN1EDdNTdvjUJSLY+OnAUtYNYA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240513163218-0867130af1f8/go.mod h1:I7Y+G38R2bu5j1aLzfFmQfTcU/WnFuqDwLZAbvKTKpM=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=

28
main.go
View file

@ -177,6 +177,18 @@ var (
postmanWorkspacePaths = postmanScan.Flag("workspace-paths", "Path to Postman workspaces.").Strings()
postmanCollectionPaths = postmanScan.Flag("collection-paths", "Path to Postman collections.").Strings()
postmanEnvironmentPaths = postmanScan.Flag("environment-paths", "Path to Postman environments.").Strings()
elasticsearchScan = cli.Command("elasticsearch", "Scan Elasticsearch")
elasticsearchNodes = elasticsearchScan.Flag("nodes", "Elasticsearch nodes").Envar("ELASTICSEARCH_NODES").Strings()
elasticsearchUsername = elasticsearchScan.Flag("username", "Elasticsearch username").Envar("ELASTICSEARCH_USERNAME").String()
elasticsearchPassword = elasticsearchScan.Flag("password", "Elasticsearch password").Envar("ELASTICSEARCH_PASSWORD").String()
elasticsearchServiceToken = elasticsearchScan.Flag("service-token", "Elasticsearch service token").Envar("ELASTICSEARCH_SERVICE_TOKEN").String()
elasticsearchCloudId = elasticsearchScan.Flag("cloud-id", "Elasticsearch cloud ID. Can also be provided with environment variable").Envar("ELASTICSEARCH_CLOUD_ID").String()
elasticsearchAPIKey = elasticsearchScan.Flag("api-key", "Elasticsearch API key. Can also be provided with environment variable").Envar("ELASTICSEARCH_API_KEY").String()
elasticsearchIndexPattern = elasticsearchScan.Flag("index-pattern", "Filters the indices to search").Default("*").Envar("ELASTICSEARCH_INDEX_PATTERN").String()
elasticsearchQueryJSON = elasticsearchScan.Flag("query-json", "Filters the documents to search").Envar("ELASTICSEARCH_QUERY_JSON").String()
elasticsearchSinceTimestamp = elasticsearchScan.Flag("since-timestamp", "Filters the documents to search to those created since this timestamp; overrides any timestamp from --query-json").Envar("ELASTICSEARCH_SINCE_TIMESTAMP").String()
elasticsearchBestEffortScan = elasticsearchScan.Flag("best-effort-scan", "Attempts to continuously scan a cluster").Envar("ELASTICSEARCH_BEST_EFFORT_SCAN").Bool()
)
func init() {
@ -633,6 +645,22 @@ func run(state overseer.State) {
if err := e.ScanPostman(ctx, cfg); err != nil {
logFatal(err, "Failed to scan Postman.")
}
case elasticsearchScan.FullCommand():
cfg := sources.ElasticsearchConfig{
Nodes: *elasticsearchNodes,
Username: *elasticsearchUsername,
Password: *elasticsearchPassword,
CloudID: *elasticsearchCloudId,
APIKey: *elasticsearchAPIKey,
ServiceToken: *elasticsearchServiceToken,
IndexPattern: *elasticsearchIndexPattern,
QueryJSON: *elasticsearchQueryJSON,
SinceTimestamp: *elasticsearchSinceTimestamp,
BestEffortScan: *elasticsearchBestEffortScan,
}
if err := e.ScanElasticsearch(ctx, cfg); err != nil {
logFatal(err, "Failed to scan Elasticsearch.")
}
default:
logFatal(fmt.Errorf("invalid command"), "Command not recognized.")
}

View file

@ -0,0 +1,46 @@
package engine
import (
"runtime"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/types/known/anypb"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/sourcespb"
"github.com/trufflesecurity/trufflehog/v3/pkg/sources"
"github.com/trufflesecurity/trufflehog/v3/pkg/sources/elasticsearch"
)
// ScanElasticsearch scans a Elasticsearch installation.
func (e *Engine) ScanElasticsearch(ctx context.Context, c sources.ElasticsearchConfig) error {
connection := &sourcespb.Elasticsearch{
Nodes: c.Nodes,
Username: c.Username,
Password: c.Password,
CloudId: c.CloudID,
ApiKey: c.APIKey,
ServiceToken: c.ServiceToken,
IndexPattern: c.IndexPattern,
QueryJson: c.QueryJSON,
SinceTimestamp: c.SinceTimestamp,
BestEffortScan: c.BestEffortScan,
}
var conn anypb.Any
err := anypb.MarshalFrom(&conn, connection, proto.MarshalOptions{})
if err != nil {
ctx.Logger().Error(err, "failed to marshal Elasticsearch connection")
return err
}
sourceName := "trufflehog - Elasticsearch"
sourceID, jobID, _ := e.sourceManager.GetIDs(ctx, sourceName, elasticsearch.SourceType)
elasticsearchSource := &elasticsearch.Source{}
if err := elasticsearchSource.Init(ctx, sourceName, jobID, sourceID, true, &conn, runtime.NumCPU()); err != nil {
return err
}
_, err = e.sourceManager.Run(ctx, sourceName, elasticsearchSource)
return err
}

View file

@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.33.0
// protoc v4.25.3
// protoc-gen-go v1.32.0
// protoc v4.25.2
// source: credentials.proto
package credentialspb

View file

@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.33.0
// protoc v4.25.3
// protoc-gen-go v1.32.0
// protoc v4.25.2
// source: custom_detectors.proto
package custom_detectorspb

View file

@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.33.0
// protoc v4.25.3
// protoc-gen-go v1.32.0
// protoc v4.25.2
// source: detectors.proto
package detectorspb

View file

@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.33.0
// protoc v4.25.3
// protoc-gen-go v1.32.0
// protoc v4.25.2
// source: source_metadata.proto
package source_metadatapb
@ -2935,6 +2935,69 @@ type Webhook_Vector struct {
func (*Webhook_Vector) isWebhook_Data() {}
type Elasticsearch struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Index string `protobuf:"bytes,1,opt,name=index,proto3" json:"index,omitempty"`
DocumentId string `protobuf:"bytes,2,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"`
Timestamp string `protobuf:"bytes,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
}
func (x *Elasticsearch) Reset() {
*x = Elasticsearch{}
if protoimpl.UnsafeEnabled {
mi := &file_source_metadata_proto_msgTypes[31]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Elasticsearch) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Elasticsearch) ProtoMessage() {}
func (x *Elasticsearch) ProtoReflect() protoreflect.Message {
mi := &file_source_metadata_proto_msgTypes[31]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Elasticsearch.ProtoReflect.Descriptor instead.
func (*Elasticsearch) Descriptor() ([]byte, []int) {
return file_source_metadata_proto_rawDescGZIP(), []int{31}
}
func (x *Elasticsearch) GetIndex() string {
if x != nil {
return x.Index
}
return ""
}
func (x *Elasticsearch) GetDocumentId() string {
if x != nil {
return x.DocumentId
}
return ""
}
func (x *Elasticsearch) GetTimestamp() string {
if x != nil {
return x.Timestamp
}
return ""
}
type MetaData struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
@ -2972,13 +3035,14 @@ type MetaData struct {
// *MetaData_TravisCI
// *MetaData_Postman
// *MetaData_Webhook
// *MetaData_Elasticsearch
Data isMetaData_Data `protobuf_oneof:"data"`
}
func (x *MetaData) Reset() {
*x = MetaData{}
if protoimpl.UnsafeEnabled {
mi := &file_source_metadata_proto_msgTypes[31]
mi := &file_source_metadata_proto_msgTypes[32]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@ -2991,7 +3055,7 @@ func (x *MetaData) String() string {
func (*MetaData) ProtoMessage() {}
func (x *MetaData) ProtoReflect() protoreflect.Message {
mi := &file_source_metadata_proto_msgTypes[31]
mi := &file_source_metadata_proto_msgTypes[32]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@ -3004,7 +3068,7 @@ func (x *MetaData) ProtoReflect() protoreflect.Message {
// Deprecated: Use MetaData.ProtoReflect.Descriptor instead.
func (*MetaData) Descriptor() ([]byte, []int) {
return file_source_metadata_proto_rawDescGZIP(), []int{31}
return file_source_metadata_proto_rawDescGZIP(), []int{32}
}
func (m *MetaData) GetData() isMetaData_Data {
@ -3224,6 +3288,13 @@ func (x *MetaData) GetWebhook() *Webhook {
return nil
}
func (x *MetaData) GetElasticsearch() *Elasticsearch {
if x, ok := x.GetData().(*MetaData_Elasticsearch); ok {
return x.Elasticsearch
}
return nil
}
type isMetaData_Data interface {
isMetaData_Data()
}
@ -3348,6 +3419,10 @@ type MetaData_Webhook struct {
Webhook *Webhook `protobuf:"bytes,30,opt,name=webhook,proto3,oneof"`
}
type MetaData_Elasticsearch struct {
Elasticsearch *Elasticsearch `protobuf:"bytes,31,opt,name=elasticsearch,proto3,oneof"`
}
func (*MetaData_Azure) isMetaData_Data() {}
func (*MetaData_Bitbucket) isMetaData_Data() {}
@ -3408,6 +3483,8 @@ func (*MetaData_Postman) isMetaData_Data() {}
func (*MetaData_Webhook) isMetaData_Data() {}
func (*MetaData_Elasticsearch) isMetaData_Data() {}
var File_source_metadata_proto protoreflect.FileDescriptor
var file_source_metadata_proto_rawDesc = []byte{
@ -3774,115 +3851,126 @@ var file_source_metadata_proto_rawDesc = []byte{
0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72,
0x48, 0x00, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x06, 0x0a, 0x04, 0x64, 0x61,
0x74, 0x61, 0x22, 0xb9, 0x0c, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x44, 0x61, 0x74, 0x61, 0x12,
0x2e, 0x0a, 0x05, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16,
0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
0x2e, 0x41, 0x7a, 0x75, 0x72, 0x65, 0x48, 0x00, 0x52, 0x05, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x12,
0x3a, 0x0a, 0x09, 0x62, 0x69, 0x74, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0x2e, 0x42, 0x69, 0x74, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x48, 0x00,
0x52, 0x09, 0x62, 0x69, 0x74, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x37, 0x0a, 0x08, 0x63,
0x69, 0x72, 0x63, 0x6c, 0x65, 0x63, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x43, 0x69, 0x72, 0x63, 0x6c, 0x65, 0x43, 0x49, 0x48, 0x00, 0x52, 0x08, 0x63, 0x69, 0x72, 0x63,
0x6c, 0x65, 0x63, 0x69, 0x12, 0x3d, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e,
0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x6c,
0x75, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65,
0x6e, 0x63, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x64, 0x6f, 0x63, 0x6b, 0x65, 0x72, 0x18, 0x05, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x44, 0x6f, 0x63, 0x6b, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06,
0x64, 0x6f, 0x63, 0x6b, 0x65, 0x72, 0x12, 0x28, 0x0a, 0x03, 0x65, 0x63, 0x72, 0x18, 0x06, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x45, 0x43, 0x52, 0x48, 0x00, 0x52, 0x03, 0x65, 0x63, 0x72,
0x12, 0x28, 0x0a, 0x03, 0x67, 0x63, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x47, 0x43, 0x53, 0x48, 0x00, 0x52, 0x03, 0x67, 0x63, 0x73, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x69,
0x74, 0x68, 0x75, 0x62, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x69, 0x74,
0x68, 0x75, 0x62, 0x48, 0x00, 0x52, 0x06, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x12, 0x31, 0x0a,
0x06, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x47, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x48, 0x00, 0x52, 0x06, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62,
0x12, 0x2b, 0x0a, 0x04, 0x6a, 0x69, 0x72, 0x61, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15,
0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
0x2e, 0x4a, 0x69, 0x72, 0x61, 0x48, 0x00, 0x52, 0x04, 0x6a, 0x69, 0x72, 0x61, 0x12, 0x28, 0x0a,
0x03, 0x6e, 0x70, 0x6d, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4e, 0x50, 0x4d,
0x48, 0x00, 0x52, 0x03, 0x6e, 0x70, 0x6d, 0x12, 0x2b, 0x0a, 0x04, 0x70, 0x79, 0x70, 0x69, 0x18,
0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x50, 0x79, 0x50, 0x69, 0x48, 0x00, 0x52, 0x04,
0x70, 0x79, 0x70, 0x69, 0x12, 0x25, 0x0a, 0x02, 0x73, 0x33, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x13, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61,
0x74, 0x61, 0x2e, 0x53, 0x33, 0x48, 0x00, 0x52, 0x02, 0x73, 0x33, 0x12, 0x2e, 0x0a, 0x05, 0x73,
0x6c, 0x61, 0x63, 0x6b, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x6c, 0x61,
0x63, 0x6b, 0x48, 0x00, 0x52, 0x05, 0x73, 0x6c, 0x61, 0x63, 0x6b, 0x12, 0x3d, 0x0a, 0x0a, 0x66,
0x69, 0x6c, 0x65, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x48, 0x00, 0x52, 0x0a,
0x66, 0x69, 0x6c, 0x65, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x28, 0x0a, 0x03, 0x67, 0x69,
0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x69, 0x74, 0x48, 0x00, 0x52,
0x03, 0x67, 0x69, 0x74, 0x12, 0x2b, 0x0a, 0x04, 0x74, 0x65, 0x73, 0x74, 0x18, 0x11, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x74, 0x65, 0x73,
0x74, 0x12, 0x3a, 0x0a, 0x09, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65, 0x18, 0x12,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65,
0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65,
0x48, 0x00, 0x52, 0x09, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65, 0x12, 0x31, 0x0a,
0x06, 0x67, 0x65, 0x72, 0x72, 0x69, 0x74, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x47, 0x65, 0x72, 0x72, 0x69, 0x74, 0x48, 0x00, 0x52, 0x06, 0x67, 0x65, 0x72, 0x72, 0x69, 0x74,
0x12, 0x34, 0x0a, 0x07, 0x6a, 0x65, 0x6e, 0x6b, 0x69, 0x6e, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64,
0x61, 0x74, 0x61, 0x2e, 0x4a, 0x65, 0x6e, 0x6b, 0x69, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x07, 0x6a,
0x65, 0x6e, 0x6b, 0x69, 0x6e, 0x73, 0x12, 0x2e, 0x0a, 0x05, 0x74, 0x65, 0x61, 0x6d, 0x73, 0x18,
0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x65, 0x61, 0x6d, 0x73, 0x48, 0x00, 0x52,
0x05, 0x74, 0x65, 0x61, 0x6d, 0x73, 0x12, 0x40, 0x0a, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61,
0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x16, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x72,
0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x48, 0x00, 0x52, 0x0b, 0x61, 0x72, 0x74,
0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x31, 0x0a, 0x06, 0x73, 0x79, 0x73, 0x6c,
0x6f, 0x67, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x79, 0x73, 0x6c, 0x6f,
0x67, 0x48, 0x00, 0x52, 0x06, 0x73, 0x79, 0x73, 0x6c, 0x6f, 0x67, 0x12, 0x34, 0x0a, 0x07, 0x66,
0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73,
0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x46,
0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x48, 0x00, 0x52, 0x07, 0x66, 0x6f, 0x72, 0x61, 0x67, 0x65,
0x72, 0x12, 0x3d, 0x0a, 0x0a, 0x73, 0x68, 0x61, 0x72, 0x65, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18,
0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x68, 0x61, 0x72, 0x65, 0x50, 0x6f, 0x69,
0x6e, 0x74, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x68, 0x61, 0x72, 0x65, 0x70, 0x6f, 0x69, 0x6e, 0x74,
0x12, 0x40, 0x0a, 0x0b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x18,
0x1a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72,
0x69, 0x76, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72, 0x69,
0x76, 0x65, 0x12, 0x3d, 0x0a, 0x0a, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73,
0x18, 0x1b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x7a, 0x75, 0x72, 0x65, 0x52, 0x65,
0x70, 0x6f, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f,
0x73, 0x12, 0x37, 0x0a, 0x08, 0x74, 0x72, 0x61, 0x76, 0x69, 0x73, 0x43, 0x49, 0x18, 0x1c, 0x20,
0x74, 0x61, 0x22, 0x64, 0x0a, 0x0d, 0x45, 0x6c, 0x61, 0x73, 0x74, 0x69, 0x63, 0x73, 0x65, 0x61,
0x72, 0x63, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01,
0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x1f, 0x0a, 0x0b, 0x64, 0x6f, 0x63,
0x75, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a,
0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69,
0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74,
0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x81, 0x0d, 0x0a, 0x08, 0x4d, 0x65, 0x74,
0x61, 0x44, 0x61, 0x74, 0x61, 0x12, 0x2e, 0x0a, 0x05, 0x61, 0x7a, 0x75, 0x72, 0x65, 0x18, 0x01,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65,
0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x7a, 0x75, 0x72, 0x65, 0x48, 0x00, 0x52, 0x05,
0x61, 0x7a, 0x75, 0x72, 0x65, 0x12, 0x3a, 0x0a, 0x09, 0x62, 0x69, 0x74, 0x62, 0x75, 0x63, 0x6b,
0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x42, 0x69, 0x74, 0x62, 0x75,
0x63, 0x6b, 0x65, 0x74, 0x48, 0x00, 0x52, 0x09, 0x62, 0x69, 0x74, 0x62, 0x75, 0x63, 0x6b, 0x65,
0x74, 0x12, 0x37, 0x0a, 0x08, 0x63, 0x69, 0x72, 0x63, 0x6c, 0x65, 0x63, 0x69, 0x18, 0x03, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x72, 0x61, 0x76, 0x69, 0x73, 0x43, 0x49, 0x48, 0x00,
0x52, 0x08, 0x74, 0x72, 0x61, 0x76, 0x69, 0x73, 0x43, 0x49, 0x12, 0x34, 0x0a, 0x07, 0x70, 0x6f,
0x73, 0x74, 0x6d, 0x61, 0x6e, 0x18, 0x1d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x50, 0x6f,
0x73, 0x74, 0x6d, 0x61, 0x6e, 0x48, 0x00, 0x52, 0x07, 0x70, 0x6f, 0x73, 0x74, 0x6d, 0x61, 0x6e,
0x12, 0x34, 0x0a, 0x07, 0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x18, 0x1e, 0x20, 0x01, 0x28,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x43, 0x69, 0x72, 0x63, 0x6c, 0x65, 0x43, 0x49, 0x48, 0x00,
0x52, 0x08, 0x63, 0x69, 0x72, 0x63, 0x6c, 0x65, 0x63, 0x69, 0x12, 0x3d, 0x0a, 0x0a, 0x63, 0x6f,
0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b,
0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63,
0x6f, 0x6e, 0x66, 0x6c, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x64, 0x6f, 0x63,
0x6b, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x44, 0x6f, 0x63, 0x6b,
0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x64, 0x6f, 0x63, 0x6b, 0x65, 0x72, 0x12, 0x28, 0x0a, 0x03,
0x65, 0x63, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x45, 0x43, 0x52, 0x48,
0x00, 0x52, 0x03, 0x65, 0x63, 0x72, 0x12, 0x28, 0x0a, 0x03, 0x67, 0x63, 0x73, 0x18, 0x07, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x43, 0x53, 0x48, 0x00, 0x52, 0x03, 0x67, 0x63, 0x73,
0x12, 0x31, 0x0a, 0x06, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61,
0x74, 0x61, 0x2e, 0x47, 0x69, 0x74, 0x68, 0x75, 0x62, 0x48, 0x00, 0x52, 0x06, 0x67, 0x69, 0x74,
0x68, 0x75, 0x62, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x18, 0x09, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x48, 0x00, 0x52, 0x06,
0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x12, 0x2b, 0x0a, 0x04, 0x6a, 0x69, 0x72, 0x61, 0x18, 0x0a,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65,
0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4a, 0x69, 0x72, 0x61, 0x48, 0x00, 0x52, 0x04, 0x6a,
0x69, 0x72, 0x61, 0x12, 0x28, 0x0a, 0x03, 0x6e, 0x70, 0x6d, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x14, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61,
0x74, 0x61, 0x2e, 0x4e, 0x50, 0x4d, 0x48, 0x00, 0x52, 0x03, 0x6e, 0x70, 0x6d, 0x12, 0x2b, 0x0a,
0x04, 0x70, 0x79, 0x70, 0x69, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x50, 0x79,
0x50, 0x69, 0x48, 0x00, 0x52, 0x04, 0x70, 0x79, 0x70, 0x69, 0x12, 0x25, 0x0a, 0x02, 0x73, 0x33,
0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x33, 0x48, 0x00, 0x52, 0x02, 0x73,
0x33, 0x12, 0x2e, 0x0a, 0x05, 0x73, 0x6c, 0x61, 0x63, 0x6b, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x16, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61,
0x74, 0x61, 0x2e, 0x53, 0x6c, 0x61, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x05, 0x73, 0x6c, 0x61, 0x63,
0x6b, 0x12, 0x3d, 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18,
0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d,
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x79, 0x73, 0x74,
0x65, 0x6d, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d,
0x12, 0x28, 0x0a, 0x03, 0x67, 0x69, 0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
0x47, 0x69, 0x74, 0x48, 0x00, 0x52, 0x03, 0x67, 0x69, 0x74, 0x12, 0x2b, 0x0a, 0x04, 0x74, 0x65,
0x73, 0x74, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x65, 0x73, 0x74, 0x48,
0x00, 0x52, 0x04, 0x74, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x09, 0x62, 0x75, 0x69, 0x6c, 0x64,
0x6b, 0x69, 0x74, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x42, 0x75, 0x69,
0x6c, 0x64, 0x6b, 0x69, 0x74, 0x65, 0x48, 0x00, 0x52, 0x09, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x6b,
0x69, 0x74, 0x65, 0x12, 0x31, 0x0a, 0x06, 0x67, 0x65, 0x72, 0x72, 0x69, 0x74, 0x18, 0x13, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x65, 0x72, 0x72, 0x69, 0x74, 0x48, 0x00, 0x52, 0x06,
0x67, 0x65, 0x72, 0x72, 0x69, 0x74, 0x12, 0x34, 0x0a, 0x07, 0x6a, 0x65, 0x6e, 0x6b, 0x69, 0x6e,
0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4a, 0x65, 0x6e, 0x6b, 0x69, 0x6e,
0x73, 0x48, 0x00, 0x52, 0x07, 0x6a, 0x65, 0x6e, 0x6b, 0x69, 0x6e, 0x73, 0x12, 0x2e, 0x0a, 0x05,
0x74, 0x65, 0x61, 0x6d, 0x73, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x65,
0x61, 0x6d, 0x73, 0x48, 0x00, 0x52, 0x05, 0x74, 0x65, 0x61, 0x6d, 0x73, 0x12, 0x40, 0x0a, 0x0b,
0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x16, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64,
0x61, 0x74, 0x61, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x48,
0x00, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x31,
0x0a, 0x06, 0x73, 0x79, 0x73, 0x6c, 0x6f, 0x67, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
0x2e, 0x53, 0x79, 0x73, 0x6c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x06, 0x73, 0x79, 0x73, 0x6c, 0x6f,
0x67, 0x12, 0x34, 0x0a, 0x07, 0x66, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x18, 0x18, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0x2e, 0x46, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x48, 0x00, 0x52, 0x07,
0x66, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x0a, 0x73, 0x68, 0x61, 0x72, 0x65,
0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x53, 0x68,
0x61, 0x72, 0x65, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x68, 0x61, 0x72,
0x65, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x40, 0x0a, 0x0b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x44, 0x72, 0x69, 0x76, 0x65, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x47, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x44, 0x72, 0x69, 0x76, 0x65, 0x12, 0x3d, 0x0a, 0x0a, 0x61, 0x7a, 0x75, 0x72,
0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x18, 0x1b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x73,
0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41,
0x7a, 0x75, 0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x7a, 0x75,
0x72, 0x65, 0x52, 0x65, 0x70, 0x6f, 0x73, 0x12, 0x37, 0x0a, 0x08, 0x74, 0x72, 0x61, 0x76, 0x69,
0x73, 0x43, 0x49, 0x18, 0x1c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x72, 0x61, 0x76,
0x69, 0x73, 0x43, 0x49, 0x48, 0x00, 0x52, 0x08, 0x74, 0x72, 0x61, 0x76, 0x69, 0x73, 0x43, 0x49,
0x12, 0x34, 0x0a, 0x07, 0x70, 0x6f, 0x73, 0x74, 0x6d, 0x61, 0x6e, 0x18, 0x1d, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64,
0x61, 0x74, 0x61, 0x2e, 0x57, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x48, 0x00, 0x52, 0x07, 0x77,
0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x42, 0x06, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x2a, 0x3e,
0x0a, 0x0a, 0x56, 0x69, 0x73, 0x69, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x0a, 0x0a, 0x06,
0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76,
0x61, 0x74, 0x65, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x10,
0x02, 0x12, 0x0b, 0x0a, 0x07, 0x75, 0x6e, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x10, 0x03, 0x42, 0x43,
0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x74, 0x72, 0x75,
0x66, 0x66, 0x6c, 0x65, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, 0x72, 0x75,
0x66, 0x66, 0x6c, 0x65, 0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70,
0x62, 0x2f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
0x61, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x61, 0x74, 0x61, 0x2e, 0x50, 0x6f, 0x73, 0x74, 0x6d, 0x61, 0x6e, 0x48, 0x00, 0x52, 0x07, 0x70,
0x6f, 0x73, 0x74, 0x6d, 0x61, 0x6e, 0x12, 0x34, 0x0a, 0x07, 0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f,
0x6b, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x57, 0x65, 0x62, 0x68, 0x6f, 0x6f,
0x6b, 0x48, 0x00, 0x52, 0x07, 0x77, 0x65, 0x62, 0x68, 0x6f, 0x6f, 0x6b, 0x12, 0x46, 0x0a, 0x0d,
0x65, 0x6c, 0x61, 0x73, 0x74, 0x69, 0x63, 0x73, 0x65, 0x61, 0x72, 0x63, 0x68, 0x18, 0x1f, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x45, 0x6c, 0x61, 0x73, 0x74, 0x69, 0x63, 0x73, 0x65, 0x61,
0x72, 0x63, 0x68, 0x48, 0x00, 0x52, 0x0d, 0x65, 0x6c, 0x61, 0x73, 0x74, 0x69, 0x63, 0x73, 0x65,
0x61, 0x72, 0x63, 0x68, 0x42, 0x06, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x2a, 0x3e, 0x0a, 0x0a,
0x56, 0x69, 0x73, 0x69, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x0a, 0x0a, 0x06, 0x70, 0x75,
0x62, 0x6c, 0x69, 0x63, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74,
0x65, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x73, 0x68, 0x61, 0x72, 0x65, 0x64, 0x10, 0x02, 0x12,
0x0b, 0x0a, 0x07, 0x75, 0x6e, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x10, 0x03, 0x42, 0x43, 0x5a, 0x41,
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66,
0x6c, 0x65, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66,
0x6c, 0x65, 0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x70,
0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@ -3898,7 +3986,7 @@ func file_source_metadata_proto_rawDescGZIP() []byte {
}
var file_source_metadata_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_source_metadata_proto_msgTypes = make([]protoimpl.MessageInfo, 32)
var file_source_metadata_proto_msgTypes = make([]protoimpl.MessageInfo, 33)
var file_source_metadata_proto_goTypes = []interface{}{
(Visibility)(0), // 0: source_metadata.Visibility
(*Azure)(nil), // 1: source_metadata.Azure
@ -3932,8 +4020,9 @@ var file_source_metadata_proto_goTypes = []interface{}{
(*Postman)(nil), // 29: source_metadata.Postman
(*Vector)(nil), // 30: source_metadata.Vector
(*Webhook)(nil), // 31: source_metadata.Webhook
(*MetaData)(nil), // 32: source_metadata.MetaData
(*timestamppb.Timestamp)(nil), // 33: google.protobuf.Timestamp
(*Elasticsearch)(nil), // 32: source_metadata.Elasticsearch
(*MetaData)(nil), // 33: source_metadata.MetaData
(*timestamppb.Timestamp)(nil), // 34: google.protobuf.Timestamp
}
var file_source_metadata_proto_depIdxs = []int32{
0, // 0: source_metadata.Github.visibility:type_name -> source_metadata.Visibility
@ -3942,7 +4031,7 @@ var file_source_metadata_proto_depIdxs = []int32{
15, // 3: source_metadata.Forager.npm:type_name -> source_metadata.NPM
16, // 4: source_metadata.Forager.pypi:type_name -> source_metadata.PyPi
0, // 5: source_metadata.AzureRepos.visibility:type_name -> source_metadata.Visibility
33, // 6: source_metadata.Vector.timestamp:type_name -> google.protobuf.Timestamp
34, // 6: source_metadata.Vector.timestamp:type_name -> google.protobuf.Timestamp
30, // 7: source_metadata.Webhook.vector:type_name -> source_metadata.Vector
1, // 8: source_metadata.MetaData.azure:type_name -> source_metadata.Azure
2, // 9: source_metadata.MetaData.bitbucket:type_name -> source_metadata.Bitbucket
@ -3974,11 +4063,12 @@ var file_source_metadata_proto_depIdxs = []int32{
5, // 35: source_metadata.MetaData.travisCI:type_name -> source_metadata.TravisCI
29, // 36: source_metadata.MetaData.postman:type_name -> source_metadata.Postman
31, // 37: source_metadata.MetaData.webhook:type_name -> source_metadata.Webhook
38, // [38:38] is the sub-list for method output_type
38, // [38:38] is the sub-list for method input_type
38, // [38:38] is the sub-list for extension type_name
38, // [38:38] is the sub-list for extension extendee
0, // [0:38] is the sub-list for field type_name
32, // 38: source_metadata.MetaData.elasticsearch:type_name -> source_metadata.Elasticsearch
39, // [39:39] is the sub-list for method output_type
39, // [39:39] is the sub-list for method input_type
39, // [39:39] is the sub-list for extension type_name
39, // [39:39] is the sub-list for extension extendee
0, // [0:39] is the sub-list for field type_name
}
func init() { file_source_metadata_proto_init() }
@ -4360,6 +4450,18 @@ func file_source_metadata_proto_init() {
}
}
file_source_metadata_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Elasticsearch); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_source_metadata_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MetaData); i {
case 0:
return &v.state
@ -4380,7 +4482,7 @@ func file_source_metadata_proto_init() {
file_source_metadata_proto_msgTypes[30].OneofWrappers = []interface{}{
(*Webhook_Vector)(nil),
}
file_source_metadata_proto_msgTypes[31].OneofWrappers = []interface{}{
file_source_metadata_proto_msgTypes[32].OneofWrappers = []interface{}{
(*MetaData_Azure)(nil),
(*MetaData_Bitbucket)(nil),
(*MetaData_Circleci)(nil),
@ -4411,6 +4513,7 @@ func file_source_metadata_proto_init() {
(*MetaData_TravisCI)(nil),
(*MetaData_Postman)(nil),
(*MetaData_Webhook)(nil),
(*MetaData_Elasticsearch)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
@ -4418,7 +4521,7 @@ func file_source_metadata_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_source_metadata_proto_rawDesc,
NumEnums: 1,
NumMessages: 32,
NumMessages: 33,
NumExtensions: 0,
NumServices: 0,
},

View file

@ -3674,6 +3674,112 @@ var _ interface {
ErrorName() string
} = WebhookValidationError{}
// Validate checks the field values on Elasticsearch with the rules defined in
// the proto definition for this message. If any rules are violated, the first
// error encountered is returned, or nil if there are no violations.
func (m *Elasticsearch) Validate() error {
return m.validate(false)
}
// ValidateAll checks the field values on Elasticsearch with the rules defined
// in the proto definition for this message. If any rules are violated, the
// result is a list of violation errors wrapped in ElasticsearchMultiError, or
// nil if none found.
func (m *Elasticsearch) ValidateAll() error {
return m.validate(true)
}
func (m *Elasticsearch) validate(all bool) error {
if m == nil {
return nil
}
var errors []error
// no validation rules for Index
// no validation rules for DocumentId
// no validation rules for Timestamp
if len(errors) > 0 {
return ElasticsearchMultiError(errors)
}
return nil
}
// ElasticsearchMultiError is an error wrapping multiple validation errors
// returned by Elasticsearch.ValidateAll() if the designated constraints
// aren't met.
type ElasticsearchMultiError []error
// Error returns a concatenation of all the error messages it wraps.
func (m ElasticsearchMultiError) Error() string {
var msgs []string
for _, err := range m {
msgs = append(msgs, err.Error())
}
return strings.Join(msgs, "; ")
}
// AllErrors returns a list of validation violation errors.
func (m ElasticsearchMultiError) AllErrors() []error { return m }
// ElasticsearchValidationError is the validation error returned by
// Elasticsearch.Validate if the designated constraints aren't met.
type ElasticsearchValidationError struct {
field string
reason string
cause error
key bool
}
// Field function returns field value.
func (e ElasticsearchValidationError) Field() string { return e.field }
// Reason function returns reason value.
func (e ElasticsearchValidationError) Reason() string { return e.reason }
// Cause function returns cause value.
func (e ElasticsearchValidationError) Cause() error { return e.cause }
// Key function returns key value.
func (e ElasticsearchValidationError) Key() bool { return e.key }
// ErrorName returns error name.
func (e ElasticsearchValidationError) ErrorName() string { return "ElasticsearchValidationError" }
// Error satisfies the builtin error interface
func (e ElasticsearchValidationError) Error() string {
cause := ""
if e.cause != nil {
cause = fmt.Sprintf(" | caused by: %v", e.cause)
}
key := ""
if e.key {
key = "key for "
}
return fmt.Sprintf(
"invalid %sElasticsearch.%s: %s%s",
key,
e.field,
e.reason,
cause)
}
var _ error = ElasticsearchValidationError{}
var _ interface {
Field() string
Reason() string
Key() bool
Cause() error
ErrorName() string
} = ElasticsearchValidationError{}
// Validate checks the field values on MetaData with the rules defined in the
// proto definition for this message. If any rules are violated, the first
// error encountered is returned, or nil if there are no violations.
@ -4927,6 +5033,47 @@ func (m *MetaData) validate(all bool) error {
}
}
case *MetaData_Elasticsearch:
if v == nil {
err := MetaDataValidationError{
field: "Data",
reason: "oneof value cannot be a typed-nil",
}
if !all {
return err
}
errors = append(errors, err)
}
if all {
switch v := interface{}(m.GetElasticsearch()).(type) {
case interface{ ValidateAll() error }:
if err := v.ValidateAll(); err != nil {
errors = append(errors, MetaDataValidationError{
field: "Elasticsearch",
reason: "embedded message failed validation",
cause: err,
})
}
case interface{ Validate() error }:
if err := v.Validate(); err != nil {
errors = append(errors, MetaDataValidationError{
field: "Elasticsearch",
reason: "embedded message failed validation",
cause: err,
})
}
}
} else if v, ok := interface{}(m.GetElasticsearch()).(interface{ Validate() error }); ok {
if err := v.Validate(); err != nil {
return MetaDataValidationError{
field: "Elasticsearch",
reason: "embedded message failed validation",
cause: err,
}
}
}
default:
_ = v // ensures v is used
}

View file

@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.33.0
// protoc v4.25.3
// protoc-gen-go v1.32.0
// protoc v4.25.2
// source: sources.proto
package sourcespb
@ -63,6 +63,7 @@ const (
SourceType_SOURCE_TYPE_TRAVISCI SourceType = 32
SourceType_SOURCE_TYPE_POSTMAN SourceType = 33
SourceType_SOURCE_TYPE_WEBHOOK SourceType = 34
SourceType_SOURCE_TYPE_ELASTICSEARCH SourceType = 35
)
// Enum value maps for SourceType.
@ -103,6 +104,7 @@ var (
32: "SOURCE_TYPE_TRAVISCI",
33: "SOURCE_TYPE_POSTMAN",
34: "SOURCE_TYPE_WEBHOOK",
35: "SOURCE_TYPE_ELASTICSEARCH",
}
SourceType_value = map[string]int32{
"SOURCE_TYPE_AZURE_STORAGE": 0,
@ -140,6 +142,7 @@ var (
"SOURCE_TYPE_TRAVISCI": 32,
"SOURCE_TYPE_POSTMAN": 33,
"SOURCE_TYPE_WEBHOOK": 34,
"SOURCE_TYPE_ELASTICSEARCH": 35,
}
)
@ -3760,6 +3763,125 @@ type Webhook_Header struct {
func (*Webhook_Header) isWebhook_Credential() {}
type Elasticsearch struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Nodes []string `protobuf:"bytes,1,rep,name=nodes,proto3" json:"nodes,omitempty"`
Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty"`
Password string `protobuf:"bytes,3,opt,name=password,proto3" json:"password,omitempty"`
CloudId string `protobuf:"bytes,4,opt,name=cloud_id,json=cloudId,proto3" json:"cloud_id,omitempty"`
ApiKey string `protobuf:"bytes,5,opt,name=api_key,json=apiKey,proto3" json:"api_key,omitempty"`
ServiceToken string `protobuf:"bytes,6,opt,name=service_token,json=serviceToken,proto3" json:"service_token,omitempty"`
IndexPattern string `protobuf:"bytes,7,opt,name=index_pattern,json=indexPattern,proto3" json:"index_pattern,omitempty"`
QueryJson string `protobuf:"bytes,8,opt,name=query_json,json=queryJson,proto3" json:"query_json,omitempty"`
SinceTimestamp string `protobuf:"bytes,9,opt,name=since_timestamp,json=sinceTimestamp,proto3" json:"since_timestamp,omitempty"`
BestEffortScan bool `protobuf:"varint,10,opt,name=best_effort_scan,json=bestEffortScan,proto3" json:"best_effort_scan,omitempty"`
}
func (x *Elasticsearch) Reset() {
*x = Elasticsearch{}
if protoimpl.UnsafeEnabled {
mi := &file_sources_proto_msgTypes[32]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Elasticsearch) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Elasticsearch) ProtoMessage() {}
func (x *Elasticsearch) ProtoReflect() protoreflect.Message {
mi := &file_sources_proto_msgTypes[32]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Elasticsearch.ProtoReflect.Descriptor instead.
func (*Elasticsearch) Descriptor() ([]byte, []int) {
return file_sources_proto_rawDescGZIP(), []int{32}
}
func (x *Elasticsearch) GetNodes() []string {
if x != nil {
return x.Nodes
}
return nil
}
func (x *Elasticsearch) GetUsername() string {
if x != nil {
return x.Username
}
return ""
}
func (x *Elasticsearch) GetPassword() string {
if x != nil {
return x.Password
}
return ""
}
func (x *Elasticsearch) GetCloudId() string {
if x != nil {
return x.CloudId
}
return ""
}
func (x *Elasticsearch) GetApiKey() string {
if x != nil {
return x.ApiKey
}
return ""
}
func (x *Elasticsearch) GetServiceToken() string {
if x != nil {
return x.ServiceToken
}
return ""
}
func (x *Elasticsearch) GetIndexPattern() string {
if x != nil {
return x.IndexPattern
}
return ""
}
func (x *Elasticsearch) GetQueryJson() string {
if x != nil {
return x.QueryJson
}
return ""
}
func (x *Elasticsearch) GetSinceTimestamp() string {
if x != nil {
return x.SinceTimestamp
}
return ""
}
func (x *Elasticsearch) GetBestEffortScan() bool {
if x != nil {
return x.BestEffortScan
}
return false
}
var File_sources_proto protoreflect.FileDescriptor
var file_sources_proto_rawDesc = []byte{
@ -4318,7 +4440,28 @@ var file_sources_proto_rawDesc = []byte{
0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x63, 0x72,
0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72,
0x48, 0x00, 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x42, 0x0c, 0x0a, 0x0a, 0x63, 0x72,
0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x2a, 0xd0, 0x07, 0x0a, 0x0a, 0x53, 0x6f, 0x75,
0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x22, 0xcd, 0x02, 0x0a, 0x0d, 0x45, 0x6c, 0x61,
0x73, 0x74, 0x69, 0x63, 0x73, 0x65, 0x61, 0x72, 0x63, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x6e, 0x6f,
0x64, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x6e, 0x6f, 0x64, 0x65, 0x73,
0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01,
0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08,
0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x19, 0x0a, 0x08, 0x63, 0x6c, 0x6f, 0x75,
0x64, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6c, 0x6f, 0x75,
0x64, 0x49, 0x64, 0x12, 0x17, 0x0a, 0x07, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x05,
0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x70, 0x69, 0x4b, 0x65, 0x79, 0x12, 0x23, 0x0a, 0x0d,
0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x06, 0x20,
0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x54, 0x6f, 0x6b, 0x65,
0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65,
0x72, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x50,
0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f,
0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x71, 0x75, 0x65, 0x72,
0x79, 0x4a, 0x73, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x0f, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x5f, 0x74,
0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e,
0x73, 0x69, 0x6e, 0x63, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x28,
0x0a, 0x10, 0x62, 0x65, 0x73, 0x74, 0x5f, 0x65, 0x66, 0x66, 0x6f, 0x72, 0x74, 0x5f, 0x73, 0x63,
0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x62, 0x65, 0x73, 0x74, 0x45, 0x66,
0x66, 0x6f, 0x72, 0x74, 0x53, 0x63, 0x61, 0x6e, 0x2a, 0xef, 0x07, 0x0a, 0x0a, 0x53, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x4f, 0x55, 0x52, 0x43,
0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x41, 0x5a, 0x55, 0x52, 0x45, 0x5f, 0x53, 0x54, 0x4f,
0x52, 0x41, 0x47, 0x45, 0x10, 0x00, 0x12, 0x19, 0x0a, 0x15, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45,
@ -4379,11 +4522,13 @@ var file_sources_proto_rawDesc = []byte{
0x52, 0x41, 0x56, 0x49, 0x53, 0x43, 0x49, 0x10, 0x20, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x4f, 0x55,
0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x4f, 0x53, 0x54, 0x4d, 0x41, 0x4e,
0x10, 0x21, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50,
0x45, 0x5f, 0x57, 0x45, 0x42, 0x48, 0x4f, 0x4f, 0x4b, 0x10, 0x22, 0x42, 0x3b, 0x5a, 0x39, 0x67,
0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c,
0x65, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c,
0x65, 0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x73,
0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x45, 0x5f, 0x57, 0x45, 0x42, 0x48, 0x4f, 0x4f, 0x4b, 0x10, 0x22, 0x12, 0x1d, 0x0a, 0x19, 0x53,
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x4c, 0x41, 0x53, 0x54,
0x49, 0x43, 0x53, 0x45, 0x41, 0x52, 0x43, 0x48, 0x10, 0x23, 0x42, 0x3b, 0x5a, 0x39, 0x67, 0x69,
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65,
0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, 0x72, 0x75, 0x66, 0x66, 0x6c, 0x65,
0x68, 0x6f, 0x67, 0x2f, 0x76, 0x33, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x73, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x73, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@ -4399,7 +4544,7 @@ func file_sources_proto_rawDescGZIP() []byte {
}
var file_sources_proto_enumTypes = make([]protoimpl.EnumInfo, 2)
var file_sources_proto_msgTypes = make([]protoimpl.MessageInfo, 32)
var file_sources_proto_msgTypes = make([]protoimpl.MessageInfo, 33)
var file_sources_proto_goTypes = []interface{}{
(SourceType)(0), // 0: sources.SourceType
(Confluence_GetAllSpacesScope)(0), // 1: sources.Confluence.GetAllSpacesScope
@ -4435,70 +4580,71 @@ var file_sources_proto_goTypes = []interface{}{
(*AzureRepos)(nil), // 31: sources.AzureRepos
(*Postman)(nil), // 32: sources.Postman
(*Webhook)(nil), // 33: sources.Webhook
(*durationpb.Duration)(nil), // 34: google.protobuf.Duration
(*anypb.Any)(nil), // 35: google.protobuf.Any
(*credentialspb.BasicAuth)(nil), // 36: credentials.BasicAuth
(*credentialspb.Unauthenticated)(nil), // 37: credentials.Unauthenticated
(*credentialspb.Oauth2)(nil), // 38: credentials.Oauth2
(*credentialspb.KeySecret)(nil), // 39: credentials.KeySecret
(*credentialspb.CloudEnvironment)(nil), // 40: credentials.CloudEnvironment
(*credentialspb.SSHAuth)(nil), // 41: credentials.SSHAuth
(*credentialspb.GitHubApp)(nil), // 42: credentials.GitHubApp
(*credentialspb.AWSSessionTokenSecret)(nil), // 43: credentials.AWSSessionTokenSecret
(*credentialspb.SlackTokens)(nil), // 44: credentials.SlackTokens
(*credentialspb.Header)(nil), // 45: credentials.Header
(*credentialspb.ClientCredentials)(nil), // 46: credentials.ClientCredentials
(*timestamppb.Timestamp)(nil), // 47: google.protobuf.Timestamp
(*Elasticsearch)(nil), // 34: sources.Elasticsearch
(*durationpb.Duration)(nil), // 35: google.protobuf.Duration
(*anypb.Any)(nil), // 36: google.protobuf.Any
(*credentialspb.BasicAuth)(nil), // 37: credentials.BasicAuth
(*credentialspb.Unauthenticated)(nil), // 38: credentials.Unauthenticated
(*credentialspb.Oauth2)(nil), // 39: credentials.Oauth2
(*credentialspb.KeySecret)(nil), // 40: credentials.KeySecret
(*credentialspb.CloudEnvironment)(nil), // 41: credentials.CloudEnvironment
(*credentialspb.SSHAuth)(nil), // 42: credentials.SSHAuth
(*credentialspb.GitHubApp)(nil), // 43: credentials.GitHubApp
(*credentialspb.AWSSessionTokenSecret)(nil), // 44: credentials.AWSSessionTokenSecret
(*credentialspb.SlackTokens)(nil), // 45: credentials.SlackTokens
(*credentialspb.Header)(nil), // 46: credentials.Header
(*credentialspb.ClientCredentials)(nil), // 47: credentials.ClientCredentials
(*timestamppb.Timestamp)(nil), // 48: google.protobuf.Timestamp
}
var file_sources_proto_depIdxs = []int32{
34, // 0: sources.LocalSource.scan_interval:type_name -> google.protobuf.Duration
35, // 1: sources.LocalSource.connection:type_name -> google.protobuf.Any
36, // 2: sources.Artifactory.basic_auth:type_name -> credentials.BasicAuth
37, // 3: sources.Artifactory.unauthenticated:type_name -> credentials.Unauthenticated
36, // 4: sources.AzureStorage.basic_auth:type_name -> credentials.BasicAuth
37, // 5: sources.AzureStorage.unauthenticated:type_name -> credentials.Unauthenticated
38, // 6: sources.Bitbucket.oauth:type_name -> credentials.Oauth2
36, // 7: sources.Bitbucket.basic_auth:type_name -> credentials.BasicAuth
37, // 8: sources.Confluence.unauthenticated:type_name -> credentials.Unauthenticated
36, // 9: sources.Confluence.basic_auth:type_name -> credentials.BasicAuth
35, // 0: sources.LocalSource.scan_interval:type_name -> google.protobuf.Duration
36, // 1: sources.LocalSource.connection:type_name -> google.protobuf.Any
37, // 2: sources.Artifactory.basic_auth:type_name -> credentials.BasicAuth
38, // 3: sources.Artifactory.unauthenticated:type_name -> credentials.Unauthenticated
37, // 4: sources.AzureStorage.basic_auth:type_name -> credentials.BasicAuth
38, // 5: sources.AzureStorage.unauthenticated:type_name -> credentials.Unauthenticated
39, // 6: sources.Bitbucket.oauth:type_name -> credentials.Oauth2
37, // 7: sources.Bitbucket.basic_auth:type_name -> credentials.BasicAuth
38, // 8: sources.Confluence.unauthenticated:type_name -> credentials.Unauthenticated
37, // 9: sources.Confluence.basic_auth:type_name -> credentials.BasicAuth
1, // 10: sources.Confluence.spaces_scope:type_name -> sources.Confluence.GetAllSpacesScope
37, // 11: sources.Docker.unauthenticated:type_name -> credentials.Unauthenticated
36, // 12: sources.Docker.basic_auth:type_name -> credentials.BasicAuth
39, // 13: sources.ECR.access_key:type_name -> credentials.KeySecret
37, // 14: sources.GCS.unauthenticated:type_name -> credentials.Unauthenticated
40, // 15: sources.GCS.adc:type_name -> credentials.CloudEnvironment
38, // 16: sources.GCS.oauth:type_name -> credentials.Oauth2
36, // 17: sources.Git.basic_auth:type_name -> credentials.BasicAuth
37, // 18: sources.Git.unauthenticated:type_name -> credentials.Unauthenticated
41, // 19: sources.Git.ssh_auth:type_name -> credentials.SSHAuth
38, // 20: sources.GitLab.oauth:type_name -> credentials.Oauth2
36, // 21: sources.GitLab.basic_auth:type_name -> credentials.BasicAuth
42, // 22: sources.GitHub.github_app:type_name -> credentials.GitHubApp
37, // 23: sources.GitHub.unauthenticated:type_name -> credentials.Unauthenticated
36, // 24: sources.GitHub.basic_auth:type_name -> credentials.BasicAuth
36, // 25: sources.JIRA.basic_auth:type_name -> credentials.BasicAuth
37, // 26: sources.JIRA.unauthenticated:type_name -> credentials.Unauthenticated
38, // 27: sources.JIRA.oauth:type_name -> credentials.Oauth2
37, // 28: sources.NPMUnauthenticatedPackage.unauthenticated:type_name -> credentials.Unauthenticated
37, // 29: sources.PyPIUnauthenticatedPackage.unauthenticated:type_name -> credentials.Unauthenticated
39, // 30: sources.S3.access_key:type_name -> credentials.KeySecret
37, // 31: sources.S3.unauthenticated:type_name -> credentials.Unauthenticated
40, // 32: sources.S3.cloud_environment:type_name -> credentials.CloudEnvironment
43, // 33: sources.S3.session_token:type_name -> credentials.AWSSessionTokenSecret
44, // 34: sources.Slack.tokens:type_name -> credentials.SlackTokens
36, // 35: sources.Gerrit.basic_auth:type_name -> credentials.BasicAuth
37, // 36: sources.Gerrit.unauthenticated:type_name -> credentials.Unauthenticated
36, // 37: sources.Jenkins.basic_auth:type_name -> credentials.BasicAuth
45, // 38: sources.Jenkins.header:type_name -> credentials.Header
46, // 39: sources.Teams.authenticated:type_name -> credentials.ClientCredentials
38, // 40: sources.Teams.oauth:type_name -> credentials.Oauth2
37, // 41: sources.Forager.unauthenticated:type_name -> credentials.Unauthenticated
47, // 42: sources.Forager.since:type_name -> google.protobuf.Timestamp
44, // 43: sources.SlackRealtime.tokens:type_name -> credentials.SlackTokens
38, // 44: sources.Sharepoint.oauth:type_name -> credentials.Oauth2
38, // 45: sources.AzureRepos.oauth:type_name -> credentials.Oauth2
37, // 46: sources.Postman.unauthenticated:type_name -> credentials.Unauthenticated
45, // 47: sources.Webhook.header:type_name -> credentials.Header
38, // 11: sources.Docker.unauthenticated:type_name -> credentials.Unauthenticated
37, // 12: sources.Docker.basic_auth:type_name -> credentials.BasicAuth
40, // 13: sources.ECR.access_key:type_name -> credentials.KeySecret
38, // 14: sources.GCS.unauthenticated:type_name -> credentials.Unauthenticated
41, // 15: sources.GCS.adc:type_name -> credentials.CloudEnvironment
39, // 16: sources.GCS.oauth:type_name -> credentials.Oauth2
37, // 17: sources.Git.basic_auth:type_name -> credentials.BasicAuth
38, // 18: sources.Git.unauthenticated:type_name -> credentials.Unauthenticated
42, // 19: sources.Git.ssh_auth:type_name -> credentials.SSHAuth
39, // 20: sources.GitLab.oauth:type_name -> credentials.Oauth2
37, // 21: sources.GitLab.basic_auth:type_name -> credentials.BasicAuth
43, // 22: sources.GitHub.github_app:type_name -> credentials.GitHubApp
38, // 23: sources.GitHub.unauthenticated:type_name -> credentials.Unauthenticated
37, // 24: sources.GitHub.basic_auth:type_name -> credentials.BasicAuth
37, // 25: sources.JIRA.basic_auth:type_name -> credentials.BasicAuth
38, // 26: sources.JIRA.unauthenticated:type_name -> credentials.Unauthenticated
39, // 27: sources.JIRA.oauth:type_name -> credentials.Oauth2
38, // 28: sources.NPMUnauthenticatedPackage.unauthenticated:type_name -> credentials.Unauthenticated
38, // 29: sources.PyPIUnauthenticatedPackage.unauthenticated:type_name -> credentials.Unauthenticated
40, // 30: sources.S3.access_key:type_name -> credentials.KeySecret
38, // 31: sources.S3.unauthenticated:type_name -> credentials.Unauthenticated
41, // 32: sources.S3.cloud_environment:type_name -> credentials.CloudEnvironment
44, // 33: sources.S3.session_token:type_name -> credentials.AWSSessionTokenSecret
45, // 34: sources.Slack.tokens:type_name -> credentials.SlackTokens
37, // 35: sources.Gerrit.basic_auth:type_name -> credentials.BasicAuth
38, // 36: sources.Gerrit.unauthenticated:type_name -> credentials.Unauthenticated
37, // 37: sources.Jenkins.basic_auth:type_name -> credentials.BasicAuth
46, // 38: sources.Jenkins.header:type_name -> credentials.Header
47, // 39: sources.Teams.authenticated:type_name -> credentials.ClientCredentials
39, // 40: sources.Teams.oauth:type_name -> credentials.Oauth2
38, // 41: sources.Forager.unauthenticated:type_name -> credentials.Unauthenticated
48, // 42: sources.Forager.since:type_name -> google.protobuf.Timestamp
45, // 43: sources.SlackRealtime.tokens:type_name -> credentials.SlackTokens
39, // 44: sources.Sharepoint.oauth:type_name -> credentials.Oauth2
39, // 45: sources.AzureRepos.oauth:type_name -> credentials.Oauth2
38, // 46: sources.Postman.unauthenticated:type_name -> credentials.Unauthenticated
46, // 47: sources.Webhook.header:type_name -> credentials.Header
48, // [48:48] is the sub-list for method output_type
48, // [48:48] is the sub-list for method input_type
48, // [48:48] is the sub-list for extension type_name
@ -4896,6 +5042,18 @@ func file_sources_proto_init() {
return nil
}
}
file_sources_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Elasticsearch); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_sources_proto_msgTypes[1].OneofWrappers = []interface{}{
(*Artifactory_BasicAuth)(nil),
@ -5024,7 +5182,7 @@ func file_sources_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_sources_proto_rawDesc,
NumEnums: 2,
NumMessages: 32,
NumMessages: 33,
NumExtensions: 0,
NumServices: 0,
},

View file

@ -5775,3 +5775,121 @@ var _ interface {
Cause() error
ErrorName() string
} = WebhookValidationError{}
// Validate checks the field values on Elasticsearch with the rules defined in
// the proto definition for this message. If any rules are violated, the first
// error encountered is returned, or nil if there are no violations.
func (m *Elasticsearch) Validate() error {
	return m.validate(false)
}

// ValidateAll checks the field values on Elasticsearch with the rules defined
// in the proto definition for this message. If any rules are violated, the
// result is a list of violation errors wrapped in ElasticsearchMultiError, or
// nil if none found.
func (m *Elasticsearch) ValidateAll() error {
	return m.validate(true)
}

// validate is the single implementation backing Validate and ValidateAll.
// The Elasticsearch message declares no field constraints, so the errors
// slice is never populated and this currently always returns nil.
// (Generated by protoc-gen-validate; do not edit by hand.)
func (m *Elasticsearch) validate(all bool) error {
	if m == nil {
		return nil
	}

	var errors []error

	// no validation rules for Username

	// no validation rules for Password

	// no validation rules for CloudId

	// no validation rules for ApiKey

	// no validation rules for ServiceToken

	// no validation rules for IndexPattern

	// no validation rules for QueryJson

	// no validation rules for SinceTimestamp

	// no validation rules for BestEffortScan

	if len(errors) > 0 {
		return ElasticsearchMultiError(errors)
	}

	return nil
}
// ElasticsearchMultiError is an error wrapping multiple validation errors
// returned by Elasticsearch.ValidateAll() if the designated constraints
// aren't met.
type ElasticsearchMultiError []error
// Error returns a concatenation of all the error messages it wraps.
func (m ElasticsearchMultiError) Error() string {
var msgs []string
for _, err := range m {
msgs = append(msgs, err.Error())
}
return strings.Join(msgs, "; ")
}
// AllErrors returns a list of validation violation errors.
func (m ElasticsearchMultiError) AllErrors() []error { return m }
// ElasticsearchValidationError is the validation error returned by
// Elasticsearch.Validate if the designated constraints aren't met.
type ElasticsearchValidationError struct {
field string
reason string
cause error
key bool
}
// Field function returns field value.
func (e ElasticsearchValidationError) Field() string { return e.field }
// Reason function returns reason value.
func (e ElasticsearchValidationError) Reason() string { return e.reason }
// Cause function returns cause value.
func (e ElasticsearchValidationError) Cause() error { return e.cause }
// Key function returns key value.
func (e ElasticsearchValidationError) Key() bool { return e.key }
// ErrorName returns error name.
func (e ElasticsearchValidationError) ErrorName() string { return "ElasticsearchValidationError" }
// Error satisfies the builtin error interface
func (e ElasticsearchValidationError) Error() string {
cause := ""
if e.cause != nil {
cause = fmt.Sprintf(" | caused by: %v", e.cause)
}
key := ""
if e.key {
key = "key for "
}
return fmt.Sprintf(
"invalid %sElasticsearch.%s: %s%s",
key,
e.field,
e.reason,
cause)
}
var _ error = ElasticsearchValidationError{}
var _ interface {
Field() string
Reason() string
Key() bool
Cause() error
ErrorName() string
} = ElasticsearchValidationError{}

View file

@ -0,0 +1,472 @@
package elasticsearch
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"slices"
"strings"
"sync"
"time"
es "github.com/elastic/go-elasticsearch/v8"
"github.com/elastic/go-elasticsearch/v8/esapi"
)
// PAGE_SIZE is the number of hits requested per page when paginating
// search results.
const PAGE_SIZE = 10

// IndexStatus represents the status of an index.
// NOTE(review): no values or uses of this type appear in this file — confirm
// it is referenced elsewhere before keeping it.
type IndexStatus int

// FilterParams holds the user-supplied options that narrow which indices and
// documents are scanned.
type FilterParams struct {
	indexPattern   string // index name pattern, e.g. "*" or "logs-*"
	queryJSON      string // optional raw JSON query clause merged into searches
	sinceTimestamp string // optional lower bound for @timestamp
}

// PointInTime identifies an Elasticsearch point-in-time (PIT), which gives
// paginated searches a consistent snapshot of an index.
type PointInTime struct {
	ID        string `json:"id"`
	KeepAlive string `json:"keep_alive"`
}

// SearchRequestBody is the JSON body of a paginated PIT search request.
type SearchRequestBody struct {
	PIT         PointInTime    `json:"pit"`
	Sort        []string       `json:"sort"`
	SearchAfter []int          `json:"search_after,omitempty"` // resume cursor from the previous page
	Query       map[string]any `json:"query,omitempty"`
}

// Document is a single Elasticsearch document: its ID plus the @timestamp
// and message fields extracted from _source.
type Document struct {
	id        string
	timestamp string
	message   string
}

// Index tracks per-index scan state. latestTimestamp/latestDocumentIDs are
// mutated concurrently by workers, hence the lock.
type Index struct {
	name                   string
	documentCount          int       // documents matching the filters as of the last Indices.Update
	latestTimestamp        time.Time // newest @timestamp seen so far
	latestTimestampLastRun time.Time // newest @timestamp as of the previous scan pass
	latestDocumentIDs      []string  // document IDs recorded for cross-pass deduplication
	lock                   sync.RWMutex
}

// Indices is the set of indices being scanned plus aggregate progress
// counters.
type Indices struct {
	indices                 []*Index
	documentCount           int // total matching documents across all indices
	processedDocumentsCount int // documents processed so far (progress reporting)
	filterParams            *FilterParams
	lock                    sync.RWMutex
}

// elasticSearchRequest abstracts the esapi request types used in this file so
// they can share a single execute-and-decode helper.
type elasticSearchRequest interface {
	Do(providedCtx context.Context, transport esapi.Transport) (*esapi.Response, error)
}
// Query builds the top-level search body ({"query": ...}) for this filter.
//
// The base clause is decoded from queryJSON when present. A "range" clause on
// @timestamp is then added: latestTimestamp (the newest document seen so far)
// takes precedence, falling back to the user-supplied sinceTimestamp. With no
// JSON query and no timestamp bound, the clause is an empty map, which
// Elasticsearch treats as "match everything".
func (fp *FilterParams) Query(latestTimestamp time.Time) (map[string]any, error) {
	clause := make(map[string]any)
	if fp.queryJSON != "" {
		if err := json.Unmarshal([]byte(fp.queryJSON), &clause); err != nil {
			return nil, err
		}
	}

	// The two timestamp sources previously built identical range clauses in
	// duplicated branches; pick the bound once and build the clause once.
	var gte string
	switch {
	case !latestTimestamp.IsZero():
		gte = latestTimestamp.Format(time.RFC3339)
	case fp.sinceTimestamp != "":
		gte = fp.sinceTimestamp
	}
	if gte != "" {
		clause["range"] = map[string]map[string]string{
			"@timestamp": {"gte": gte},
		}
	}

	return map[string]any{"query": clause}, nil
}
// NewIndex returns a zero-valued Index ready for use.
func NewIndex() *Index {
	return new(Index)
}
// DocumentAlreadySeen reports whether document was already processed in a
// previous scan pass, updating the index's latest-timestamp bookkeeping as a
// side effect. Documents whose timestamps fail to parse as RFC 3339 are never
// considered seen.
func (i *Index) DocumentAlreadySeen(document *Document) bool {
	parsedTimestamp, err := time.Parse(time.RFC3339, document.timestamp)
	if err != nil {
		return false
	}

	// We mutate the index in different ways depending on whether the timestamp
	// is newer, equal, or older than its current latest timestamp, so
	// everything at this point must be write synchronized.
	i.lock.Lock()
	defer i.lock.Unlock()

	// Strictly newer than anything seen: advance the high-water mark and
	// restart the ID list tracked for that timestamp.
	if parsedTimestamp.After(i.latestTimestamp) {
		i.latestTimestamp = parsedTimestamp
		i.latestDocumentIDs = i.latestDocumentIDs[:0]
		return false
	}

	// Only documents at the timestamp carried over from the previous pass can
	// have been reported before; check the recorded IDs for a match.
	if i.latestTimestamp.Equal(i.latestTimestampLastRun) &&
		slices.Contains(i.latestDocumentIDs, document.id) {
		return true
	}

	// Not seen yet: remember the ID so a later pass can skip it.
	// NOTE(review): this also records IDs of documents strictly older than
	// latestTimestamp — confirm that is intended rather than only tracking
	// IDs at the latest timestamp.
	i.latestDocumentIDs = append(i.latestDocumentIDs, document.id)
	return false
}
// UpdateLatestTimestampLastRun records the newest timestamp observed during
// the pass that just finished, so the next pass can distinguish documents it
// already reported from genuinely new ones.
func (i *Index) UpdateLatestTimestampLastRun() {
	i.lock.Lock()
	defer i.lock.Unlock()
	i.latestTimestampLastRun = i.latestTimestamp
}
// makeElasticSearchRequest executes req against the given transport and
// decodes the JSON response body into a generic map.
func makeElasticSearchRequest(
	ctx context.Context,
	transport esapi.Transport,
	req elasticSearchRequest,
) (map[string]any, error) {
	res, err := req.Do(ctx, transport)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()

	raw, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}

	decoded := make(map[string]any)
	if err := json.Unmarshal(raw, &decoded); err != nil {
		return nil, err
	}
	return decoded, nil
}
// fetchIndexNames returns the names of all indices matching indexPattern.
func fetchIndexNames(
	ctx context.Context,
	client *es.TypedClient,
	indexPattern string,
) ([]string, error) {
	req := esapi.IndicesGetRequest{
		Index: []string{indexPattern},
	}

	data, err := makeElasticSearchRequest(ctx, client, req)
	if err != nil {
		return nil, err
	}

	// The response object is keyed by index name; collect the keys.
	// (Idiomatic append over the previous counter-indexed fill.)
	names := make([]string, 0, len(data))
	for indexName := range data {
		names = append(names, indexName)
	}
	return names, nil
}
// fetchIndexDocumentCount returns the number of documents in indexName that
// match query (a search body whose "query" key holds the filter clause).
// It issues a search with size 0 so only the hit count is computed.
func fetchIndexDocumentCount(
	ctx context.Context,
	client *es.TypedClient,
	indexName string,
	query map[string]any,
) (int, error) {
	size := 0
	req := esapi.SearchRequest{
		Index:      []string{indexName},
		SearchType: "query_then_fetch",
		Size:       &size,
	}

	// Only attach a body when there's a non-empty query clause; no body means
	// "match everything". The ok-guard prevents a panic if the caller passes
	// a map without a "query" key (the old code asserted unconditionally).
	if clause, ok := query["query"].(map[string]any); ok && len(clause) > 0 {
		body, err := json.Marshal(query) // compact JSON; indentation is wasted bytes on the wire
		if err != nil {
			return 0, err
		}
		req.Body = strings.NewReader(string(body))
	}

	data, err := makeElasticSearchRequest(ctx, client, req)
	if err != nil {
		return 0, err
	}

	hits, ok := data["hits"].(map[string]any)
	if !ok {
		return 0, errors.New("no hits in response")
	}
	total, ok := hits["total"].(map[string]any)
	if !ok {
		return 0, errors.New("no total in hits")
	}
	count, ok := total["value"].(float64)
	if !ok {
		return 0, errors.New("no value in total")
	}

	return int(count), nil
}
// createPITForSearch opens a point-in-time (PIT) on the search's index, so
// that paginated queries observe a consistent snapshot, and returns its ID.
// The PIT has a 1m keep-alive and is left to expire server-side.
func createPITForSearch(
	ctx context.Context,
	client *es.TypedClient,
	docSearch *DocumentSearch,
) (string, error) {
	req := esapi.OpenPointInTimeRequest{
		Index:     []string{docSearch.index.name},
		KeepAlive: "1m",
	}

	data, err := makeElasticSearchRequest(ctx, client, req)
	if err != nil {
		return "", err
	}

	// Error strings are lowercase per Go convention (was "No id in response").
	pitID, ok := data["id"].(string)
	if !ok {
		return "", errors.New("no id in response")
	}
	return pitID, nil
}
// processSearchedDocuments pages through the documents selected by docSearch,
// invoking processDocument for each document past the search's skip window,
// and returns the number of documents processed (not merely fetched).
//
// Pagination uses a point-in-time (PIT) with "search_after" so the result set
// is stable across pages. While still inside the skip window, pages are
// fetched without _source to avoid transferring document bodies.
func processSearchedDocuments(
	ctx context.Context,
	client *es.TypedClient,
	docSearch *DocumentSearch,
	processDocument func(document *Document) error,
) (int, error) {
	pitID, err := createPITForSearch(ctx, client, docSearch)
	if err != nil {
		return 0, err
	}

	documentsFetched := 0
	documentsProcessed := 0
	sort := []int{}

	for documentsProcessed < docSearch.documentCount {
		searchReqBody := SearchRequestBody{
			PIT: PointInTime{
				ID:        pitID,
				KeepAlive: "1m",
			},
			Sort: []string{"_shard_doc"},
		}

		query, err := docSearch.filterParams.Query(docSearch.index.latestTimestamp)
		if err != nil {
			return 0, err
		}
		searchReqBody.Query = query["query"].(map[string]any)

		// Resume after the last hit of the previous page, if any.
		if len(sort) > 0 {
			searchReqBody.SearchAfter = sort
		}

		body, err := json.Marshal(searchReqBody) // compact JSON; no need to indent wire payloads
		if err != nil {
			return 0, err
		}

		req := esapi.SearchRequest{
			Body: strings.NewReader(string(body)),
		}

		// If we've yet to reach our offset, or if we're still in the "skip"
		// phase of scanning, don't actually fetch any document bodies.
		skipCount := docSearch.offset + docSearch.skipCount
		processingDocuments := documentsFetched+PAGE_SIZE > skipCount
		if processingDocuments {
			req.SourceIncludes = []string{"@timestamp", "message"}
		} else {
			req.SourceExcludes = []string{"*"}
			req.SearchType = "query_then_fetch"
		}

		searchResults, err := makeElasticSearchRequest(ctx, client, req)
		if err != nil {
			return 0, err
		}

		topLevelHits, ok := searchResults["hits"].(map[string]any)
		if !ok {
			if apiErr, ok := searchResults["error"].(map[string]any); ok {
				return 0, fmt.Errorf("fetching search results: %v", apiErr)
			}
			continue
		}

		hits, ok := topLevelHits["hits"].([]any)
		if !ok {
			continue
		}
		if len(hits) == 0 {
			break
		}

		for _, jsonHit := range hits {
			documentsFetched++

			hit, ok := jsonHit.(map[string]any)
			if !ok {
				continue
			}

			// Track the sort key of every hit — even skipped ones — so the
			// next page's search_after resumes from the right place.
			jsonSort, ok := hit["sort"].([]any)
			if !ok {
				continue
			}
			sort = sort[:0]
			for _, elem := range jsonSort {
				sort = append(sort, int(elem.(float64)))
			}

			if documentsFetched <= skipCount {
				continue
			}

			id, ok := hit["_id"].(string)
			if !ok {
				continue
			}
			source, ok := hit["_source"].(map[string]any)
			if !ok {
				continue
			}
			timestamp, ok := source["@timestamp"].(string)
			if !ok {
				continue
			}
			message, ok := source["message"].(string)
			if !ok {
				continue
			}

			document := Document{
				id:        id,
				timestamp: timestamp,
				message:   message,
			}
			// BUG FIX: a processDocument failure was previously swallowed
			// ("return 0, nil"); propagate the error to the caller.
			if err := processDocument(&document); err != nil {
				return documentsProcessed, err
			}
			documentsProcessed++
		}
	}

	return documentsProcessed, nil
}
// GetProcessedDocumentCount returns the number of documents processed so far
// within these indices.
func (indices *Indices) GetProcessedDocumentCount() int {
	indices.lock.RLock()
	defer indices.lock.RUnlock()
	return indices.processedDocumentsCount
}
// UpdateProcessedDocumentCount adds additionalDocumentsProcessed to the
// running total used for progress reporting.
func (indices *Indices) UpdateProcessedDocumentCount(additionalDocumentsProcessed int) {
	indices.lock.Lock()
	defer indices.lock.Unlock()
	indices.processedDocumentsCount += additionalDocumentsProcessed
}
// Update refreshes this set of indices from the Elasticsearch cluster.
// Indices that were deleted are dropped, new ones are added, and every
// index's document count is refreshed (based on filterParams and the index's
// latest seen timestamp).
func (indices *Indices) Update(
	ctx context.Context,
	client *es.TypedClient,
) error {
	indexNames, err := fetchIndexNames(ctx, client, indices.filterParams.indexPattern)
	if err != nil {
		return err
	}

	// Index the existing Index objects by name so scan state (latest
	// timestamps, seen document IDs) survives across updates. Ranging over a
	// nil slice is a no-op, so no nil check is needed.
	known := make(map[string]*Index)
	for _, index := range indices.indices {
		known[index.name] = index
	}

	// Build the new index set in a single pass over the fetched names:
	// reuse known indices, create the rest, and refresh each document count.
	updated := make(map[string]*Index)
	for _, name := range indexNames {
		index, ok := known[name]
		if !ok {
			index = NewIndex()
			index.name = name
		}
		updated[name] = index

		query, err := indices.filterParams.Query(index.latestTimestamp)
		if err != nil {
			return err
		}
		count, err := fetchIndexDocumentCount(ctx, client, name, query)
		if err != nil {
			return err
		}
		index.documentCount = count
	}

	indices.indices = make([]*Index, 0, len(updated))
	indices.documentCount = 0
	indices.processedDocumentsCount = 0
	for _, index := range updated {
		indices.indices = append(indices.indices, index)
		indices.documentCount += index.documentCount
	}
	return nil
}

View file

@ -0,0 +1,274 @@
package elasticsearch
import (
"fmt"
"time"
es "github.com/elastic/go-elasticsearch/v8"
"github.com/go-errors/errors"
"github.com/go-logr/logr"
"github.com/trufflesecurity/trufflehog/v3/pkg/common"
"github.com/trufflesecurity/trufflehog/v3/pkg/context"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/source_metadatapb"
"github.com/trufflesecurity/trufflehog/v3/pkg/pb/sourcespb"
"github.com/trufflesecurity/trufflehog/v3/pkg/sanitizer"
"github.com/trufflesecurity/trufflehog/v3/pkg/sources"
"golang.org/x/sync/errgroup"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/types/known/anypb"
)
// SourceType is the protobuf source type handled by this package.
const SourceType = sourcespb.SourceType_SOURCE_TYPE_ELASTICSEARCH

// Source scans Elasticsearch clusters for secrets.
type Source struct {
	name           string
	sourceId       sources.SourceID
	jobId          sources.JobID
	concurrency    int          // number of parallel scan workers
	verify         bool         // whether detected credentials should be verified
	esConfig       es.Config    // connection config; each worker builds its own client from it
	filterParams   FilterParams // index pattern / query / since-timestamp filters
	bestEffortScan bool         // when true, Chunks loops forever catching up with new documents
	ctx            context.Context
	client         *es.TypedClient
	log            logr.Logger
	sources.Progress
}
// Init returns an initialized Elasticsearch source
func (s *Source) Init(
	aCtx context.Context,
	name string,
	jobId sources.JobID,
	sourceId sources.SourceID,
	verify bool,
	connection *anypb.Any,
	concurrency int,
) error {
	var conn sourcespb.Elasticsearch
	if err := anypb.UnmarshalTo(connection, &conn, proto.UnmarshalOptions{}); err != nil {
		return errors.WrapPrefix(err, "error unmarshalling connection", 0)
	}

	s.name = name
	s.sourceId = sourceId
	s.jobId = jobId
	s.concurrency = concurrency
	s.verify = verify
	s.ctx = aCtx
	s.log = aCtx.Logger()

	// String fields can be assigned unconditionally: an unset proto field is
	// "" and so is the es.Config zero value. Addresses keeps its guard so an
	// empty node list leaves the field nil.
	esConfig := es.Config{
		Username:     conn.Username,
		Password:     conn.Password,
		CloudID:      conn.CloudId,
		APIKey:       conn.ApiKey,
		ServiceToken: conn.ServiceToken,
	}
	if len(conn.Nodes) > 0 {
		esConfig.Addresses = conn.Nodes
	}
	s.esConfig = esConfig

	// Default to scanning every index when no pattern was supplied.
	s.filterParams.indexPattern = conn.IndexPattern
	if s.filterParams.indexPattern == "" {
		s.filterParams.indexPattern = "*"
	}
	s.filterParams.queryJSON = conn.QueryJson
	s.filterParams.sinceTimestamp = conn.SinceTimestamp

	s.bestEffortScan = conn.BestEffortScan

	client, err := es.NewTypedClient(s.esConfig)
	if err != nil {
		return err
	}
	s.client = client

	return nil
}
// Type returns the protobuf source type for Elasticsearch.
func (s *Source) Type() sourcespb.SourceType {
	return SourceType
}

// SourceID returns the configured source ID.
func (s *Source) SourceID() sources.SourceID {
	return s.sourceId
}

// JobID returns the configured job ID.
func (s *Source) JobID() sources.JobID {
	return s.jobId
}
// Chunks emits chunks of bytes over a channel.
//
// Indices matching the configured filters are scanned in parallel by up to
// s.concurrency workers. When --best-effort-scan is set the loop repeats
// forever, refreshing index state each pass and skipping a proportional share
// of documents whenever new ones arrive faster than they can be scanned;
// otherwise a single pass is performed.
//
// NOTE(review): Elasticsearch API calls use s.ctx while chunk writes use the
// ctx parameter — confirm this split of cancellation scopes is intentional.
func (s *Source) Chunks(
	ctx context.Context,
	chunksChan chan *sources.Chunk,
	targets ...sources.ChunkingTarget,
) error {
	indices := Indices{filterParams: &s.filterParams}

	for {
		workerPool := new(errgroup.Group)
		workerPool.SetLimit(s.concurrency)

		previousDocumentCount := indices.documentCount
		if err := indices.Update(s.ctx, s.client); err != nil {
			return err
		}

		// Don't burn up the ES API with rapid requests if there's no work to
		// do, but stay responsive to cancellation while waiting. (The old
		// code parsed "5s" at runtime and discarded the error.)
		if previousDocumentCount > 0 && indices.documentCount == 0 {
			select {
			case <-ctx.Done():
				return ctx.Err()
			case <-time.After(5 * time.Second):
			}
			continue
		}

		// The scanCoverageRate is documentsScanned / documentsAdded. If it's
		// < 1 we need each DocumentSearch to skip some records. (The old log
		// formatted this ratio with %% as though it were a percentage.)
		scanCoverageRate := 1.0
		if previousDocumentCount > 0 && previousDocumentCount < indices.documentCount {
			scanCoverageRate =
				float64(previousDocumentCount) / float64(indices.documentCount)
			s.log.V(1).Info(fmt.Sprintf(
				"Scan coverage rate is %.2f (%d/%d); skipping documents to catch up",
				scanCoverageRate,
				previousDocumentCount,
				indices.documentCount,
			))
		}

		unitsOfWork := distributeDocumentScans(&indices, s.concurrency, scanCoverageRate)

		for outerUOWIndex, outerUOW := range unitsOfWork {
			// Copy the loop variables for the closure (pre-Go 1.22 semantics).
			uowIndex := outerUOWIndex
			uow := outerUOW

			workerPool.Go(func() error {
				// Give each worker its own client
				client, err := es.NewTypedClient(s.esConfig)
				if err != nil {
					return err
				}

				uowDocumentsProcessed := 0

				for _, docSearch := range uow.documentSearches {
					documentsProcessed, err := processSearchedDocuments(
						s.ctx,
						client,
						&docSearch,
						func(document *Document) error {
							// Skip documents already reported in a prior pass.
							if docSearch.index.DocumentAlreadySeen(document) {
								return nil
							}

							chunk := sources.Chunk{
								SourceType: s.Type(),
								SourceName: s.name,
								SourceID:   s.SourceID(),
								JobID:      s.JobID(),
								SourceMetadata: &source_metadatapb.MetaData{
									Data: &source_metadatapb.MetaData_Elasticsearch{
										Elasticsearch: &source_metadatapb.Elasticsearch{
											Index:      sanitizer.UTF8(docSearch.index.name),
											DocumentId: sanitizer.UTF8(document.id),
											Timestamp:  sanitizer.UTF8(document.timestamp),
										},
									},
								},
								Verify: s.verify,
							}
							chunk.Data = []byte(document.message)

							return common.CancellableWrite(ctx, chunksChan, &chunk)
						},
					)
					if err != nil {
						return err
					}

					s.log.V(2).Info(fmt.Sprintf(
						"[Worker %d] Scanned %d documents from index %s",
						uowIndex,
						documentsProcessed,
						docSearch.index.name,
					))

					if documentsProcessed != docSearch.documentCount-docSearch.skipCount {
						// Fixed: the old message rendered "... = docSearch.skipCount"
						// instead of the subtraction actually being compared.
						s.log.V(1).Info(fmt.Sprintf(
							"documentsProcessed != docSearch.documentCount - docSearch.skipCount (%d != %d)",
							documentsProcessed,
							docSearch.documentCount-docSearch.skipCount,
						))
					}

					uowDocumentsProcessed += documentsProcessed
					indices.UpdateProcessedDocumentCount(documentsProcessed)

					s.SetProgressComplete(
						indices.GetProcessedDocumentCount(),
						indices.documentCount,
						fmt.Sprintf(
							"[Worker %d] Scanned %d documents from index %s",
							uowIndex,
							documentsProcessed,
							docSearch.index.name,
						),
						"",
					)

					// When we use the Elastic API in this way, we can't tell
					// it to only return a specific number of documents. We can
					// only say "return a page of documents after this offset".
					// So we might have reached the limit of how many documents
					// we're supposed to process with this worker in the middle
					// of a page, so check for that here.
					//
					// (We could use the API in a different way to get a
					// precise number of documents back, but that use is
					// limited to 10000 documents which we could well exceed)
					if uowDocumentsProcessed >= uow.documentCount {
						break
					}

					docSearch.index.UpdateLatestTimestampLastRun()
				}

				return nil
			})
		}

		if err := workerPool.Wait(); err != nil {
			s.log.V(2).Info(fmt.Sprintf("error waiting on worker pool: %s", err))
		}

		if !s.bestEffortScan {
			break
		}
	}

	return nil
}

View file

@ -0,0 +1,420 @@
//go:build integration
// +build integration
package elasticsearch
import (
"bytes"
"context"
"encoding/json"
"log"
"testing"
"time"
"github.com/brianvoe/gofakeit/v7"
es "github.com/elastic/go-elasticsearch/v8"
"github.com/elastic/go-elasticsearch/v8/esapi"
"github.com/testcontainers/testcontainers-go"
elasticcontainer "github.com/testcontainers/testcontainers-go/modules/elasticsearch"
)
const USER string = "elastic" // This is hardcoded in the container

// buildTestClient returns a typed client wired to the test container's
// address, credentials, and CA certificate.
func buildTestClient(
	ec *elasticcontainer.ElasticsearchContainer,
) (*es.TypedClient, error) {
	cfg := es.Config{
		Addresses: []string{ec.Settings.Address},
		Username:  USER,
		Password:  ec.Settings.Password,
		CACert:    ec.Settings.CACert,
	}
	return es.NewTypedClient(cfg)
}
func TestSource_ElasticAPI(t *testing.T) {
ctx := context.Background()
ec, err := elasticcontainer.RunContainer(
ctx,
testcontainers.WithImage("docker.elastic.co/elasticsearch/elasticsearch:8.9.0"),
)
if err != nil {
log.Fatalf("Could not start elasticsearch: %s", err)
}
defer func() {
if err := ec.Terminate(ctx); err != nil {
log.Fatalf("Could not stop elasticsearch: %s", err)
}
}()
es, err := buildTestClient(ec)
if err != nil {
log.Fatalf("error creating the elasticsearch client: %s", err)
}
t.Run("New server contains no indexes", func(t *testing.T) {
indexNames, err := fetchIndexNames(ctx, es, "*")
if err != nil {
t.Error(err)
}
if len(indexNames) != 0 {
t.Errorf("wanted 0 indexNames, got %d\n", len(indexNames))
}
})
indexName := gofakeit.Word()
indexName2 := gofakeit.Word()
now := time.Now()
payload := make(map[string]string)
payload["message"] = gofakeit.SentenceSimple()
payload["@timestamp"] = now.Format(time.RFC3339)
jsonMessage, err := json.Marshal(payload)
if err != nil {
t.Fatal(err)
}
req := esapi.IndexRequest{
Index: indexName,
Body: bytes.NewReader(jsonMessage),
Refresh: "true",
}
res, err := req.Do(ctx, es)
if err != nil {
t.Fatal(err)
}
defer res.Body.Close()
t.Run(
"Adding a document to a new index creates a single index",
func(t *testing.T) {
indexNames, err := fetchIndexNames(ctx, es, "*")
if err != nil {
t.Error(err)
}
if len(indexNames) != 1 {
t.Fatalf("wanted 1 indexNames, got %d\n", len(indexNames))
}
if indexNames[0] != indexName {
t.Errorf("wanted index name \"%s\", got %s", indexName, indexNames[0])
}
},
)
nowAgain := time.Now()
payload2 := make(map[string]string)
payload2["message"] = gofakeit.SentenceSimple()
payload2["@timestamp"] = nowAgain.Format(time.RFC3339)
jsonMessage, err = json.Marshal(payload)
if err != nil {
t.Fatal(err)
}
req = esapi.IndexRequest{
Index: indexName2,
Body: bytes.NewReader(jsonMessage),
Refresh: "true",
}
res, err = req.Do(ctx, es)
if err != nil {
t.Fatal(err)
}
defer res.Body.Close()
t.Run(
"Indices have the correct document count",
func(t *testing.T) {
indices := Indices{filterParams: &FilterParams{indexPattern: "*"}}
err := indices.Update(ctx, es)
if err != nil {
t.Fatal(err)
}
if len(indices.indices) != 2 {
t.Errorf("wanted 2 indices, got %d\n", len(indices.indices))
}
if indices.indices[0].documentCount != 1 {
t.Errorf(
"wanted documentCount of 1 in 1st index, got %d\n",
indices.indices[0].documentCount,
)
}
if indices.indices[1].documentCount != 1 {
t.Errorf(
"wanted documentCount of 1 in 2nd index, got %d\n",
indices.indices[1].documentCount,
)
}
},
)
t.Run(
"A single unit of work has the correct max document count",
func(t *testing.T) {
indices := Indices{filterParams: &FilterParams{indexPattern: "*"}}
err := indices.Update(ctx, es)
if err != nil {
t.Fatal(err)
}
unitsOfWork := distributeDocumentScans(&indices, 1, 1.0)
if len(unitsOfWork) != 1 {
t.Fatalf("wanted 1 unit of work, got %d\n", len(unitsOfWork))
}
if len(unitsOfWork[0].documentSearches) != 2 {
t.Fatalf(
"wanted 1 doc search in 1st unit of work, got %d\n",
len(unitsOfWork[0].documentSearches),
)
}
if unitsOfWork[0].documentSearches[0].documentCount != 1 {
t.Errorf(
"wanted max document count of 1 in unit of work's 1st doc search, got %d\n",
unitsOfWork[0].documentSearches[0].documentCount,
)
}
if unitsOfWork[0].documentSearches[1].documentCount != 1 {
t.Errorf(
"wanted max document count of 1 in unit of work's 2nd doc search, got %d\n",
unitsOfWork[0].documentSearches[1].documentCount,
)
}
},
)
t.Run(
"Multiple units of work have the correct max document count",
func(t *testing.T) {
indices := Indices{filterParams: &FilterParams{indexPattern: "*"}}
err := indices.Update(ctx, es)
if err != nil {
t.Fatal(err)
}
unitsOfWork := distributeDocumentScans(&indices, 2, 1.0)
if len(unitsOfWork) != 2 {
t.Fatalf("wanted 2 units of work, got %d\n", len(unitsOfWork))
}
if len(unitsOfWork[0].documentSearches) != 1 {
t.Fatalf(
"wanted 1 doc search in 1st unit of work, got %d\n",
len(unitsOfWork[0].documentSearches),
)
}
if len(unitsOfWork[1].documentSearches) != 1 {
t.Fatalf(
"wanted 1 doc search in 2nd unit of work, got %d\n",
len(unitsOfWork[0].documentSearches),
)
}
if unitsOfWork[0].documentSearches[0].documentCount != 1 {
t.Errorf(
"wanted max document count of 1 in 1st unit of work's doc search, got %d\n",
unitsOfWork[0].documentSearches[0].documentCount,
)
}
if unitsOfWork[1].documentSearches[0].documentCount != 1 {
t.Errorf(
"wanted max document count of 1 in 2nd unit of work's doc search, got %d\n",
unitsOfWork[1].documentSearches[0].documentCount,
)
}
},
)
t.Run(
"Adding a document to a new index creates a document count of 1",
func(t *testing.T) {
query := make(map[string]any)
query["query"] = make(map[string]any)
indexDocumentCount, err := fetchIndexDocumentCount(
ctx,
es,
indexName,
query,
)
if err != nil {
t.Error(err)
}
if indexDocumentCount != 1 {
t.Errorf("wanted 1 document count, got %d\n", indexDocumentCount)
}
},
)
t.Run(
	"Stored document matches passed values",
	func(t *testing.T) {
		// Full scan of the single-document index created earlier: offset 0,
		// count 1, no filtering.
		docSearch := DocumentSearch{
			index: &Index{
				name:          indexName,
				documentCount: 1,
			},
			documentCount: 1,
			offset:        0,
			filterParams:  &FilterParams{},
		}
		// Collect each processed document so its fields can be inspected below.
		docs := []Document{}
		docsProcessed, err := processSearchedDocuments(
			ctx,
			es,
			&docSearch,
			func(document *Document) error {
				docs = append(docs, *document)
				return nil
			},
		)
		if err != nil {
			t.Error(err)
		}
		if docsProcessed != 1 {
			t.Fatalf("wanted 1 document processed, got %d\n", docsProcessed)
		}
		if len(docs) != 1 {
			t.Fatalf("wanted 1 document, got %d\n", len(docs))
		}
		// NOTE(review): disabled latestDocumentID assertion left in place —
		// confirm whether it should be re-enabled or deleted.
		// if docSearch.index.latestDocumentID != 0 {
		// t.Errorf("Wanted latestDocumentID 0, got %d\n", docSearch.index.latestDocumentID)
		// }
		// The stored document must round-trip the timestamp and message that
		// were indexed earlier in the test (now / payload).
		doc := docs[0]
		if doc.timestamp != now.Format(time.RFC3339) {
			t.Errorf(
				"wanted timestamp %s, got %s\n",
				now.Format(time.RFC3339),
				doc.timestamp,
			)
		}
		if doc.message != payload["message"] {
			t.Errorf(
				"wanted message %s, got %s\n",
				payload["message"],
				doc.message,
			)
		}
	},
)
t.Run(
	"Correct number of documents is skipped given a skipPercent",
	func(t *testing.T) {
		messagesProcessed := 0
		// Seed 40 more documents: the first 20 into indexName, the rest into
		// indexName2, giving each index 21 documents total.
		for i := 0; i < 40; i++ {
			pl := make(map[string]string)
			pl["message"] = gofakeit.Word()
			pl["@timestamp"] = time.Now().Format(time.RFC3339)
			index := indexName
			if i > 19 {
				index = indexName2
			}
			jsonMsg, err := json.Marshal(pl)
			if err != nil {
				t.Fatal(err)
			}
			req = esapi.IndexRequest{
				Index:   index,
				Body:    bytes.NewReader(jsonMsg),
				Refresh: "true",
			}
			res, err = req.Do(ctx, es)
			if err != nil {
				t.Fatal(err)
			}
			// Close immediately rather than defer: a defer inside this loop
			// would keep all 40 response bodies open until the whole test
			// function returns.
			res.Body.Close()
		}
		// Scanning 21 documents with skipCount 10 should process 11 of them.
		docSearch := DocumentSearch{
			index: &Index{
				name:          indexName,
				documentCount: 21,
			},
			documentCount: 21,
			offset:        0,
			filterParams:  &FilterParams{},
			skipCount:     10,
		}
		documentsProcessed, err := processSearchedDocuments(
			ctx,
			es,
			&docSearch,
			func(document *Document) error {
				messagesProcessed++
				return nil
			},
		)
		if err != nil {
			t.Error(err)
		}
		if documentsProcessed != 11 {
			t.Errorf("wanted 11 documents processed, got %d\n", documentsProcessed)
		}
		if messagesProcessed != 11 {
			t.Errorf("wanted 11 messages processed, got %d\n", messagesProcessed)
		}
		// Repeat for the second index; the callback counter accumulates to 22.
		docSearch = DocumentSearch{
			index: &Index{
				name:          indexName2,
				documentCount: 21,
			},
			documentCount: 21,
			offset:        0,
			filterParams:  &FilterParams{},
			skipCount:     10,
		}
		documentsProcessed, err = processSearchedDocuments(
			ctx,
			es,
			&docSearch,
			func(document *Document) error {
				messagesProcessed++
				return nil
			},
		)
		if err != nil {
			t.Error(err)
		}
		if documentsProcessed != 11 {
			t.Errorf("wanted 11 documents processed, got %d\n", documentsProcessed)
		}
		if messagesProcessed != 22 {
			t.Errorf("wanted 22 messages processed, got %d\n", messagesProcessed)
		}
	},
)
}

View file

@ -0,0 +1,101 @@
package elasticsearch
import "fmt"
// DocumentSearch describes a contiguous slice of documents to scan within a
// single index: starting at offset, covering documentCount documents, with
// skipCount of them skipped (for partial-coverage scans) and filterParams
// applied to the search.
type DocumentSearch struct {
	index         *Index
	offset        int
	documentCount int
	skipCount     int
	filterParams  *FilterParams
}
// UnitOfWork is the set of document searches assigned to a single worker.
// documentCount tracks how many documents have been assigned so far; it never
// exceeds maxDocumentCount (enforced by addSearch).
type UnitOfWork struct {
	maxDocumentCount int
	documentCount    int
	documentSearches []DocumentSearch
}
// NewUnitOfWork returns a UnitOfWork that accepts at most maxDocumentCount
// documents, with an empty (non-nil) list of searches.
func NewUnitOfWork(maxDocumentCount int) UnitOfWork {
	return UnitOfWork{
		maxDocumentCount: maxDocumentCount,
		documentSearches: []DocumentSearch{},
	}
}
// String renders the search as its index name, with "[offset:]" appended when
// the search does not begin at the start of the index.
func (ds *DocumentSearch) String() string {
	if ds.offset <= 0 {
		return ds.index.name
	}
	return fmt.Sprintf("%s [%d:]", ds.index.name, ds.offset)
}
// addSearch appends a search over index (starting at offset) to the unit of
// work, limited by the unit's remaining capacity. skipCount is derived from
// scanCoverageRate, the fraction of documents to actually scan. It returns
// the number of documents added; zero (or negative) means nothing was
// appended.
func (uow *UnitOfWork) addSearch(
	index *Index,
	filterParams *FilterParams,
	offset int,
	scanCoverageRate float64,
) int {
	remaining := index.documentCount - offset
	capacity := uow.maxDocumentCount - uow.documentCount
	added := min(capacity, remaining)
	if added <= 0 {
		return added
	}
	uow.documentSearches = append(uow.documentSearches, DocumentSearch{
		index:         index,
		offset:        offset,
		documentCount: added,
		skipCount:     int(float64(added) * (1.0 - scanCoverageRate)),
		filterParams:  filterParams,
	})
	uow.documentCount += added
	return added
}
// distributeDocumentScans splits the documents of indices into at most
// maxUnits units of work of roughly equal size. An index whose documents do
// not all fit into one unit spills into subsequent units via offsets.
// scanCoverageRate is the fraction of each search's documents to actually
// scan (see UnitOfWork.addSearch).
func distributeDocumentScans(
	indices *Indices,
	maxUnits int,
	scanCoverageRate float64,
) []UnitOfWork {
	// Defensive guard: with zero workers and at least one index, the
	// assignment loop below would index into an empty slice and panic.
	if maxUnits <= 0 {
		return nil
	}
	totalDocumentCount := 0
	for _, index := range indices.indices {
		totalDocumentCount += index.documentCount
	}
	unitsOfWork := make([]UnitOfWork, maxUnits)
	documentsAssigned := 0
	for i := 0; i < maxUnits; i++ {
		documentCount := totalDocumentCount / maxUnits
		// The total number of documents to process might not be perfectly
		// divisible by the number of workers, so make sure any remaining
		// documents get processed by assigning them to the last worker.
		if i == maxUnits-1 {
			documentCount = totalDocumentCount - documentsAssigned
		}
		unitsOfWork[i] = NewUnitOfWork(documentCount)
		documentsAssigned += documentCount
	}
	unitOfWorkIndex := 0
	for _, index := range indices.indices {
		uow := &unitsOfWork[unitOfWorkIndex]
		offset := uow.addSearch(index, indices.filterParams, 0, scanCoverageRate)
		// If we've yet to distribute all the documents in the index, spill the
		// remainder into the next unit of work, and the next, and the next....
		// (The unit capacities sum to totalDocumentCount, so the index stays
		// in range.)
		for offset < index.documentCount {
			unitOfWorkIndex++
			next := &unitsOfWork[unitOfWorkIndex]
			offset += next.addSearch(index, indices.filterParams, offset, scanCoverageRate)
		}
	}
	return unitsOfWork
}

View file

@ -0,0 +1,140 @@
package elasticsearch
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestSource_distributeDocumentScans verifies that documents from several
// indices (including an empty one) are split across two workers: the last
// worker absorbs the division remainder, and the oversized first index spills
// into the second unit of work at the correct offset. With a coverage rate of
// 0.9, only the 14-document search gets a non-zero skipCount (int(14*0.1)=1);
// the 6- and 9-document searches truncate to 0.
func TestSource_distributeDocumentScans(t *testing.T) {
	indices := Indices{
		// gofmt -s style: the element type is elided inside the slice literal.
		indices: []*Index{
			{name: "index", documentCount: 20},
			{name: "index2", documentCount: 9},
			{name: "index3", documentCount: 0},
		},
		filterParams: &FilterParams{},
	}
	t.Run(
		"Distributing 30 documents from 3 indices (1 empty) with 2 workers works",
		func(t *testing.T) {
			uows := distributeDocumentScans(&indices, 2, .9)
			assert.Equal(t, 2, len(uows))
			// Worker 1: first 14 documents of "index" (29 / 2 = 14).
			assert.Equal(t, 14, uows[0].maxDocumentCount)
			assert.Equal(t, 14, uows[0].documentCount)
			assert.Equal(t, 1, len(uows[0].documentSearches))
			assert.Equal(t, "index", uows[0].documentSearches[0].index.name)
			assert.Equal(t, 0, uows[0].documentSearches[0].offset)
			assert.Equal(t, 14, uows[0].documentSearches[0].documentCount)
			assert.Equal(t, 1, uows[0].documentSearches[0].skipCount)
			// Worker 2: remainder of "index" plus all of "index2"; "index3"
			// contributes nothing.
			assert.Equal(t, 15, uows[1].maxDocumentCount)
			assert.Equal(t, 15, uows[1].documentCount)
			assert.Equal(t, 2, len(uows[1].documentSearches))
			assert.Equal(t, "index", uows[1].documentSearches[0].index.name)
			assert.Equal(t, 14, uows[1].documentSearches[0].offset)
			assert.Equal(t, 6, uows[1].documentSearches[0].documentCount)
			assert.Equal(t, 0, uows[1].documentSearches[0].skipCount)
			assert.Equal(t, "index2", uows[1].documentSearches[1].index.name)
			assert.Equal(t, 0, uows[1].documentSearches[1].offset)
			assert.Equal(t, 9, uows[1].documentSearches[1].documentCount)
			assert.Equal(t, 0, uows[1].documentSearches[1].skipCount)
		},
	)
}
// TestSource_addSearch exercises UnitOfWork.addSearch directly: filling a
// unit to capacity, spilling one index across several units via offsets, and
// the no-op cases (already-full unit, empty index). A scanCoverageRate of 1.0
// keeps skipCount at 0 throughout.
func TestSource_addSearch(t *testing.T) {
	index := Index{name: "index1", documentCount: 20}
	index2 := Index{name: "index2", documentCount: 10}
	index3 := Index{name: "index3", documentCount: 0}
	uow := NewUnitOfWork(10)
	// Does filling up a UOW with a larger index work?
	offset := uow.addSearch(&index, &FilterParams{}, 0, 1.0)
	assert.Equal(t, 10, offset)
	assert.Equal(t, 10, uow.maxDocumentCount)
	assert.Equal(t, uow.maxDocumentCount, uow.documentCount)
	assert.Equal(t, 1, len(uow.documentSearches))
	assert.Equal(t, index.name, uow.documentSearches[0].index.name)
	assert.Equal(t, 0, uow.documentSearches[0].offset)
	assert.Equal(t, uow.maxDocumentCount, uow.documentSearches[0].documentCount)
	// Does trying to add another range into a full UOW leave it unchanged?
	offset2 := uow.addSearch(&index2, &FilterParams{}, 0, 1.0)
	assert.Equal(t, 0, offset2)
	assert.Equal(t, 10, uow.maxDocumentCount)
	assert.Equal(t, uow.maxDocumentCount, uow.documentCount)
	assert.Equal(t, 1, len(uow.documentSearches))
	assert.Equal(t, index.name, uow.documentSearches[0].index.name)
	assert.Equal(t, 0, uow.documentSearches[0].offset)
	assert.Equal(t, uow.maxDocumentCount, uow.documentSearches[0].documentCount)
	// Does trying to add an index with no documents leave it unchanged?
	offset += uow.addSearch(&index3, &FilterParams{}, 0, 1.0)
	assert.Equal(t, 10, offset)
	assert.Equal(t, 10, uow.maxDocumentCount)
	assert.Equal(t, uow.maxDocumentCount, uow.documentCount)
	assert.Equal(t, 1, len(uow.documentSearches))
	assert.Equal(t, index.name, uow.documentSearches[0].index.name)
	assert.Equal(t, 0, uow.documentSearches[0].offset)
	assert.Equal(t, uow.maxDocumentCount, uow.documentSearches[0].documentCount)
	// Does filling up another UOW with a larger index work?
	// (Continues index1 from offset 10, consuming 9 of its remaining 10 docs.)
	uow2 := NewUnitOfWork(9)
	offset += uow2.addSearch(&index, &FilterParams{}, offset, 1.0)
	assert.Equal(t, 19, offset)
	assert.Equal(t, 9, uow2.maxDocumentCount)
	assert.Equal(t, uow2.maxDocumentCount, uow2.documentCount)
	assert.Equal(t, 1, len(uow2.documentSearches))
	assert.Equal(t, index.name, uow2.documentSearches[0].index.name)
	assert.Equal(t, 10, uow2.documentSearches[0].offset)
	assert.Equal(t, uow2.maxDocumentCount, uow2.documentSearches[0].documentCount)
	// Does finishing off an index into a UOW with room to spare work?
	// (Only 1 of index1's 20 documents is left at offset 19.)
	uow3 := NewUnitOfWork(9)
	offset += uow3.addSearch(&index, &FilterParams{}, offset, 1.0)
	assert.Equal(t, 20, offset)
	assert.Equal(t, 9, uow3.maxDocumentCount)
	assert.Equal(t, 1, uow3.documentCount)
	assert.Equal(t, 1, len(uow3.documentSearches))
	assert.Equal(t, index.name, uow3.documentSearches[0].index.name)
	assert.Equal(t, 19, uow3.documentSearches[0].offset)
	assert.Equal(t, 1, uow3.documentSearches[0].documentCount)
	uow = NewUnitOfWork(21)
	// Does adding an empty range into a new UOW leave it unchanged?
	offset = uow.addSearch(&index3, &FilterParams{}, 0, 1.0)
	assert.Equal(t, 0, offset)
	assert.Equal(t, 21, uow.maxDocumentCount)
	assert.Equal(t, 0, uow.documentCount)
	assert.Equal(t, 0, len(uow.documentSearches))
	// Does adding a range into a larger UOW work?
	offset = uow.addSearch(&index, &FilterParams{}, 0, 1.0)
	assert.Equal(t, 20, offset)
	assert.Equal(t, 1, len(uow.documentSearches))
	assert.Equal(t, index.name, uow.documentSearches[0].index.name)
	assert.Equal(t, 0, uow.documentSearches[0].offset)
	assert.Equal(t, 20, uow.documentSearches[0].documentCount)
	// Does filling up a UOW that already has a range in it work?
	// (Only 1 slot remains of the 21, so index2 contributes a single doc.)
	offset = uow.addSearch(&index2, &FilterParams{}, 0, 1.0)
	assert.Equal(t, 1, offset)
	assert.Equal(t, 2, len(uow.documentSearches))
	assert.Equal(t, index.name, uow.documentSearches[0].index.name)
	assert.Equal(t, 0, uow.documentSearches[0].offset)
	assert.Equal(t, 20, uow.documentSearches[0].documentCount)
	assert.Equal(t, index2.name, uow.documentSearches[1].index.name)
	assert.Equal(t, 0, uow.documentSearches[1].offset)
	assert.Equal(t, 1, uow.documentSearches[1].documentCount)
}

View file

@ -329,6 +329,19 @@ type PostmanConfig struct {
Filter *common.Filter
}
// ElasticsearchConfig holds connection, authentication, and scan settings for
// the Elasticsearch source.
type ElasticsearchConfig struct {
	// Nodes is the list of Elasticsearch node URLs to connect to.
	Nodes []string
	// Username and Password configure basic authentication.
	Username string
	Password string
	// CloudID identifies an Elastic Cloud deployment (alternative to Nodes).
	CloudID string
	// APIKey and ServiceToken are token-based authentication alternatives.
	APIKey       string
	ServiceToken string
	// IndexPattern selects which indices to scan (--index-pattern).
	IndexPattern string
	// QueryJSON is a raw JSON query used to filter documents (--query-json).
	QueryJSON string
	// SinceTimestamp restricts the scan by document timestamp
	// (--since-timestamp).
	SinceTimestamp string
	// BestEffortScan enables best-effort scan mode (--best-effort-scan).
	BestEffortScan bool
}
// Progress is used to update job completion progress across sources.
type Progress struct {
mut sync.Mutex

View file

@ -300,8 +300,6 @@ message Postman {
string variable_type = 15;
}
message Vector {
google.protobuf.Timestamp timestamp = 1;
string source_type = 2;
@ -314,6 +312,12 @@ message Webhook {
}
}
// Elasticsearch metadata pinpointing where a result was found: the index it
// lives in, the document's ID, and the document's timestamp.
message Elasticsearch {
  string index = 1;
  string document_id = 2;
  string timestamp = 3;
}
message MetaData {
oneof data {
Azure azure = 1;
@ -346,5 +350,6 @@ message MetaData {
TravisCI travisCI = 28;
Postman postman = 29;
Webhook webhook = 30;
Elasticsearch elasticsearch = 31;
}
}

View file

@ -47,6 +47,7 @@ enum SourceType {
SOURCE_TYPE_TRAVISCI = 32;
SOURCE_TYPE_POSTMAN = 33;
SOURCE_TYPE_WEBHOOK = 34;
SOURCE_TYPE_ELASTICSEARCH = 35;
}
message LocalSource {
@ -407,3 +408,16 @@ message Webhook {
credentials.Header header = 2;
}
}
message Elasticsearch {
repeated string nodes = 1;
string username = 2;
string password = 3;
string cloud_id = 4;
string api_key = 5;
string service_token = 6;
string index_pattern = 7;
string query_json = 8;
string since_timestamp = 9;
bool best_effort_scan = 10;
}