diff --git a/.github/workflows/acceptance-test.yml b/.github/workflows/acceptance-test.yml index 9442a103..3280abf8 100644 --- a/.github/workflows/acceptance-test.yml +++ b/.github/workflows/acceptance-test.yml @@ -16,7 +16,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 1.19 + go-version: 1.21 - name: golangci-lint uses: golangci/golangci-lint-action@v3 @@ -40,7 +40,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v4 with: - go-version: 1.19 + go-version: 1.21 - name: Checkout uses: actions/checkout@v3
diff --git a/design/rate_limiters.md b/design/rate_limiters.md new file mode 100644 index 00000000..5ada6acd --- /dev/null +++ b/design/rate_limiters.md @@ -0,0 +1,256 @@
+
+# Rate Limiting
+
+## Overview
+
+Rate limiting can be applied to all `Get`, `List` and column `Hydrate` calls.
+
+For each call, multiple rate limiters may apply. When more than one limiter applies to a call,
+the rate limiter with the largest required wait is respected.
+
+The rate limiters which apply to a call are resolved using `scopes`. Each rate limiter definition specifies
+the scopes which apply to it, for example `region` or `connection`. For each call, the values of these scopes are
+determined and used to identify which limiters apply.
+
+
+## Defining Rate Limiters
+
+Rate limiters may be defined in the plugin definition (by the plugin author) or in HCL config (by the user).
+
+### Plugin Definition
+A rate limiter is defined using the `Definition` struct:
+```go
+type Definition struct {
+  // the limiter name
+  Name string
+  // the actual limiter config
+  FillRate   rate.Limit
+  BucketSize int
+
+  // the scopes which identify this limiter instance
+  // one limiter instance will be created for each combination of scope values which is encountered
+  Scopes []string
+
+  // filter used to target the limiter
+  Where string
+}
+```
+`Scopes` is the list of all the scopes which apply to the rate limiter.
+For example, if you want a rate limiter that applies to a single account, region and service, you could use the scopes
+[`connection`, `region`, `service`].
+(See below for details of predefined vs custom scope names.)
+
+`Where` is a SQL-compatible where clause which allows a rate limiter to be targeted to a specific set of scope values.
+For example, to define a rate limiter for a specific service only, the filter `service = 's3'` could be used.
+
+For example:
+```go
+p := &plugin.Plugin{
+  Name:     "aws",
+  TableMap: map[string]*plugin.Table{...},
+  RateLimiters: []*rate_limiter.Definition{
+    {
+      Name:       "connection-region-service",
+      BucketSize: 10,
+      FillRate:   50,
+      Scopes:     []string{"region", "connection", "service"},
+      Where:      "service = 's3'",
+    },
+  },
+}
+```
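+
+Conceptually, each limiter instance behaves as a token bucket. As a minimal sketch, assuming the bucket is backed by `golang.org/x/time/rate` (which is where the `rate.Limit` type used by `FillRate` comes from) — `newLimiterInstance` is an illustrative helper, not an SDK API:
+
+```go
+// minimal sketch: how a Definition's FillRate/BucketSize could map onto a
+// token-bucket limiter; the helper below is hypothetical, not the SDK's code
+package main
+
+import (
+	"context"
+
+	"golang.org/x/time/rate"
+)
+
+type Definition struct {
+	Name       string
+	FillRate   rate.Limit
+	BucketSize int
+	Scopes     []string
+	Where      string
+}
+
+// one limiter instance would be created per distinct combination of scope values
+func newLimiterInstance(def *Definition) *rate.Limiter {
+	return rate.NewLimiter(def.FillRate, def.BucketSize)
+}
+
+func main() {
+	def := &Definition{Name: "connection-region-service", FillRate: 50, BucketSize: 10}
+	limiter := newLimiterInstance(def)
+
+	// a hydrate call would block here until a token is available
+	_ = limiter.Wait(context.Background())
+}
+```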
+
+### HCL Definition
+Rate limiters may also be defined in HCL, in an `.spc` file in the config folder.
+If a limiter has the same name as one defined in the plugin, it overrides it; otherwise, a new limiter is defined.
+
+```
+limiter "connection-region-service" {
+  plugin      = "aws"
+  bucket_size = 5
+  fill_rate   = 25
+  scope       = ["region", "connection", "service"]
+  where       = "service = 's3'"
+}
+```
+
+## Resolving Rate Limiters
+
+When executing a hydrate call the following steps are followed:
+1) Build the set of rate limiter definitions which may apply to the hydrate call
+2) Build the set of scope values which apply to the hydrate call
+3) Determine which limiter definitions are satisfied by the scope values (looking at both the required scopes and the scope filters)
+4) Build a MultiLimiter from the resulting limiter definitions
+
+### Resolving Scope Values
+Scope values are populated from 3 sources:
+- *implicit* scope values populated automatically
+  - `table`, `connection`
+- *matrix* scope values populated from matrix quals (e.g. `region`)
+- *custom* scope values (tags) which may be defined in `Table` definitions, `HydrateConfig`, `GetConfig` and `ListConfig`
+
+## Paged List Calls
+
+If the list call uses paging, the SDK provides a hook, `WaitForListRateLimit`, which can be called before paging to apply rate limiting to the list call:
+
+```go
+	// List call
+	for paginator.HasMorePages() {
+
+		// apply rate limiting
+		d.WaitForListRateLimit(ctx)
+
+		output, err := paginator.NextPage(ctx)
+		if err != nil {
+			plugin.Logger(ctx).Error("aws_codepipeline_pipeline.listCodepipelinePipelines", "api_error", err)
+			return nil, err
+		}
+		for _, items := range output.Pipelines {
+			d.StreamListItem(ctx, items)
+
+			// context can be cancelled due to manual cancellation or because the row limit has been hit
+			if d.RowsRemaining(ctx) == 0 {
+				return nil, nil
+			}
+		}
+	}
+```
+
+## Scenarios
+
+### 1. Plugin defines a single unscoped rate limiter
+
+```go
+func Plugin(_ context.Context) *plugin.Plugin {
+  p := &plugin.Plugin{
+    Name:     "aws",
+    TableMap: map[string]*plugin.Table{...},
+    RateLimiters: []*rate_limiter.Definition{
+      {
+        FillRate:   50,
+        BucketSize: 10,
+      },
+    },
+    ...
+  }
+
+  return p
+}
+```
+
+### 2. Plugin defines a rate limiter scoped by the implicit scope "connection", the custom scope "service" and the matrix scope "region"
+
+#### Plugin definition
+```go
+func Plugin(_ context.Context) *plugin.Plugin {
+  p := &plugin.Plugin{
+    Name:     pluginName,
+    TableMap: map[string]*plugin.Table{...},
+    RateLimiters: []*rate_limiter.Definition{
+      {
+        FillRate:   50,
+        BucketSize: 10,
+        Scopes: []string{
+          "connection",
+          "service",
+          "region",
+        },
+      },
+    },
+    ...
+  }
+
+  return p
+}
+```
+NOTE: `region` must be defined as a matrix qual in order to use the matrix scope value,
+and `service` must be defined as a custom scope value for the tables or hydrate calls which this limiter targets.
+
+#### 2a. Table definition which defines a "region" key column and sets the "service" scope value for all hydrate calls
+
+```go
+func tableAwsS3AccessPoint(_ context.Context) *plugin.Table {
+  return &plugin.Table{
+    Name: "aws_s3_access_point",
+    List: &plugin.ListConfig{
+      Hydrate:    listS3AccessPoints,
+      KeyColumns: plugin.SingleColumn("region"),
+    },
+    Get: &plugin.GetConfig{
+      KeyColumns: plugin.AllColumns([]string{"name", "region"}),
+      Hydrate:    getS3AccessPoint,
+    },
+    // set "service" scope to "s3" for all hydrate calls
+    Tags: map[string]string{
+      "service": "s3",
+    },
+    Columns: awsRegionalColumns([]*plugin.Column{...}),
+  }
+}
+```
+
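+Following on from this table definition (and steps 2–3 of Resolving Rate Limiters above), the sketch below illustrates the scope values that could be resolved for a hydrate call against `aws_s3_access_point`. The connection and region values are hypothetical examples, and the matching check is a deliberate simplification of the SDK's resolution logic:
+
+```go
+package main
+
+import "fmt"
+
+func main() {
+	// hypothetical scope values resolved for a getS3AccessPoint hydrate call
+	scopeValues := map[string]string{
+		"connection": "aws_prod",            // implicit scope (example value)
+		"table":      "aws_s3_access_point", // implicit scope
+		"region":     "us-east-1",           // matrix scope, from the region matrix qual (example value)
+		"service":    "s3",                  // custom scope, from the table Tags
+	}
+
+	// the scenario 2 limiter requires these scopes; all are present, so that
+	// limiter would be included in the MultiLimiter built for this call
+	required := []string{"connection", "service", "region"}
+	applies := true
+	for _, scope := range required {
+		if _, ok := scopeValues[scope]; !ok {
+			applies = false
+			break
+		}
+	}
+	fmt.Println("limiter applies:", applies) // prints: limiter applies: true
+}
+```
+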
+#### 2b. Hydrate call definition which specifies the "service" scope value
+
+```go
+func tableAwsS3AccountSettings(_ context.Context) *plugin.Table {
+  return &plugin.Table{
+    Name: "aws_s3_account_settings",
+    List: &plugin.ListConfig{...},
+    HydrateConfig: []plugin.HydrateConfig{
+      {
+        Func: getAccountBucketPublicAccessBlock,
+        // set the "service" scope value for this hydrate call
+        Tags: map[string]string{
+          "service": "s3",
+        },
+      },
+    },
+    Columns: awsGlobalRegionColumns([]*plugin.Column{...}),
+  }
+}
+```
+
+### 3. Plugin defines rate limiters for the "s3" and "ec2" services and one for all other services
+NOTE: the service-specific limiters are also scoped by "connection" and "region".
+
+```go
+// scopes used for the service-specific rate limiters
+var rateLimiterScopes = []string{"connection", "service", "region"}
+
+func Plugin(_ context.Context) *plugin.Plugin {
+  p := &plugin.Plugin{
+    Name:     pluginName,
+    TableMap: map[string]*plugin.Table{...},
+    RateLimiters: []*rate_limiter.Definition{
+      // rate limiter for the s3 service
+      {
+        FillRate:   20,
+        BucketSize: 5,
+        Scopes:     rateLimiterScopes,
+        Where:      "service = 's3'",
+      },
+      // rate limiter for the ec2 service
+      {
+        FillRate:   40,
+        BucketSize: 5,
+        Scopes:     rateLimiterScopes,
+        Where:      "service = 'ec2'",
+      },
+      // rate limiter for all other services
+      {
+        FillRate:   75,
+        BucketSize: 10,
+        Where:      "service not in ('s3', 'ec2')",
+      },
+    },
+    ...
+  }
+
+  return p
+}
+```
diff --git a/filter/filter.go b/filter/filter.go new file mode 100644 index 00000000..ee428b47 --- /dev/null +++ b/filter/filter.go @@ -0,0 +1,3501 @@ +// Code generated by pigeon; DO NOT EDIT. + +package filter + +import ( + "bytes" + "errors" + "fmt" + "io" + "math" + "os" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +type ComparisonNode struct { + Type string + Operator CodeNode + Values interface{} +} + +type CodeNode struct { + Type string + Source string + Value string + JsonbSelector []CodeNode +} + +type FunctionNode struct { + Name string + Function CodeNode + Args []CodeNode +} + +func toIfaceSlice(v interface{}) []interface{} { + if v == nil { + return nil + } + return v.([]interface{}) +} + +func eval(first, rest interface{}) []interface{} { + exprs := []interface{}{} + exprs = append(exprs, first) + restSl := toIfaceSlice(rest) + for _, v := range restSl { + restStmt := toIfaceSlice(v) + exprs = append(exprs, restStmt[3]) + } + return exprs +} + +var g = &grammar{ + rules: []*rule{ + { + name: "Input", + pos: position{line: 50, col: 1, offset: 785}, + expr: &actionExpr{ + pos: position{line: 50, col: 10, offset: 794}, + run: (*parser).callonInput1, + expr: &seqExpr{ + pos: position{line: 50, col: 10, offset: 794}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 50, col: 10, offset: 794}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 50, col: 12, offset: 796}, + label: "i", + expr: &ruleRefExpr{ + pos: position{line: 50, col: 14, offset: 798}, + name: "OrComparison", + }, + }, + &ruleRefExpr{ + pos: position{line: 50, col: 27, offset: 811}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 50, col: 29, offset: 813}, + name: "EOF", + }, + }, + }, + }, + }, + { + name: "OrComparison", + pos: position{line: 54, col: 1, offset: 838}, + expr: &actionExpr{ + pos: position{line: 54, col: 17, offset: 854}, + run: (*parser).callonOrComparison1, + expr: &seqExpr{ + pos: position{line: 54, col: 17, offset: 854}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 54, col: 17, offset: 854}, + label: "first", + expr: &ruleRefExpr{ + pos: position{line: 54, col: 23, offset:
860}, + name: "AndComparison", + }, + }, + &labeledExpr{ + pos: position{line: 54, col: 37, offset: 874}, + label: "rest", + expr: &zeroOrMoreExpr{ + pos: position{line: 54, col: 42, offset: 879}, + expr: &seqExpr{ + pos: position{line: 54, col: 44, offset: 881}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 54, col: 44, offset: 881}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 54, col: 46, offset: 883}, + name: "Or", + }, + &ruleRefExpr{ + pos: position{line: 54, col: 49, offset: 886}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 54, col: 51, offset: 888}, + name: "AndComparison", + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "AndComparison", + pos: position{line: 66, col: 1, offset: 1066}, + expr: &actionExpr{ + pos: position{line: 66, col: 18, offset: 1083}, + run: (*parser).callonAndComparison1, + expr: &seqExpr{ + pos: position{line: 66, col: 18, offset: 1083}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 66, col: 18, offset: 1083}, + label: "first", + expr: &ruleRefExpr{ + pos: position{line: 66, col: 24, offset: 1089}, + name: "Comparison", + }, + }, + &labeledExpr{ + pos: position{line: 66, col: 35, offset: 1100}, + label: "rest", + expr: &zeroOrMoreExpr{ + pos: position{line: 66, col: 40, offset: 1105}, + expr: &seqExpr{ + pos: position{line: 66, col: 42, offset: 1107}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 66, col: 42, offset: 1107}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 66, col: 44, offset: 1109}, + name: "And", + }, + &ruleRefExpr{ + pos: position{line: 66, col: 48, offset: 1113}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 66, col: 50, offset: 1115}, + name: "Comparison", + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Comparison", + pos: position{line: 83, col: 1, offset: 1314}, + expr: &choiceExpr{ + pos: position{line: 83, col: 16, offset: 1329}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 83, col: 16, offset: 1329}, + name: "MultiComparison", + }, + &ruleRefExpr{ + pos: position{line: 83, col: 34, offset: 1347}, + name: "NotComparison", + }, + &ruleRefExpr{ + pos: position{line: 83, col: 50, offset: 1363}, + name: "LeftRightComparison", + }, + &ruleRefExpr{ + pos: position{line: 83, col: 72, offset: 1385}, + name: "LikeComparison", + }, + &ruleRefExpr{ + pos: position{line: 83, col: 89, offset: 1402}, + name: "IsComparison", + }, + &ruleRefExpr{ + pos: position{line: 83, col: 104, offset: 1417}, + name: "InComparison", + }, + &ruleRefExpr{ + pos: position{line: 83, col: 119, offset: 1432}, + name: "IdentifierComparison", + }, + }, + }, + }, + { + name: "MultiComparison", + pos: position{line: 85, col: 1, offset: 1454}, + expr: &actionExpr{ + pos: position{line: 85, col: 20, offset: 1473}, + run: (*parser).callonMultiComparison1, + expr: &seqExpr{ + pos: position{line: 85, col: 20, offset: 1473}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 85, col: 20, offset: 1473}, + val: "(", + ignoreCase: false, + want: "\"(\"", + }, + &ruleRefExpr{ + pos: position{line: 85, col: 24, offset: 1477}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 85, col: 26, offset: 1479}, + label: "expr", + expr: &ruleRefExpr{ + pos: position{line: 85, col: 31, offset: 1484}, + name: "OrComparison", + }, + }, + &ruleRefExpr{ + pos: position{line: 85, col: 44, offset: 1497}, + name: "_", + }, + &litMatcher{ + pos: position{line: 85, col: 46, offset: 1499}, + val: ")", + ignoreCase: false, + want: "\")\"", + }, + }, + }, + }, 
+ }, + { + name: "NotComparison", + pos: position{line: 89, col: 1, offset: 1527}, + expr: &actionExpr{ + pos: position{line: 89, col: 18, offset: 1544}, + run: (*parser).callonNotComparison1, + expr: &seqExpr{ + pos: position{line: 89, col: 18, offset: 1544}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 89, col: 18, offset: 1544}, + label: "op", + expr: &ruleRefExpr{ + pos: position{line: 89, col: 21, offset: 1547}, + name: "Not", + }, + }, + &ruleRefExpr{ + pos: position{line: 89, col: 25, offset: 1551}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 89, col: 27, offset: 1553}, + label: "right", + expr: &ruleRefExpr{ + pos: position{line: 89, col: 33, offset: 1559}, + name: "Comparison", + }, + }, + }, + }, + }, + }, + { + name: "LeftRightComparison", + pos: position{line: 98, col: 1, offset: 1718}, + expr: &actionExpr{ + pos: position{line: 98, col: 24, offset: 1741}, + run: (*parser).callonLeftRightComparison1, + expr: &seqExpr{ + pos: position{line: 98, col: 24, offset: 1741}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 98, col: 24, offset: 1741}, + label: "left", + expr: &ruleRefExpr{ + pos: position{line: 98, col: 29, offset: 1746}, + name: "Value", + }, + }, + &ruleRefExpr{ + pos: position{line: 98, col: 35, offset: 1752}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 98, col: 37, offset: 1754}, + label: "op", + expr: &ruleRefExpr{ + pos: position{line: 98, col: 40, offset: 1757}, + name: "CompareOperator", + }, + }, + &ruleRefExpr{ + pos: position{line: 98, col: 56, offset: 1773}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 98, col: 58, offset: 1775}, + label: "right", + expr: &ruleRefExpr{ + pos: position{line: 98, col: 64, offset: 1781}, + name: "Value", + }, + }, + }, + }, + }, + }, + { + name: "LikeComparison", + pos: position{line: 107, col: 1, offset: 1944}, + expr: &actionExpr{ + pos: position{line: 107, col: 19, offset: 1962}, + run: (*parser).callonLikeComparison1, + expr: &seqExpr{ + pos: position{line: 107, col: 19, offset: 1962}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 107, col: 19, offset: 1962}, + label: "left", + expr: &ruleRefExpr{ + pos: position{line: 107, col: 25, offset: 1968}, + name: "Identifier", + }, + }, + &ruleRefExpr{ + pos: position{line: 107, col: 37, offset: 1980}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 107, col: 39, offset: 1982}, + label: "op", + expr: &ruleRefExpr{ + pos: position{line: 107, col: 42, offset: 1985}, + name: "Like", + }, + }, + &ruleRefExpr{ + pos: position{line: 107, col: 47, offset: 1990}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 107, col: 49, offset: 1992}, + label: "right", + expr: &ruleRefExpr{ + pos: position{line: 107, col: 56, offset: 1999}, + name: "String", + }, + }, + }, + }, + }, + }, + { + name: "IsComparison", + pos: position{line: 116, col: 1, offset: 2161}, + expr: &actionExpr{ + pos: position{line: 116, col: 17, offset: 2177}, + run: (*parser).callonIsComparison1, + expr: &seqExpr{ + pos: position{line: 116, col: 17, offset: 2177}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 116, col: 17, offset: 2177}, + label: "left", + expr: &choiceExpr{ + pos: position{line: 116, col: 23, offset: 2183}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 116, col: 23, offset: 2183}, + name: "Identifier", + }, + &ruleRefExpr{ + pos: position{line: 116, col: 36, offset: 2196}, + name: "Null", + }, + &ruleRefExpr{ + pos: position{line: 116, col: 43, offset: 2203}, + name: "Bool", 
+ }, + }, + }, + }, + &ruleRefExpr{ + pos: position{line: 116, col: 49, offset: 2209}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 116, col: 51, offset: 2211}, + label: "op", + expr: &ruleRefExpr{ + pos: position{line: 116, col: 54, offset: 2214}, + name: "Is", + }, + }, + &ruleRefExpr{ + pos: position{line: 116, col: 57, offset: 2217}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 116, col: 59, offset: 2219}, + label: "right", + expr: &choiceExpr{ + pos: position{line: 116, col: 66, offset: 2226}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 116, col: 66, offset: 2226}, + name: "Null", + }, + &ruleRefExpr{ + pos: position{line: 116, col: 73, offset: 2233}, + name: "Bool", + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "InComparison", + pos: position{line: 125, col: 1, offset: 2391}, + expr: &actionExpr{ + pos: position{line: 125, col: 17, offset: 2407}, + run: (*parser).callonInComparison1, + expr: &seqExpr{ + pos: position{line: 125, col: 17, offset: 2407}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 125, col: 17, offset: 2407}, + label: "first", + expr: &ruleRefExpr{ + pos: position{line: 125, col: 23, offset: 2413}, + name: "Value", + }, + }, + &ruleRefExpr{ + pos: position{line: 125, col: 29, offset: 2419}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 125, col: 31, offset: 2421}, + label: "op", + expr: &ruleRefExpr{ + pos: position{line: 125, col: 34, offset: 2424}, + name: "In", + }, + }, + &ruleRefExpr{ + pos: position{line: 125, col: 37, offset: 2427}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 125, col: 39, offset: 2429}, + label: "rest", + expr: &ruleRefExpr{ + pos: position{line: 125, col: 44, offset: 2434}, + name: "InList", + }, + }, + }, + }, + }, + }, + { + name: "IdentifierComparison", + pos: position{line: 139, col: 1, offset: 2695}, + expr: &actionExpr{ + pos: position{line: 139, col: 25, offset: 2719}, + run: (*parser).callonIdentifierComparison1, + expr: &labeledExpr{ + pos: position{line: 139, col: 25, offset: 2719}, + label: "i", + expr: &ruleRefExpr{ + pos: position{line: 139, col: 27, offset: 2721}, + name: "Identifier", + }, + }, + }, + }, + { + name: "InList", + pos: position{line: 152, col: 1, offset: 2861}, + expr: &choiceExpr{ + pos: position{line: 152, col: 11, offset: 2871}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 152, col: 11, offset: 2871}, + name: "EmptyInList", + }, + &ruleRefExpr{ + pos: position{line: 152, col: 25, offset: 2885}, + name: "NonEmptyInList", + }, + }, + }, + }, + { + name: "EmptyInList", + pos: position{line: 154, col: 1, offset: 2901}, + expr: &actionExpr{ + pos: position{line: 154, col: 16, offset: 2916}, + run: (*parser).callonEmptyInList1, + expr: &seqExpr{ + pos: position{line: 154, col: 16, offset: 2916}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 154, col: 16, offset: 2916}, + val: "(", + ignoreCase: false, + want: "\"(\"", + }, + &ruleRefExpr{ + pos: position{line: 154, col: 20, offset: 2920}, + name: "_", + }, + &litMatcher{ + pos: position{line: 154, col: 22, offset: 2922}, + val: ")", + ignoreCase: false, + want: "\")\"", + }, + }, + }, + }, + }, + { + name: "NonEmptyInList", + pos: position{line: 158, col: 1, offset: 2961}, + expr: &actionExpr{ + pos: position{line: 158, col: 19, offset: 2979}, + run: (*parser).callonNonEmptyInList1, + expr: &seqExpr{ + pos: position{line: 158, col: 19, offset: 2979}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 158, col: 19, offset: 2979}, 
+ val: "(", + ignoreCase: false, + want: "\"(\"", + }, + &ruleRefExpr{ + pos: position{line: 158, col: 23, offset: 2983}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 158, col: 25, offset: 2985}, + label: "first", + expr: &ruleRefExpr{ + pos: position{line: 158, col: 31, offset: 2991}, + name: "UnbracketedValue", + }, + }, + &labeledExpr{ + pos: position{line: 158, col: 48, offset: 3008}, + label: "rest", + expr: &zeroOrMoreExpr{ + pos: position{line: 158, col: 53, offset: 3013}, + expr: &seqExpr{ + pos: position{line: 158, col: 55, offset: 3015}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 158, col: 55, offset: 3015}, + name: "_", + }, + &litMatcher{ + pos: position{line: 158, col: 57, offset: 3017}, + val: ",", + ignoreCase: false, + want: "\",\"", + }, + &ruleRefExpr{ + pos: position{line: 158, col: 61, offset: 3021}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 158, col: 63, offset: 3023}, + name: "UnbracketedValue", + }, + }, + }, + }, + }, + &ruleRefExpr{ + pos: position{line: 158, col: 82, offset: 3042}, + name: "_", + }, + &litMatcher{ + pos: position{line: 158, col: 84, offset: 3044}, + val: ")", + ignoreCase: false, + want: "\")\"", + }, + }, + }, + }, + }, + { + name: "Value", + pos: position{line: 168, col: 1, offset: 3120}, + expr: &choiceExpr{ + pos: position{line: 168, col: 10, offset: 3129}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 168, col: 10, offset: 3129}, + name: "BracketedValue", + }, + &ruleRefExpr{ + pos: position{line: 168, col: 27, offset: 3146}, + name: "UnbracketedValue", + }, + }, + }, + }, + { + name: "BracketedValue", + pos: position{line: 170, col: 1, offset: 3164}, + expr: &actionExpr{ + pos: position{line: 170, col: 19, offset: 3182}, + run: (*parser).callonBracketedValue1, + expr: &seqExpr{ + pos: position{line: 170, col: 19, offset: 3182}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 170, col: 19, offset: 3182}, + val: "(", + ignoreCase: false, + want: "\"(\"", + }, + &ruleRefExpr{ + pos: position{line: 170, col: 23, offset: 3186}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 170, col: 25, offset: 3188}, + label: "i", + expr: &ruleRefExpr{ + pos: position{line: 170, col: 27, offset: 3190}, + name: "Value", + }, + }, + &ruleRefExpr{ + pos: position{line: 170, col: 33, offset: 3196}, + name: "_", + }, + &litMatcher{ + pos: position{line: 170, col: 35, offset: 3198}, + val: ")", + ignoreCase: false, + want: "\")\"", + }, + }, + }, + }, + }, + { + name: "UnbracketedValue", + pos: position{line: 174, col: 1, offset: 3223}, + expr: &choiceExpr{ + pos: position{line: 174, col: 21, offset: 3243}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 174, col: 21, offset: 3243}, + name: "TimeCalculation", + }, + &ruleRefExpr{ + pos: position{line: 174, col: 39, offset: 3261}, + name: "Constant", + }, + &ruleRefExpr{ + pos: position{line: 174, col: 50, offset: 3272}, + name: "Jsonb", + }, + &ruleRefExpr{ + pos: position{line: 174, col: 58, offset: 3280}, + name: "Identifier", + }, + }, + }, + }, + { + name: "Identifier", + pos: position{line: 181, col: 1, offset: 3315}, + expr: &choiceExpr{ + pos: position{line: 181, col: 15, offset: 3329}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 181, col: 15, offset: 3329}, + name: "Jsonb", + }, + &ruleRefExpr{ + pos: position{line: 181, col: 23, offset: 3337}, + name: "ColumnIdentifier", + }, + }, + }, + }, + { + name: "ColumnIdentifier", + pos: position{line: 183, col: 1, offset: 3355}, + 
expr: &choiceExpr{ + pos: position{line: 183, col: 21, offset: 3375}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 183, col: 21, offset: 3375}, + name: "QuotedIdentifier", + }, + &ruleRefExpr{ + pos: position{line: 183, col: 40, offset: 3394}, + name: "UnquotedIdentifier", + }, + }, + }, + }, + { + name: "QuotedIdentifier", + pos: position{line: 185, col: 1, offset: 3414}, + expr: &actionExpr{ + pos: position{line: 185, col: 21, offset: 3434}, + run: (*parser).callonQuotedIdentifier1, + expr: &seqExpr{ + pos: position{line: 185, col: 21, offset: 3434}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 185, col: 21, offset: 3434}, + val: "\"", + ignoreCase: false, + want: "\"\\\"\"", + }, + &zeroOrMoreExpr{ + pos: position{line: 185, col: 25, offset: 3438}, + expr: &choiceExpr{ + pos: position{line: 185, col: 26, offset: 3439}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 185, col: 26, offset: 3439}, + val: "\"\"", + ignoreCase: false, + want: "\"\\\"\\\"\"", + }, + &charClassMatcher{ + pos: position{line: 185, col: 33, offset: 3446}, + val: "[^\"]", + chars: []rune{'"'}, + ignoreCase: false, + inverted: true, + }, + }, + }, + }, + &litMatcher{ + pos: position{line: 185, col: 40, offset: 3453}, + val: "\"", + ignoreCase: false, + want: "\"\\\"\"", + }, + }, + }, + }, + }, + { + name: "UnquotedIdentifier", + pos: position{line: 196, col: 1, offset: 3649}, + expr: &actionExpr{ + pos: position{line: 196, col: 23, offset: 3671}, + run: (*parser).callonUnquotedIdentifier1, + expr: &seqExpr{ + pos: position{line: 196, col: 23, offset: 3671}, + exprs: []interface{}{ + &charClassMatcher{ + pos: position{line: 196, col: 23, offset: 3671}, + val: "[A-Za-z_]", + chars: []rune{'_'}, + ranges: []rune{'A', 'Z', 'a', 'z'}, + ignoreCase: false, + inverted: false, + }, + &zeroOrMoreExpr{ + pos: position{line: 196, col: 32, offset: 3680}, + expr: &charClassMatcher{ + pos: position{line: 196, col: 32, offset: 3680}, + val: "[A-Za-z0-9_]", + chars: []rune{'_'}, + ranges: []rune{'A', 'Z', 'a', 'z', '0', '9'}, + ignoreCase: false, + inverted: false, + }, + }, + }, + }, + }, + }, + { + name: "Jsonb", + pos: position{line: 206, col: 1, offset: 3843}, + expr: &actionExpr{ + pos: position{line: 206, col: 10, offset: 3852}, + run: (*parser).callonJsonb1, + expr: &seqExpr{ + pos: position{line: 206, col: 10, offset: 3852}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 206, col: 10, offset: 3852}, + label: "i", + expr: &ruleRefExpr{ + pos: position{line: 206, col: 12, offset: 3854}, + name: "ColumnIdentifier", + }, + }, + &ruleRefExpr{ + pos: position{line: 206, col: 29, offset: 3871}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 206, col: 31, offset: 3873}, + label: "op", + expr: &ruleRefExpr{ + pos: position{line: 206, col: 34, offset: 3876}, + name: "JsonbOperator", + }, + }, + &ruleRefExpr{ + pos: position{line: 206, col: 48, offset: 3890}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 206, col: 50, offset: 3892}, + label: "first", + expr: &ruleRefExpr{ + pos: position{line: 206, col: 56, offset: 3898}, + name: "JsonbField", + }, + }, + &labeledExpr{ + pos: position{line: 206, col: 67, offset: 3909}, + label: "rest", + expr: &zeroOrMoreExpr{ + pos: position{line: 206, col: 72, offset: 3914}, + expr: &seqExpr{ + pos: position{line: 206, col: 73, offset: 3915}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 206, col: 73, offset: 3915}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 206, col: 75, 
offset: 3917}, + name: "JsonbOperator", + }, + &ruleRefExpr{ + pos: position{line: 206, col: 89, offset: 3931}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 206, col: 91, offset: 3933}, + name: "JsonbField", + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "JsonbField", + pos: position{line: 217, col: 1, offset: 4221}, + expr: &choiceExpr{ + pos: position{line: 217, col: 15, offset: 4235}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 217, col: 15, offset: 4235}, + name: "String", + }, + &ruleRefExpr{ + pos: position{line: 217, col: 24, offset: 4244}, + name: "Integer", + }, + }, + }, + }, + { + name: "JsonbOperator", + pos: position{line: 219, col: 1, offset: 4253}, + expr: &actionExpr{ + pos: position{line: 219, col: 18, offset: 4270}, + run: (*parser).callonJsonbOperator1, + expr: &seqExpr{ + pos: position{line: 219, col: 18, offset: 4270}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 219, col: 18, offset: 4270}, + val: "->", + ignoreCase: false, + want: "\"->\"", + }, + &zeroOrOneExpr{ + pos: position{line: 219, col: 23, offset: 4275}, + expr: &litMatcher{ + pos: position{line: 219, col: 23, offset: 4275}, + val: ">", + ignoreCase: false, + want: "\">\"", + }, + }, + }, + }, + }, + }, + { + name: "CompareOperator", + pos: position{line: 235, col: 1, offset: 4473}, + expr: &actionExpr{ + pos: position{line: 235, col: 20, offset: 4492}, + run: (*parser).callonCompareOperator1, + expr: &choiceExpr{ + pos: position{line: 235, col: 21, offset: 4493}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 235, col: 21, offset: 4493}, + val: "<=", + ignoreCase: false, + want: "\"<=\"", + }, + &litMatcher{ + pos: position{line: 235, col: 28, offset: 4500}, + val: "<>", + ignoreCase: false, + want: "\"<>\"", + }, + &litMatcher{ + pos: position{line: 235, col: 35, offset: 4507}, + val: ">=", + ignoreCase: false, + want: "\">=\"", + }, + &litMatcher{ + pos: position{line: 235, col: 42, offset: 4514}, + val: "!=", + ignoreCase: false, + want: "\"!=\"", + }, + &litMatcher{ + pos: position{line: 235, col: 49, offset: 4521}, + val: "<", + ignoreCase: false, + want: "\"<\"", + }, + &litMatcher{ + pos: position{line: 235, col: 55, offset: 4527}, + val: "=", + ignoreCase: false, + want: "\"=\"", + }, + &litMatcher{ + pos: position{line: 235, col: 61, offset: 4533}, + val: ">", + ignoreCase: false, + want: "\">\"", + }, + }, + }, + }, + }, + { + name: "And", + pos: position{line: 245, col: 1, offset: 4653}, + expr: &actionExpr{ + pos: position{line: 245, col: 8, offset: 4660}, + run: (*parser).callonAnd1, + expr: &litMatcher{ + pos: position{line: 245, col: 8, offset: 4660}, + val: "and", + ignoreCase: true, + want: "\"and\"i", + }, + }, + }, + { + name: "Or", + pos: position{line: 255, col: 1, offset: 4790}, + expr: &actionExpr{ + pos: position{line: 255, col: 7, offset: 4796}, + run: (*parser).callonOr1, + expr: &litMatcher{ + pos: position{line: 255, col: 7, offset: 4796}, + val: "or", + ignoreCase: true, + want: "\"or\"i", + }, + }, + }, + { + name: "Not", + pos: position{line: 265, col: 1, offset: 4924}, + expr: &actionExpr{ + pos: position{line: 265, col: 8, offset: 4931}, + run: (*parser).callonNot1, + expr: &litMatcher{ + pos: position{line: 265, col: 8, offset: 4931}, + val: "not", + ignoreCase: true, + want: "\"not\"i", + }, + }, + }, + { + name: "In", + pos: position{line: 275, col: 1, offset: 5061}, + expr: &actionExpr{ + pos: position{line: 275, col: 7, offset: 5067}, + run: (*parser).callonIn1, + expr: &seqExpr{ + pos: 
position{line: 275, col: 7, offset: 5067}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 275, col: 7, offset: 5067}, + label: "not", + expr: &zeroOrOneExpr{ + pos: position{line: 275, col: 11, offset: 5071}, + expr: &seqExpr{ + pos: position{line: 275, col: 12, offset: 5072}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 275, col: 12, offset: 5072}, + val: "not", + ignoreCase: true, + want: "\"not\"i", + }, + &ruleRefExpr{ + pos: position{line: 275, col: 19, offset: 5079}, + name: "_", + }, + }, + }, + }, + }, + &litMatcher{ + pos: position{line: 275, col: 23, offset: 5083}, + val: "in", + ignoreCase: true, + want: "\"in\"i", + }, + }, + }, + }, + }, + { + name: "Like", + pos: position{line: 288, col: 1, offset: 5256}, + expr: &actionExpr{ + pos: position{line: 288, col: 9, offset: 5264}, + run: (*parser).callonLike1, + expr: &seqExpr{ + pos: position{line: 288, col: 9, offset: 5264}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 288, col: 9, offset: 5264}, + label: "not", + expr: &zeroOrOneExpr{ + pos: position{line: 288, col: 13, offset: 5268}, + expr: &seqExpr{ + pos: position{line: 288, col: 14, offset: 5269}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 288, col: 14, offset: 5269}, + val: "not", + ignoreCase: true, + want: "\"not\"i", + }, + &ruleRefExpr{ + pos: position{line: 288, col: 21, offset: 5276}, + name: "_", + }, + }, + }, + }, + }, + &labeledExpr{ + pos: position{line: 288, col: 25, offset: 5280}, + label: "like", + expr: &ruleRefExpr{ + pos: position{line: 288, col: 30, offset: 5285}, + name: "LikeOrIlike", + }, + }, + }, + }, + }, + }, + { + name: "LikeOrIlike", + pos: position{line: 302, col: 1, offset: 5519}, + expr: &actionExpr{ + pos: position{line: 302, col: 16, offset: 5534}, + run: (*parser).callonLikeOrIlike1, + expr: &choiceExpr{ + pos: position{line: 302, col: 17, offset: 5535}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 302, col: 17, offset: 5535}, + val: "like", + ignoreCase: true, + want: "\"like\"i", + }, + &litMatcher{ + pos: position{line: 302, col: 27, offset: 5545}, + val: "ilike", + ignoreCase: true, + want: "\"ilike\"i", + }, + }, + }, + }, + }, + { + name: "Is", + pos: position{line: 307, col: 1, offset: 5602}, + expr: &actionExpr{ + pos: position{line: 307, col: 7, offset: 5608}, + run: (*parser).callonIs1, + expr: &seqExpr{ + pos: position{line: 307, col: 7, offset: 5608}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 307, col: 7, offset: 5608}, + val: "is", + ignoreCase: true, + want: "\"is\"i", + }, + &labeledExpr{ + pos: position{line: 307, col: 13, offset: 5614}, + label: "not", + expr: &zeroOrOneExpr{ + pos: position{line: 307, col: 17, offset: 5618}, + expr: &seqExpr{ + pos: position{line: 307, col: 18, offset: 5619}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 307, col: 18, offset: 5619}, + name: "_", + }, + &litMatcher{ + pos: position{line: 307, col: 20, offset: 5621}, + val: "not", + ignoreCase: true, + want: "\"not\"i", + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "TimeCalculation", + pos: position{line: 327, col: 1, offset: 5870}, + expr: &actionExpr{ + pos: position{line: 327, col: 20, offset: 5889}, + run: (*parser).callonTimeCalculation1, + expr: &seqExpr{ + pos: position{line: 327, col: 20, offset: 5889}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 327, col: 20, offset: 5889}, + label: "now", + expr: &ruleRefExpr{ + pos: position{line: 327, col: 24, offset: 5893}, + name: 
"NoArgsFunction", + }, + }, + &labeledExpr{ + pos: position{line: 327, col: 39, offset: 5908}, + label: "interval", + expr: &zeroOrOneExpr{ + pos: position{line: 327, col: 48, offset: 5917}, + expr: &seqExpr{ + pos: position{line: 327, col: 49, offset: 5918}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 327, col: 49, offset: 5918}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 327, col: 51, offset: 5920}, + name: "Add", + }, + &ruleRefExpr{ + pos: position{line: 327, col: 55, offset: 5924}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 327, col: 57, offset: 5926}, + name: "StringOperatorFunction", + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Add", + pos: position{line: 342, col: 1, offset: 6334}, + expr: &actionExpr{ + pos: position{line: 342, col: 8, offset: 6341}, + run: (*parser).callonAdd1, + expr: &choiceExpr{ + pos: position{line: 342, col: 9, offset: 6342}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 342, col: 9, offset: 6342}, + val: "+", + ignoreCase: false, + want: "\"+\"", + }, + &litMatcher{ + pos: position{line: 342, col: 15, offset: 6348}, + val: "-", + ignoreCase: false, + want: "\"-\"", + }, + }, + }, + }, + }, + { + name: "Function", + pos: position{line: 352, col: 1, offset: 6468}, + expr: &choiceExpr{ + pos: position{line: 352, col: 13, offset: 6480}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 352, col: 13, offset: 6480}, + name: "StringOperatorFunction", + }, + &ruleRefExpr{ + pos: position{line: 352, col: 38, offset: 6505}, + name: "NoArgsFunction", + }, + }, + }, + }, + { + name: "NoArgsFunction", + pos: position{line: 354, col: 1, offset: 6521}, + expr: &actionExpr{ + pos: position{line: 354, col: 19, offset: 6539}, + run: (*parser).callonNoArgsFunction1, + expr: &seqExpr{ + pos: position{line: 354, col: 19, offset: 6539}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 354, col: 19, offset: 6539}, + name: "NoArgsFunctionName", + }, + &litMatcher{ + pos: position{line: 354, col: 38, offset: 6558}, + val: "()", + ignoreCase: false, + want: "\"()\"", + }, + }, + }, + }, + }, + { + name: "NoArgsFunctionName", + pos: position{line: 369, col: 1, offset: 6828}, + expr: &litMatcher{ + pos: position{line: 369, col: 23, offset: 6850}, + val: "now", + ignoreCase: true, + want: "\"now\"i", + }, + }, + { + name: "StringOperatorFunction", + pos: position{line: 371, col: 1, offset: 6858}, + expr: &actionExpr{ + pos: position{line: 371, col: 27, offset: 6884}, + run: (*parser).callonStringOperatorFunction1, + expr: &seqExpr{ + pos: position{line: 371, col: 27, offset: 6884}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 371, col: 27, offset: 6884}, + label: "fn", + expr: &ruleRefExpr{ + pos: position{line: 371, col: 30, offset: 6887}, + name: "StringOperatorFunctionName", + }, + }, + &ruleRefExpr{ + pos: position{line: 371, col: 57, offset: 6914}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 371, col: 59, offset: 6916}, + label: "s", + expr: &ruleRefExpr{ + pos: position{line: 371, col: 61, offset: 6918}, + name: "String", + }, + }, + }, + }, + }, + }, + { + name: "StringOperatorFunctionName", + pos: position{line: 386, col: 1, offset: 7202}, + expr: &litMatcher{ + pos: position{line: 386, col: 31, offset: 7232}, + val: "interval", + ignoreCase: true, + want: "\"interval\"i", + }, + }, + { + name: "Constant", + pos: position{line: 393, col: 1, offset: 7266}, + expr: &choiceExpr{ + pos: position{line: 393, col: 13, offset: 7278}, + 
alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 393, col: 13, offset: 7278}, + name: "Bool", + }, + &ruleRefExpr{ + pos: position{line: 393, col: 20, offset: 7285}, + name: "Number", + }, + &ruleRefExpr{ + pos: position{line: 393, col: 29, offset: 7294}, + name: "String", + }, + }, + }, + }, + { + name: "String", + pos: position{line: 395, col: 1, offset: 7302}, + expr: &actionExpr{ + pos: position{line: 395, col: 11, offset: 7312}, + run: (*parser).callonString1, + expr: &seqExpr{ + pos: position{line: 395, col: 11, offset: 7312}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 395, col: 11, offset: 7312}, + val: "'", + ignoreCase: false, + want: "\"'\"", + }, + &zeroOrMoreExpr{ + pos: position{line: 395, col: 15, offset: 7316}, + expr: &choiceExpr{ + pos: position{line: 395, col: 16, offset: 7317}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 395, col: 16, offset: 7317}, + val: "''", + ignoreCase: false, + want: "\"''\"", + }, + &charClassMatcher{ + pos: position{line: 395, col: 23, offset: 7324}, + val: "[^']", + chars: []rune{'\''}, + ignoreCase: false, + inverted: true, + }, + }, + }, + }, + &litMatcher{ + pos: position{line: 395, col: 30, offset: 7331}, + val: "'", + ignoreCase: false, + want: "\"'\"", + }, + }, + }, + }, + }, + { + name: "Number", + pos: position{line: 406, col: 1, offset: 7516}, + expr: &actionExpr{ + pos: position{line: 406, col: 11, offset: 7526}, + run: (*parser).callonNumber1, + expr: &seqExpr{ + pos: position{line: 406, col: 11, offset: 7526}, + exprs: []interface{}{ + &zeroOrOneExpr{ + pos: position{line: 406, col: 11, offset: 7526}, + expr: &litMatcher{ + pos: position{line: 406, col: 11, offset: 7526}, + val: "-", + ignoreCase: false, + want: "\"-\"", + }, + }, + &ruleRefExpr{ + pos: position{line: 406, col: 16, offset: 7531}, + name: "Integer", + }, + &zeroOrOneExpr{ + pos: position{line: 406, col: 24, offset: 7539}, + expr: &seqExpr{ + pos: position{line: 406, col: 26, offset: 7541}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 406, col: 26, offset: 7541}, + val: ".", + ignoreCase: false, + want: "\".\"", + }, + &oneOrMoreExpr{ + pos: position{line: 406, col: 30, offset: 7545}, + expr: &ruleRefExpr{ + pos: position{line: 406, col: 30, offset: 7545}, + name: "DecimalDigit", + }, + }, + }, + }, + }, + &zeroOrOneExpr{ + pos: position{line: 406, col: 47, offset: 7562}, + expr: &ruleRefExpr{ + pos: position{line: 406, col: 47, offset: 7562}, + name: "Exponent", + }, + }, + }, + }, + }, + }, + { + name: "Integer", + pos: position{line: 418, col: 1, offset: 7777}, + expr: &actionExpr{ + pos: position{line: 418, col: 12, offset: 7788}, + run: (*parser).callonInteger1, + expr: &choiceExpr{ + pos: position{line: 418, col: 13, offset: 7789}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 418, col: 13, offset: 7789}, + val: "0", + ignoreCase: false, + want: "\"0\"", + }, + &seqExpr{ + pos: position{line: 418, col: 19, offset: 7795}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 418, col: 19, offset: 7795}, + name: "NonZeroDecimalDigit", + }, + &zeroOrMoreExpr{ + pos: position{line: 418, col: 39, offset: 7815}, + expr: &ruleRefExpr{ + pos: position{line: 418, col: 39, offset: 7815}, + name: "DecimalDigit", + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Exponent", + pos: position{line: 428, col: 1, offset: 7949}, + expr: &seqExpr{ + pos: position{line: 428, col: 13, offset: 7961}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 428, col: 13, 
offset: 7961}, + val: "e", + ignoreCase: true, + want: "\"e\"i", + }, + &zeroOrOneExpr{ + pos: position{line: 428, col: 18, offset: 7966}, + expr: &charClassMatcher{ + pos: position{line: 428, col: 18, offset: 7966}, + val: "[+-]", + chars: []rune{'+', '-'}, + ignoreCase: false, + inverted: false, + }, + }, + &oneOrMoreExpr{ + pos: position{line: 428, col: 24, offset: 7972}, + expr: &ruleRefExpr{ + pos: position{line: 428, col: 24, offset: 7972}, + name: "DecimalDigit", + }, + }, + }, + }, + }, + { + name: "DecimalDigit", + pos: position{line: 430, col: 1, offset: 7987}, + expr: &charClassMatcher{ + pos: position{line: 430, col: 17, offset: 8003}, + val: "[0-9]", + ranges: []rune{'0', '9'}, + ignoreCase: false, + inverted: false, + }, + }, + { + name: "NonZeroDecimalDigit", + pos: position{line: 432, col: 1, offset: 8010}, + expr: &charClassMatcher{ + pos: position{line: 432, col: 24, offset: 8033}, + val: "[1-9]", + ranges: []rune{'1', '9'}, + ignoreCase: false, + inverted: false, + }, + }, + { + name: "Bool", + pos: position{line: 434, col: 1, offset: 8040}, + expr: &actionExpr{ + pos: position{line: 434, col: 9, offset: 8048}, + run: (*parser).callonBool1, + expr: &choiceExpr{ + pos: position{line: 434, col: 10, offset: 8049}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 434, col: 10, offset: 8049}, + val: "true", + ignoreCase: true, + want: "\"true\"i", + }, + &litMatcher{ + pos: position{line: 434, col: 20, offset: 8059}, + val: "false", + ignoreCase: true, + want: "\"false\"i", + }, + }, + }, + }, + }, + { + name: "Null", + pos: position{line: 444, col: 1, offset: 8203}, + expr: &actionExpr{ + pos: position{line: 444, col: 9, offset: 8211}, + run: (*parser).callonNull1, + expr: &litMatcher{ + pos: position{line: 444, col: 9, offset: 8211}, + val: "null", + ignoreCase: true, + want: "\"null\"i", + }, + }, + }, + { + name: "_", + pos: position{line: 460, col: 1, offset: 8385}, + expr: &actionExpr{ + pos: position{line: 460, col: 6, offset: 8390}, + run: (*parser).callon_1, + expr: &zeroOrMoreExpr{ + pos: position{line: 460, col: 6, offset: 8390}, + expr: &charClassMatcher{ + pos: position{line: 460, col: 6, offset: 8390}, + val: "[ \\n\\t\\r]", + chars: []rune{' ', '\n', '\t', '\r'}, + ignoreCase: false, + inverted: false, + }, + }, + }, + }, + { + name: "EOF", + pos: position{line: 470, col: 1, offset: 8544}, + expr: &actionExpr{ + pos: position{line: 470, col: 8, offset: 8551}, + run: (*parser).callonEOF1, + expr: ¬Expr{ + pos: position{line: 470, col: 8, offset: 8551}, + expr: &anyMatcher{ + line: 470, col: 9, offset: 8552, + }, + }, + }, + }, + }, +} + +func (c *current) onInput1(i interface{}) (interface{}, error) { + return i, nil +} + +func (p *parser) callonInput1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onInput1(stack["i"]) +} + +func (c *current) onOrComparison1(first, rest interface{}) (interface{}, error) { + exprs := eval(first, rest) + if len(exprs) <= 1 { + return first, nil + } + n := ComparisonNode{ + Type: "or", + Values: exprs, + } + return n, nil +} + +func (p *parser) callonOrComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onOrComparison1(stack["first"], stack["rest"]) +} + +func (c *current) onAndComparison1(first, rest interface{}) (interface{}, error) { + exprs := eval(first, rest) + if len(exprs) <= 1 { + return first, nil + } + n := ComparisonNode{ + Type: "and", + Values: exprs, + } + return n, nil +} + +func (p *parser) 
callonAndComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onAndComparison1(stack["first"], stack["rest"]) +} + +func (c *current) onMultiComparison1(expr interface{}) (interface{}, error) { + return expr, nil +} + +func (p *parser) callonMultiComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onMultiComparison1(stack["expr"]) +} + +func (c *current) onNotComparison1(op, right interface{}) (interface{}, error) { + n := ComparisonNode{ + Type: "not", + Operator: op.(CodeNode), + Values: []ComparisonNode{right.(ComparisonNode)}, + } + return n, nil +} + +func (p *parser) callonNotComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNotComparison1(stack["op"], stack["right"]) +} + +func (c *current) onLeftRightComparison1(left, op, right interface{}) (interface{}, error) { + n := ComparisonNode{ + Type: "compare", + Operator: op.(CodeNode), + Values: []CodeNode{left.(CodeNode), right.(CodeNode)}, + } + return n, nil +} + +func (p *parser) callonLeftRightComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLeftRightComparison1(stack["left"], stack["op"], stack["right"]) +} + +func (c *current) onLikeComparison1(left, op, right interface{}) (interface{}, error) { + n := ComparisonNode{ + Type: "like", + Operator: op.(CodeNode), + Values: []CodeNode{left.(CodeNode), right.(CodeNode)}, + } + return n, nil +} + +func (p *parser) callonLikeComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLikeComparison1(stack["left"], stack["op"], stack["right"]) +} + +func (c *current) onIsComparison1(left, op, right interface{}) (interface{}, error) { + n := ComparisonNode{ + Type: "is", + Operator: op.(CodeNode), + Values: []CodeNode{left.(CodeNode), right.(CodeNode)}, + } + return n, nil +} + +func (p *parser) callonIsComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onIsComparison1(stack["left"], stack["op"], stack["right"]) +} + +func (c *current) onInComparison1(first, op, rest interface{}) (interface{}, error) { + exprs := []CodeNode{first.(CodeNode)} + resti := toIfaceSlice(rest) + for _, v := range resti { + exprs = append(exprs, v.(CodeNode)) + } + n := ComparisonNode{ + Type: "in", + Operator: op.(CodeNode), + Values: exprs, + } + return n, nil +} + +func (p *parser) callonInComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onInComparison1(stack["first"], stack["op"], stack["rest"]) +} + +func (c *current) onIdentifierComparison1(i interface{}) (interface{}, error) { + n := ComparisonNode{ + Type: "identifier", + Values: []CodeNode{i.(CodeNode)}, + } + return n, nil +} + +func (p *parser) callonIdentifierComparison1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onIdentifierComparison1(stack["i"]) +} + +func (c *current) onEmptyInList1() (interface{}, error) { + return []interface{}{}, nil +} + +func (p *parser) callonEmptyInList1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onEmptyInList1() +} + +func (c *current) onNonEmptyInList1(first, rest interface{}) (interface{}, error) { + exprs := eval(first, rest) + return exprs, nil +} + +func (p *parser) callonNonEmptyInList1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return 
p.cur.onNonEmptyInList1(stack["first"], stack["rest"]) +} + +func (c *current) onBracketedValue1(i interface{}) (interface{}, error) { + return i, nil +} + +func (p *parser) callonBracketedValue1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onBracketedValue1(stack["i"]) +} + +func (c *current) onQuotedIdentifier1() (interface{}, error) { + src := string(c.text) + value := strings.ReplaceAll(src[1:len(src)-1], `""`, `"`) + n := CodeNode{ + Type: "quoted_identifier", + Source: src, + Value: value, + } + return n, nil +} + +func (p *parser) callonQuotedIdentifier1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onQuotedIdentifier1() +} + +func (c *current) onUnquotedIdentifier1() (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "unquoted_identifier", + Source: src, + Value: strings.ToLower(src), + } + return n, nil +} + +func (p *parser) callonUnquotedIdentifier1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onUnquotedIdentifier1() +} + +func (c *current) onJsonb1(i, op, first, rest interface{}) (interface{}, error) { + n := i.(CodeNode) + n.JsonbSelector = []CodeNode{op.(CodeNode), first.(CodeNode)} + resti := toIfaceSlice(rest) + for _, e := range resti { + ei := toIfaceSlice(e) + n.JsonbSelector = append(n.JsonbSelector, ei[1].(CodeNode), ei[3].(CodeNode)) + } + return n, nil +} + +func (p *parser) callonJsonb1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onJsonb1(stack["i"], stack["op"], stack["first"], stack["rest"]) +} + +func (c *current) onJsonbOperator1() (interface{}, error) { + s := string(c.text) + n := CodeNode{ + Type: "operator", + Source: s, + Value: s, + } + return n, nil +} + +func (p *parser) callonJsonbOperator1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onJsonbOperator1() +} + +func (c *current) onCompareOperator1() (interface{}, error) { + s := string(c.text) + n := CodeNode{ + Type: "operator", + Source: s, + Value: s, + } + return n, nil +} + +func (p *parser) callonCompareOperator1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onCompareOperator1() +} + +func (c *current) onAnd1() (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "and", + } + return n, nil +} + +func (p *parser) callonAnd1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onAnd1() +} + +func (c *current) onOr1() (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "or", + } + return n, nil +} + +func (p *parser) callonOr1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onOr1() +} + +func (c *current) onNot1() (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "not", + } + return n, nil +} + +func (p *parser) callonNot1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNot1() +} + +func (c *current) onIn1(not interface{}) (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "in", + } + if not != nil { + n.Value = "not in" + } + return n, nil +} + +func (p *parser) callonIn1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onIn1(stack["not"]) +} + 
+func (c *current) onLike1(not, like interface{}) (interface{}, error) { + src := string(c.text) + likeStr := strings.ToLower(like.(string)) + n := CodeNode{ + Type: "operator", + Source: src, + Value: likeStr, + } + if not != nil { + n.Value = "not " + likeStr + } + return n, nil +} + +func (p *parser) callonLike1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLike1(stack["not"], stack["like"]) +} + +func (c *current) onLikeOrIlike1() (interface{}, error) { + src := string(c.text) + return src, nil +} + +func (p *parser) callonLikeOrIlike1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onLikeOrIlike1() +} + +func (c *current) onIs1(not interface{}) (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "is", + } + if not != nil { + n.Value = "is not" + } + return n, nil +} + +func (p *parser) callonIs1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onIs1(stack["not"]) +} + +func (c *current) onTimeCalculation1(now, interval interface{}) (interface{}, error) { + n := CodeNode{ + Type: "time_calculation", + Source: string(c.text), + Value: "now()", + } + if interval != nil { + intervalSlice := toIfaceSlice(interval) + addOp := intervalSlice[1].(CodeNode).Value + intervalString, _ := CodeToSQL(intervalSlice[3].(FunctionNode).Args[0]) + n.Value += fmt.Sprintf(" %s interval %s", addOp, intervalString) + } + return n, nil +} + +func (p *parser) callonTimeCalculation1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onTimeCalculation1(stack["now"], stack["interval"]) +} + +func (c *current) onAdd1() (interface{}, error) { + s := string(c.text) + n := CodeNode{ + Type: "operator", + Source: s, + Value: s, + } + return n, nil +} + +func (p *parser) callonAdd1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onAdd1() +} + +func (c *current) onNoArgsFunction1() (interface{}, error) { + src := string(c.text) + fnName := src[:len(src)-2] + n := FunctionNode{ + Name: "function", + Function: CodeNode{ + Type: "function_name", + Source: fnName, + Value: strings.ToLower(fnName), + }, + Args: []CodeNode{}, + } + return n, nil +} + +func (p *parser) callonNoArgsFunction1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNoArgsFunction1() +} + +func (c *current) onStringOperatorFunction1(fn, s interface{}) (interface{}, error) { + src := string(c.text) + fnName := src[:len(src)-2] + n := FunctionNode{ + Name: "function", + Function: CodeNode{ + Type: "function_name", + Source: fnName, + Value: strings.ToLower(fnName), + }, + Args: []CodeNode{s.(CodeNode)}, + } + return n, nil +} + +func (p *parser) callonStringOperatorFunction1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onStringOperatorFunction1(stack["fn"], stack["s"]) +} + +func (c *current) onString1() (interface{}, error) { + src := string(c.text) + value := strings.ReplaceAll(src[1:len(src)-1], "''", "'") + n := CodeNode{ + Type: "string", + Source: src, + Value: value, + } + return n, nil +} + +func (p *parser) callonString1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onString1() +} + +func (c *current) onNumber1() (interface{}, error) { + // JSON numbers have the same syntax as Go's, and are parseable using + // strconv. 
+ src := string(c.text) + n := CodeNode{ + Type: "number", + Source: src, + Value: src, + } + return n, nil +} + +func (p *parser) callonNumber1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNumber1() +} + +func (c *current) onInteger1() (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "number", + Source: src, + Value: src, + } + return n, nil +} + +func (p *parser) callonInteger1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onInteger1() +} + +func (c *current) onBool1() (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "bool", + Source: src, + Value: strings.ToLower(src), + } + return n, nil +} + +func (p *parser) callonBool1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onBool1() +} + +func (c *current) onNull1() (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "null", + Source: src, + Value: strings.ToLower(src), + } + return n, nil +} + +func (p *parser) callonNull1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onNull1() +} + +func (c *current) on_1() (interface{}, error) { + src := string(c.text) + n := CodeNode{ + Type: "whitespace", + Source: src, + } + if len(src) > 0 { + n.Value = " " + } + return n, nil +} + +func (p *parser) callon_1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.on_1() +} + +func (c *current) onEOF1() (interface{}, error) { + n := CodeNode{ + Type: "eof", + Source: "", + Value: "", + } + return n, nil +} + +func (p *parser) callonEOF1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onEOF1() +} + +var ( + // errNoRule is returned when the grammar to parse has no rule. + errNoRule = errors.New("grammar has no rule") + + // errInvalidEntrypoint is returned when the specified entrypoint rule + // does not exit. + errInvalidEntrypoint = errors.New("invalid entrypoint") + + // errInvalidEncoding is returned when the source is not properly + // utf8-encoded. + errInvalidEncoding = errors.New("invalid encoding") + + // errMaxExprCnt is used to signal that the maximum number of + // expressions have been parsed. + errMaxExprCnt = errors.New("max number of expresssions parsed") +) + +// Option is a function that can set an option on the parser. It returns +// the previous setting as an Option. +type Option func(*parser) Option + +// MaxExpressions creates an Option to stop parsing after the provided +// number of expressions have been parsed, if the value is 0 then the parser will +// parse for as many steps as needed (possibly an infinite number). +// +// The default for maxExprCnt is 0. +func MaxExpressions(maxExprCnt uint64) Option { + return func(p *parser) Option { + oldMaxExprCnt := p.maxExprCnt + p.maxExprCnt = maxExprCnt + return MaxExpressions(oldMaxExprCnt) + } +} + +// Entrypoint creates an Option to set the rule name to use as entrypoint. +// The rule name must have been specified in the -alternate-entrypoints +// if generating the parser with the -optimize-grammar flag, otherwise +// it may have been optimized out. Passing an empty string sets the +// entrypoint to the first rule in the grammar. +// +// The default is to start parsing at the first rule in the grammar. 
+func Entrypoint(ruleName string) Option { + return func(p *parser) Option { + oldEntrypoint := p.entrypoint + p.entrypoint = ruleName + if ruleName == "" { + p.entrypoint = g.rules[0].name + } + return Entrypoint(oldEntrypoint) + } +} + +// Statistics adds a user provided Stats struct to the parser to allow +// the user to process the results after the parsing has finished. +// Also the key for the "no match" counter is set. +// +// Example usage: +// +// input := "input" +// stats := Stats{} +// _, err := Parse("input-file", []byte(input), Statistics(&stats, "no match")) +// if err != nil { +// log.Panicln(err) +// } +// b, err := json.MarshalIndent(stats.ChoiceAltCnt, "", " ") +// if err != nil { +// log.Panicln(err) +// } +func Statistics(stats *Stats, choiceNoMatch string) Option { + return func(p *parser) Option { + oldStats := p.Stats + p.Stats = stats + oldChoiceNoMatch := p.choiceNoMatch + p.choiceNoMatch = choiceNoMatch + if p.Stats.ChoiceAltCnt == nil { + p.Stats.ChoiceAltCnt = make(map[string]map[string]int) + } + return Statistics(oldStats, oldChoiceNoMatch) + } +} + +// Debug creates an Option to set the debug flag to b. When set to true, +// debugging information is printed to stdout while parsing. +// +// The default is false. +func Debug(b bool) Option { + return func(p *parser) Option { + old := p.debug + p.debug = b + return Debug(old) + } +} + +// Memoize creates an Option to set the memoize flag to b. When set to true, +// the parser will cache all results so each expression is evaluated only +// once. This guarantees linear parsing time even for pathological cases, +// at the expense of more memory and slower times for typical cases. +// +// The default is false. +func Memoize(b bool) Option { + return func(p *parser) Option { + old := p.memoize + p.memoize = b + return Memoize(old) + } +} + +// AllowInvalidUTF8 creates an Option to allow invalid UTF-8 bytes. +// Every invalid UTF-8 byte is treated as a utf8.RuneError (U+FFFD) +// by character class matchers and is matched by the any matcher. +// The returned matched value, c.text and c.offset are NOT affected. +// +// The default is false. +func AllowInvalidUTF8(b bool) Option { + return func(p *parser) Option { + old := p.allowInvalidUTF8 + p.allowInvalidUTF8 = b + return AllowInvalidUTF8(old) + } +} + +// Recover creates an Option to set the recover flag to b. When set to +// true, this causes the parser to recover from panics and convert it +// to an error. Setting it to false can be useful while debugging to +// access the full stack trace. +// +// The default is true. +func Recover(b bool) Option { + return func(p *parser) Option { + old := p.recover + p.recover = b + return Recover(old) + } +} + +// GlobalStore creates an Option to set a key to a certain value in +// the globalStore. +func GlobalStore(key string, value interface{}) Option { + return func(p *parser) Option { + old := p.cur.globalStore[key] + p.cur.globalStore[key] = value + return GlobalStore(key, old) + } +} + +// InitState creates an Option to set a key to a certain value in +// the global "state" store. +func InitState(key string, value interface{}) Option { + return func(p *parser) Option { + old := p.cur.state[key] + p.cur.state[key] = value + return InitState(key, old) + } +} + +// ParseFile parses the file identified by filename. 
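+// It opens the file with os.Open and delegates to ParseReader; note that a
+// non-nil error from closing the file takes precedence over the parse result.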
+func ParseFile(filename string, opts ...Option) (i interface{}, err error) { + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + if closeErr := f.Close(); closeErr != nil { + err = closeErr + } + }() + return ParseReader(filename, f, opts...) +} + +// ParseReader parses the data from r using filename as information in the +// error messages. +func ParseReader(filename string, r io.Reader, opts ...Option) (interface{}, error) { + b, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + return Parse(filename, b, opts...) +} + +// Parse parses the data from b using filename as information in the +// error messages. +func Parse(filename string, b []byte, opts ...Option) (interface{}, error) { + return newParser(filename, b, opts...).parse(g) +} + +// position records a position in the text. +type position struct { + line, col, offset int +} + +func (p position) String() string { + return strconv.Itoa(p.line) + ":" + strconv.Itoa(p.col) + " [" + strconv.Itoa(p.offset) + "]" +} + +// savepoint stores all state required to go back to this point in the +// parser. +type savepoint struct { + position + rn rune + w int +} + +type current struct { + pos position // start position of the match + text []byte // raw text of the match + + // state is a store for arbitrary key,value pairs that the user wants to be + // tied to the backtracking of the parser. + // This is always rolled back if a parsing rule fails. + state storeDict + + // globalStore is a general store for the user to store arbitrary key-value + // pairs that they need to manage and that they do not want tied to the + // backtracking of the parser. This is only modified by the user and never + // rolled back by the parser. It is always up to the user to keep this in a + // consistent state. + globalStore storeDict +} + +type storeDict map[string]interface{} + +// the AST types... + +type grammar struct { + pos position + rules []*rule +} + +type rule struct { + pos position + name string + displayName string + expr interface{} +} + +type choiceExpr struct { + pos position + alternatives []interface{} +} + +type actionExpr struct { + pos position + expr interface{} + run func(*parser) (interface{}, error) +} + +type recoveryExpr struct { + pos position + expr interface{} + recoverExpr interface{} + failureLabel []string +} + +type seqExpr struct { + pos position + exprs []interface{} +} + +type throwExpr struct { + pos position + label string +} + +type labeledExpr struct { + pos position + label string + expr interface{} +} + +type expr struct { + pos position + expr interface{} +} + +type andExpr expr +type notExpr expr +type zeroOrOneExpr expr +type zeroOrMoreExpr expr +type oneOrMoreExpr expr + +type ruleRefExpr struct { + pos position + name string +} + +type stateCodeExpr struct { + pos position + run func(*parser) error +} + +type andCodeExpr struct { + pos position + run func(*parser) (bool, error) +} + +type notCodeExpr struct { + pos position + run func(*parser) (bool, error) +} + +type litMatcher struct { + pos position + val string + ignoreCase bool + want string +} + +type charClassMatcher struct { + pos position + val string + basicLatinChars [128]bool + chars []rune + ranges []rune + classes []*unicode.RangeTable + ignoreCase bool + inverted bool +} + +type anyMatcher position + +// errList cumulates the errors found by the parser. 
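+// It implements the error interface, and duplicate messages are removed
+// before the list is returned to the caller (see err and dedupe below).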
+type errList []error + +func (e *errList) add(err error) { + *e = append(*e, err) +} + +func (e errList) err() error { + if len(e) == 0 { + return nil + } + e.dedupe() + return e +} + +func (e *errList) dedupe() { + var cleaned []error + set := make(map[string]bool) + for _, err := range *e { + if msg := err.Error(); !set[msg] { + set[msg] = true + cleaned = append(cleaned, err) + } + } + *e = cleaned +} + +func (e errList) Error() string { + switch len(e) { + case 0: + return "" + case 1: + return e[0].Error() + default: + var buf bytes.Buffer + + for i, err := range e { + if i > 0 { + buf.WriteRune('\n') + } + buf.WriteString(err.Error()) + } + return buf.String() + } +} + +// parserError wraps an error with a prefix indicating the rule in which +// the error occurred. The original error is stored in the Inner field. +type parserError struct { + Inner error + pos position + prefix string + expected []string +} + +// Error returns the error message. +func (p *parserError) Error() string { + return p.prefix + ": " + p.Inner.Error() +} + +// newParser creates a parser with the specified input source and options. +func newParser(filename string, b []byte, opts ...Option) *parser { + stats := Stats{ + ChoiceAltCnt: make(map[string]map[string]int), + } + + p := &parser{ + filename: filename, + errs: new(errList), + data: b, + pt: savepoint{position: position{line: 1}}, + recover: true, + cur: current{ + state: make(storeDict), + globalStore: make(storeDict), + }, + maxFailPos: position{col: 1, line: 1}, + maxFailExpected: make([]string, 0, 20), + Stats: &stats, + // start rule is rule [0] unless an alternate entrypoint is specified + entrypoint: g.rules[0].name, + } + p.setOptions(opts) + + if p.maxExprCnt == 0 { + p.maxExprCnt = math.MaxUint64 + } + + return p +} + +// setOptions applies the options to the parser. +func (p *parser) setOptions(opts []Option) { + for _, opt := range opts { + opt(p) + } +} + +type resultTuple struct { + v interface{} + b bool + end savepoint +} + +const choiceNoMatch = -1 + +// Stats stores some statistics, gathered during parsing +type Stats struct { + // ExprCnt counts the number of expressions processed during parsing + // This value is compared to the maximum number of expressions allowed + // (set by the MaxExpressions option). + ExprCnt uint64 + + // ChoiceAltCnt is used to count for each ordered choice expression, + // which alternative is used how may times. + // These numbers allow to optimize the order of the ordered choice expression + // to increase the performance of the parser + // + // The outer key of ChoiceAltCnt is composed of the name of the rule as well + // as the line and the column of the ordered choice. + // The inner key of ChoiceAltCnt is the number (one-based) of the matching alternative. + // For each alternative the number of matches are counted. If an ordered choice does not + // match, a special counter is incremented. The name of this counter is set with + // the parser option Statistics. + // For an alternative to be included in ChoiceAltCnt, it has to match at least once. 
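+	// (The inner key is stored as the alternative's one-based index rendered
+	// as a decimal string; see incChoiceAltCnt.)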
+ ChoiceAltCnt map[string]map[string]int +} + +type parser struct { + filename string + pt savepoint + cur current + + data []byte + errs *errList + + depth int + recover bool + debug bool + + memoize bool + // memoization table for the packrat algorithm: + // map[offset in source] map[expression or rule] {value, match} + memo map[int]map[interface{}]resultTuple + + // rules table, maps the rule identifier to the rule node + rules map[string]*rule + // variables stack, map of label to value + vstack []map[string]interface{} + // rule stack, allows identification of the current rule in errors + rstack []*rule + + // parse fail + maxFailPos position + maxFailExpected []string + maxFailInvertExpected bool + + // max number of expressions to be parsed + maxExprCnt uint64 + // entrypoint for the parser + entrypoint string + + allowInvalidUTF8 bool + + *Stats + + choiceNoMatch string + // recovery expression stack, keeps track of the currently available recovery expression, these are traversed in reverse + recoveryStack []map[string]interface{} +} + +// push a variable set on the vstack. +func (p *parser) pushV() { + if cap(p.vstack) == len(p.vstack) { + // create new empty slot in the stack + p.vstack = append(p.vstack, nil) + } else { + // slice to 1 more + p.vstack = p.vstack[:len(p.vstack)+1] + } + + // get the last args set + m := p.vstack[len(p.vstack)-1] + if m != nil && len(m) == 0 { + // empty map, all good + return + } + + m = make(map[string]interface{}) + p.vstack[len(p.vstack)-1] = m +} + +// pop a variable set from the vstack. +func (p *parser) popV() { + // if the map is not empty, clear it + m := p.vstack[len(p.vstack)-1] + if len(m) > 0 { + // GC that map + p.vstack[len(p.vstack)-1] = nil + } + p.vstack = p.vstack[:len(p.vstack)-1] +} + +// push a recovery expression with its labels to the recoveryStack +func (p *parser) pushRecovery(labels []string, expr interface{}) { + if cap(p.recoveryStack) == len(p.recoveryStack) { + // create new empty slot in the stack + p.recoveryStack = append(p.recoveryStack, nil) + } else { + // slice to 1 more + p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)+1] + } + + m := make(map[string]interface{}, len(labels)) + for _, fl := range labels { + m[fl] = expr + } + p.recoveryStack[len(p.recoveryStack)-1] = m +} + +// pop a recovery expression from the recoveryStack +func (p *parser) popRecovery() { + // GC that map + p.recoveryStack[len(p.recoveryStack)-1] = nil + + p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)-1] +} + +func (p *parser) print(prefix, s string) string { + if !p.debug { + return s + } + + fmt.Printf("%s %d:%d:%d: %s [%#U]\n", + prefix, p.pt.line, p.pt.col, p.pt.offset, s, p.pt.rn) + return s +} + +func (p *parser) in(s string) string { + p.depth++ + return p.print(strings.Repeat(" ", p.depth)+">", s) +} + +func (p *parser) out(s string) string { + p.depth-- + return p.print(strings.Repeat(" ", p.depth)+"<", s) +} + +func (p *parser) addErr(err error) { + p.addErrAt(err, p.pt.position, []string{}) +} + +func (p *parser) addErrAt(err error, pos position, expected []string) { + var buf bytes.Buffer + if p.filename != "" { + buf.WriteString(p.filename) + } + if buf.Len() > 0 { + buf.WriteString(":") + } + buf.WriteString(fmt.Sprintf("%d:%d (%d)", pos.line, pos.col, pos.offset)) + if len(p.rstack) > 0 { + if buf.Len() > 0 { + buf.WriteString(": ") + } + rule := p.rstack[len(p.rstack)-1] + if rule.displayName != "" { + buf.WriteString("rule " + rule.displayName) + } else { + buf.WriteString("rule " + rule.name) + } + } + 
pe := &parserError{Inner: err, pos: pos, prefix: buf.String(), expected: expected} + p.errs.add(pe) +} + +func (p *parser) failAt(fail bool, pos position, want string) { + // process fail if parsing fails and not inverted or parsing succeeds and invert is set + if fail == p.maxFailInvertExpected { + if pos.offset < p.maxFailPos.offset { + return + } + + if pos.offset > p.maxFailPos.offset { + p.maxFailPos = pos + p.maxFailExpected = p.maxFailExpected[:0] + } + + if p.maxFailInvertExpected { + want = "!" + want + } + p.maxFailExpected = append(p.maxFailExpected, want) + } +} + +// read advances the parser to the next rune. +func (p *parser) read() { + p.pt.offset += p.pt.w + rn, n := utf8.DecodeRune(p.data[p.pt.offset:]) + p.pt.rn = rn + p.pt.w = n + p.pt.col++ + if rn == '\n' { + p.pt.line++ + p.pt.col = 0 + } + + if rn == utf8.RuneError && n == 1 { // see utf8.DecodeRune + if !p.allowInvalidUTF8 { + p.addErr(errInvalidEncoding) + } + } +} + +// restore parser position to the savepoint pt. +func (p *parser) restore(pt savepoint) { + if p.debug { + defer p.out(p.in("restore")) + } + if pt.offset == p.pt.offset { + return + } + p.pt = pt +} + +// Cloner is implemented by any value that has a Clone method, which returns a +// copy of the value. This is mainly used for types which are not passed by +// value (e.g map, slice, chan) or structs that contain such types. +// +// This is used in conjunction with the global state feature to create proper +// copies of the state to allow the parser to properly restore the state in +// the case of backtracking. +type Cloner interface { + Clone() interface{} +} + +var statePool = &sync.Pool{ + New: func() interface{} { return make(storeDict) }, +} + +func (sd storeDict) Discard() { + for k := range sd { + delete(sd, k) + } + statePool.Put(sd) +} + +// clone and return parser current state. +func (p *parser) cloneState() storeDict { + if p.debug { + defer p.out(p.in("cloneState")) + } + + state := statePool.Get().(storeDict) + for k, v := range p.cur.state { + if c, ok := v.(Cloner); ok { + state[k] = c.Clone() + } else { + state[k] = v + } + } + return state +} + +// restore parser current state to the state storeDict. +// every restoreState should applied only one time for every cloned state +func (p *parser) restoreState(state storeDict) { + if p.debug { + defer p.out(p.in("restoreState")) + } + p.cur.state.Discard() + p.cur.state = state +} + +// get the slice of bytes from the savepoint start to the current position. 
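+// This is the text exposed to action code as c.text and printed for MATCH
+// lines in debug output.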
+func (p *parser) sliceFrom(start savepoint) []byte { + return p.data[start.position.offset:p.pt.position.offset] +} + +func (p *parser) getMemoized(node interface{}) (resultTuple, bool) { + if len(p.memo) == 0 { + return resultTuple{}, false + } + m := p.memo[p.pt.offset] + if len(m) == 0 { + return resultTuple{}, false + } + res, ok := m[node] + return res, ok +} + +func (p *parser) setMemoized(pt savepoint, node interface{}, tuple resultTuple) { + if p.memo == nil { + p.memo = make(map[int]map[interface{}]resultTuple) + } + m := p.memo[pt.offset] + if m == nil { + m = make(map[interface{}]resultTuple) + p.memo[pt.offset] = m + } + m[node] = tuple +} + +func (p *parser) buildRulesTable(g *grammar) { + p.rules = make(map[string]*rule, len(g.rules)) + for _, r := range g.rules { + p.rules[r.name] = r + } +} + +func (p *parser) parse(g *grammar) (val interface{}, err error) { + if len(g.rules) == 0 { + p.addErr(errNoRule) + return nil, p.errs.err() + } + + // TODO : not super critical but this could be generated + p.buildRulesTable(g) + + if p.recover { + // panic can be used in action code to stop parsing immediately + // and return the panic as an error. + defer func() { + if e := recover(); e != nil { + if p.debug { + defer p.out(p.in("panic handler")) + } + val = nil + switch e := e.(type) { + case error: + p.addErr(e) + default: + p.addErr(fmt.Errorf("%v", e)) + } + err = p.errs.err() + } + }() + } + + startRule, ok := p.rules[p.entrypoint] + if !ok { + p.addErr(errInvalidEntrypoint) + return nil, p.errs.err() + } + + p.read() // advance to first rune + val, ok = p.parseRule(startRule) + if !ok { + if len(*p.errs) == 0 { + // If parsing fails, but no errors have been recorded, the expected values + // for the farthest parser position are returned as error. 
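+			// Deduplicate the expected tokens, translate the "!." marker into a
+			// trailing "EOF" entry and sort the rest for a stable error message.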
+ maxFailExpectedMap := make(map[string]struct{}, len(p.maxFailExpected)) + for _, v := range p.maxFailExpected { + maxFailExpectedMap[v] = struct{}{} + } + expected := make([]string, 0, len(maxFailExpectedMap)) + eof := false + if _, ok := maxFailExpectedMap["!."]; ok { + delete(maxFailExpectedMap, "!.") + eof = true + } + for k := range maxFailExpectedMap { + expected = append(expected, k) + } + sort.Strings(expected) + if eof { + expected = append(expected, "EOF") + } + p.addErrAt(errors.New("no match found, expected: "+listJoin(expected, ", ", "or")), p.maxFailPos, expected) + } + + return nil, p.errs.err() + } + return val, p.errs.err() +} + +func listJoin(list []string, sep string, lastSep string) string { + switch len(list) { + case 0: + return "" + case 1: + return list[0] + default: + return strings.Join(list[:len(list)-1], sep) + " " + lastSep + " " + list[len(list)-1] + } +} + +func (p *parser) parseRule(rule *rule) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRule " + rule.name)) + } + + if p.memoize { + res, ok := p.getMemoized(rule) + if ok { + p.restore(res.end) + return res.v, res.b + } + } + + start := p.pt + p.rstack = append(p.rstack, rule) + p.pushV() + val, ok := p.parseExpr(rule.expr) + p.popV() + p.rstack = p.rstack[:len(p.rstack)-1] + if ok && p.debug { + p.print(strings.Repeat(" ", p.depth)+"MATCH", string(p.sliceFrom(start))) + } + + if p.memoize { + p.setMemoized(start, rule, resultTuple{val, ok, p.pt}) + } + return val, ok +} + +func (p *parser) parseExpr(expr interface{}) (interface{}, bool) { + var pt savepoint + + if p.memoize { + res, ok := p.getMemoized(expr) + if ok { + p.restore(res.end) + return res.v, res.b + } + pt = p.pt + } + + p.ExprCnt++ + if p.ExprCnt > p.maxExprCnt { + panic(errMaxExprCnt) + } + + var val interface{} + var ok bool + switch expr := expr.(type) { + case *actionExpr: + val, ok = p.parseActionExpr(expr) + case *andCodeExpr: + val, ok = p.parseAndCodeExpr(expr) + case *andExpr: + val, ok = p.parseAndExpr(expr) + case *anyMatcher: + val, ok = p.parseAnyMatcher(expr) + case *charClassMatcher: + val, ok = p.parseCharClassMatcher(expr) + case *choiceExpr: + val, ok = p.parseChoiceExpr(expr) + case *labeledExpr: + val, ok = p.parseLabeledExpr(expr) + case *litMatcher: + val, ok = p.parseLitMatcher(expr) + case *notCodeExpr: + val, ok = p.parseNotCodeExpr(expr) + case *notExpr: + val, ok = p.parseNotExpr(expr) + case *oneOrMoreExpr: + val, ok = p.parseOneOrMoreExpr(expr) + case *recoveryExpr: + val, ok = p.parseRecoveryExpr(expr) + case *ruleRefExpr: + val, ok = p.parseRuleRefExpr(expr) + case *seqExpr: + val, ok = p.parseSeqExpr(expr) + case *stateCodeExpr: + val, ok = p.parseStateCodeExpr(expr) + case *throwExpr: + val, ok = p.parseThrowExpr(expr) + case *zeroOrMoreExpr: + val, ok = p.parseZeroOrMoreExpr(expr) + case *zeroOrOneExpr: + val, ok = p.parseZeroOrOneExpr(expr) + default: + panic(fmt.Sprintf("unknown expression type %T", expr)) + } + if p.memoize { + p.setMemoized(pt, expr, resultTuple{val, ok, p.pt}) + } + return val, ok +} + +func (p *parser) parseActionExpr(act *actionExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseActionExpr")) + } + + start := p.pt + val, ok := p.parseExpr(act.expr) + if ok { + p.cur.pos = start.position + p.cur.text = p.sliceFrom(start) + state := p.cloneState() + actVal, err := act.run(p) + if err != nil { + p.addErrAt(err, start.position, []string{}) + } + p.restoreState(state) + + val = actVal + } + if ok && p.debug { + p.print(strings.Repeat(" ", p.depth)+"MATCH", 
string(p.sliceFrom(start))) + } + return val, ok +} + +func (p *parser) parseAndCodeExpr(and *andCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAndCodeExpr")) + } + + state := p.cloneState() + + ok, err := and.run(p) + if err != nil { + p.addErr(err) + } + p.restoreState(state) + + return nil, ok +} + +func (p *parser) parseAndExpr(and *andExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAndExpr")) + } + + pt := p.pt + state := p.cloneState() + p.pushV() + _, ok := p.parseExpr(and.expr) + p.popV() + p.restoreState(state) + p.restore(pt) + + return nil, ok +} + +func (p *parser) parseAnyMatcher(any *anyMatcher) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseAnyMatcher")) + } + + if p.pt.rn == utf8.RuneError && p.pt.w == 0 { + // EOF - see utf8.DecodeRune + p.failAt(false, p.pt.position, ".") + return nil, false + } + start := p.pt + p.read() + p.failAt(true, start.position, ".") + return p.sliceFrom(start), true +} + +func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseCharClassMatcher")) + } + + cur := p.pt.rn + start := p.pt + + // can't match EOF + if cur == utf8.RuneError && p.pt.w == 0 { // see utf8.DecodeRune + p.failAt(false, start.position, chr.val) + return nil, false + } + + if chr.ignoreCase { + cur = unicode.ToLower(cur) + } + + // try to match in the list of available chars + for _, rn := range chr.chars { + if rn == cur { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + // try to match in the list of ranges + for i := 0; i < len(chr.ranges); i += 2 { + if cur >= chr.ranges[i] && cur <= chr.ranges[i+1] { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + // try to match in the list of Unicode classes + for _, cl := range chr.classes { + if unicode.Is(cl, cur) { + if chr.inverted { + p.failAt(false, start.position, chr.val) + return nil, false + } + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + } + + if chr.inverted { + p.read() + p.failAt(true, start.position, chr.val) + return p.sliceFrom(start), true + } + p.failAt(false, start.position, chr.val) + return nil, false +} + +func (p *parser) incChoiceAltCnt(ch *choiceExpr, altI int) { + choiceIdent := fmt.Sprintf("%s %d:%d", p.rstack[len(p.rstack)-1].name, ch.pos.line, ch.pos.col) + m := p.ChoiceAltCnt[choiceIdent] + if m == nil { + m = make(map[string]int) + p.ChoiceAltCnt[choiceIdent] = m + } + // We increment altI by 1, so the keys do not start at 0 + alt := strconv.Itoa(altI + 1) + if altI == choiceNoMatch { + alt = p.choiceNoMatch + } + m[alt]++ +} + +func (p *parser) parseChoiceExpr(ch *choiceExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseChoiceExpr")) + } + + for altI, alt := range ch.alternatives { + // dummy assignment to prevent compile error if optimized + _ = altI + + state := p.cloneState() + + p.pushV() + val, ok := p.parseExpr(alt) + p.popV() + if ok { + p.incChoiceAltCnt(ch, altI) + return val, ok + } + p.restoreState(state) + } + p.incChoiceAltCnt(ch, choiceNoMatch) + return nil, false +} + +func (p *parser) parseLabeledExpr(lab *labeledExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseLabeledExpr")) + } + + p.pushV() + val, ok := 
p.parseExpr(lab.expr) + p.popV() + if ok && lab.label != "" { + m := p.vstack[len(p.vstack)-1] + m[lab.label] = val + } + return val, ok +} + +func (p *parser) parseLitMatcher(lit *litMatcher) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseLitMatcher")) + } + + start := p.pt + for _, want := range lit.val { + cur := p.pt.rn + if lit.ignoreCase { + cur = unicode.ToLower(cur) + } + if cur != want { + p.failAt(false, start.position, lit.want) + p.restore(start) + return nil, false + } + p.read() + } + p.failAt(true, start.position, lit.want) + return p.sliceFrom(start), true +} + +func (p *parser) parseNotCodeExpr(not *notCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseNotCodeExpr")) + } + + state := p.cloneState() + + ok, err := not.run(p) + if err != nil { + p.addErr(err) + } + p.restoreState(state) + + return nil, !ok +} + +func (p *parser) parseNotExpr(not *notExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseNotExpr")) + } + + pt := p.pt + state := p.cloneState() + p.pushV() + p.maxFailInvertExpected = !p.maxFailInvertExpected + _, ok := p.parseExpr(not.expr) + p.maxFailInvertExpected = !p.maxFailInvertExpected + p.popV() + p.restoreState(state) + p.restore(pt) + + return nil, !ok +} + +func (p *parser) parseOneOrMoreExpr(expr *oneOrMoreExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseOneOrMoreExpr")) + } + + var vals []interface{} + + for { + p.pushV() + val, ok := p.parseExpr(expr.expr) + p.popV() + if !ok { + if len(vals) == 0 { + // did not match once, no match + return nil, false + } + return vals, true + } + vals = append(vals, val) + } +} + +func (p *parser) parseRecoveryExpr(recover *recoveryExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRecoveryExpr (" + strings.Join(recover.failureLabel, ",") + ")")) + } + + p.pushRecovery(recover.failureLabel, recover.recoverExpr) + val, ok := p.parseExpr(recover.expr) + p.popRecovery() + + return val, ok +} + +func (p *parser) parseRuleRefExpr(ref *ruleRefExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseRuleRefExpr " + ref.name)) + } + + if ref.name == "" { + panic(fmt.Sprintf("%s: invalid rule: missing name", ref.pos)) + } + + rule := p.rules[ref.name] + if rule == nil { + p.addErr(fmt.Errorf("undefined rule: %s", ref.name)) + return nil, false + } + return p.parseRule(rule) +} + +func (p *parser) parseSeqExpr(seq *seqExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseSeqExpr")) + } + + vals := make([]interface{}, 0, len(seq.exprs)) + + pt := p.pt + state := p.cloneState() + for _, expr := range seq.exprs { + val, ok := p.parseExpr(expr) + if !ok { + p.restoreState(state) + p.restore(pt) + return nil, false + } + vals = append(vals, val) + } + return vals, true +} + +func (p *parser) parseStateCodeExpr(state *stateCodeExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseStateCodeExpr")) + } + + err := state.run(p) + if err != nil { + p.addErr(err) + } + return nil, true +} + +func (p *parser) parseThrowExpr(expr *throwExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseThrowExpr")) + } + + for i := len(p.recoveryStack) - 1; i >= 0; i-- { + if recoverExpr, ok := p.recoveryStack[i][expr.label]; ok { + if val, ok := p.parseExpr(recoverExpr); ok { + return val, ok + } + } + } + + return nil, false +} + +func (p *parser) parseZeroOrMoreExpr(expr *zeroOrMoreExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseZeroOrMoreExpr")) + } + + var vals []interface{} + + for { + 
p.pushV() + val, ok := p.parseExpr(expr.expr) + p.popV() + if !ok { + return vals, true + } + vals = append(vals, val) + } +} + +func (p *parser) parseZeroOrOneExpr(expr *zeroOrOneExpr) (interface{}, bool) { + if p.debug { + defer p.out(p.in("parseZeroOrOneExpr")) + } + + p.pushV() + val, _ := p.parseExpr(expr.expr) + p.popV() + // whether it matched or not, consider it a match + return val, true +} \ No newline at end of file diff --git a/filter/filter.peg b/filter/filter.peg new file mode 100644 index 00000000..f7d476fd --- /dev/null +++ b/filter/filter.peg @@ -0,0 +1,478 @@ +{ + + package filter + + import ( + "fmt" + "strconv" + "strings" + ) + + type ComparisonNode struct { + Type string + Operator CodeNode + Values interface{} + } + + type CodeNode struct { + Type string + Source string + Value string + JsonbSelector []CodeNode + } + + type FunctionNode struct { + Name string + Function CodeNode + Args []CodeNode + } + + func toIfaceSlice(v interface{}) []interface{} { + if v == nil { + return nil + } + return v.([]interface{}) + } + + func eval(first, rest interface{}) []interface{} { + exprs := []interface{}{} + exprs = append(exprs, first) + restSl := toIfaceSlice(rest) + for _, v := range restSl { + restStmt := toIfaceSlice(v) + exprs = append(exprs, restStmt[3]) + } + return exprs + } + +} + +Input <- _ i:OrComparison _ EOF { + return i, nil +} + +OrComparison <- first:AndComparison rest:( _ Or _ AndComparison )* { + exprs := eval(first, rest) + if len(exprs) <= 1 { + return first, nil + } + n := ComparisonNode{ + Type: "or", + Values: exprs, + } + return n, nil +} + +AndComparison <- first:Comparison rest:( _ And _ Comparison )* { + exprs := eval(first, rest) + if len(exprs) <= 1 { + return first, nil + } + n := ComparisonNode{ + Type: "and", + Values: exprs, + } + return n, nil +} + + +// +// Comparisons +// + +Comparison <- MultiComparison / NotComparison / LeftRightComparison / LikeComparison / IsComparison / InComparison / IdentifierComparison + +MultiComparison <- '(' _ expr:OrComparison _ ')' { + return expr, nil +} + +NotComparison <- op:Not _ right:Comparison { + n := ComparisonNode{ + Type: "not", + Operator: op.(CodeNode), + Values: []ComparisonNode{right.(ComparisonNode)}, + } + return n, nil +} + +LeftRightComparison <- left:Value _ op:CompareOperator _ right:Value { + n := ComparisonNode{ + Type: "compare", + Operator: op.(CodeNode), + Values: []CodeNode{left.(CodeNode), right.(CodeNode)}, + } + return n, nil +} + +LikeComparison <- left:(Identifier) _ op:Like _ right:(String) { + n := ComparisonNode{ + Type: "like", + Operator: op.(CodeNode), + Values: []CodeNode{left.(CodeNode), right.(CodeNode)}, + } + return n, nil +} + +IsComparison <- left:(Identifier / Null / Bool) _ op:Is _ right:(Null / Bool) { + n := ComparisonNode{ + Type: "is", + Operator: op.(CodeNode), + Values: []CodeNode{left.(CodeNode), right.(CodeNode)}, + } + return n, nil +} + +InComparison <- first:Value _ op:In _ rest:InList { + exprs := []CodeNode{first.(CodeNode)} + resti := toIfaceSlice(rest) + for _, v := range resti { + exprs = append(exprs, v.(CodeNode)) + } + n := ComparisonNode{ + Type: "in", + Operator: op.(CodeNode), + Values: exprs, + } + return n, nil +} + +IdentifierComparison <- i:Identifier { + n := ComparisonNode{ + Type: "identifier", + Values: []CodeNode{i.(CodeNode)}, + } + return n, nil +} + + +// +// In List +// + +InList <- EmptyInList / NonEmptyInList + +EmptyInList <- '(' _ ')' { + return []interface{}{}, nil +} + +NonEmptyInList <- '(' _ first:UnbracketedValue rest:( _ 
',' _ UnbracketedValue)* _ ')' { + exprs := eval(first, rest) + return exprs, nil +} + + +// +// Values +// + +Value <- BracketedValue / UnbracketedValue + +BracketedValue <- '(' _ i:Value _ ')' { + return i, nil +} + +UnbracketedValue <- TimeCalculation / Constant / Jsonb / Identifier + + +// +// Identifiers +// + +Identifier <- Jsonb / ColumnIdentifier + +ColumnIdentifier <- QuotedIdentifier / UnquotedIdentifier + +QuotedIdentifier <- `"` (`""` / [^"])* `"` { + src := string(c.text) + value := strings.ReplaceAll(src[1:len(src)-1], `""`, `"`) + n := CodeNode{ + Type: "quoted_identifier", + Source: src, + Value: value, + } + return n, nil +} + +UnquotedIdentifier <- [A-Za-z_][A-Za-z0-9_]* { + src := string(c.text) + n := CodeNode{ + Type: "unquoted_identifier", + Source: src, + Value: strings.ToLower(src), + } + return n, nil +} + +Jsonb <- i:ColumnIdentifier _ op:JsonbOperator _ first:JsonbField rest:(_ JsonbOperator _ JsonbField)* { + n := i.(CodeNode) + n.JsonbSelector = []CodeNode{op.(CodeNode), first.(CodeNode)} + resti := toIfaceSlice(rest) + for _, e := range resti { + ei := toIfaceSlice(e) + n.JsonbSelector = append(n.JsonbSelector, ei[1].(CodeNode), ei[3].(CodeNode)) + } + return n, nil +} + +JsonbField <- String / Integer + +JsonbOperator <- "->" '>'? { + s := string(c.text) + n := CodeNode{ + Type: "operator", + Source: s, + Value: s, + } + return n, nil +} + + +// +// Operators +// + +// Order of the operators here is important for matching +CompareOperator <- ("<=" / "<>" / ">=" / "!=" / "<" / "=" / ">") { + s := string(c.text) + n := CodeNode{ + Type: "operator", + Source: s, + Value: s, + } + return n, nil +} + +And <- "and"i { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "and", + } + return n, nil +} + +Or <- "or"i { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "or", + } + return n, nil +} + +Not <- "not"i { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "not", + } + return n, nil +} + +In <- not:("not"i _)? "in"i { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "in", + } + if not != nil { + n.Value = "not in" + } + return n, nil +} + +Like <- not:("not"i _)? like:LikeOrIlike { + src := string(c.text) + likeStr := strings.ToLower(like.(string)) + n := CodeNode{ + Type: "operator", + Source: src, + Value: likeStr, + } + if not != nil { + n.Value = "not " + likeStr + } + return n, nil +} + +LikeOrIlike <- ("like"i / "ilike"i) { + src := string(c.text) + return src, nil +} + +Is <- "is"i not:(_ "not"i)? { + src := string(c.text) + n := CodeNode{ + Type: "operator", + Source: src, + Value: "is", + } + if not != nil { + n.Value = "is not" + } + return n, nil +} + + +// +// Functions +// +// Specific set of supported Postgres functions. +// + +TimeCalculation <- now:NoArgsFunction interval:(_ Add _ StringOperatorFunction)? 
{ + n := CodeNode{ + Type: "time_calculation", + Source: string(c.text), + Value: "now()", + } + if interval != nil { + intervalSlice := toIfaceSlice(interval) + addOp := intervalSlice[1].(CodeNode).Value + intervalString, _ := CodeToSQL(intervalSlice[3].(FunctionNode).Args[0]) + n.Value += fmt.Sprintf(" %s interval %s", addOp, intervalString) + } + return n, nil +} + +Add <- ('+' / '-') { + s := string(c.text) + n := CodeNode{ + Type: "operator", + Source: s, + Value: s, + } + return n, nil +} + +Function <- StringOperatorFunction / NoArgsFunction + +NoArgsFunction <- NoArgsFunctionName `()` { + src := string(c.text) + fnName := src[:len(src)-2] + n := FunctionNode{ + Name: "function", + Function: CodeNode{ + Type: "function_name", + Source: fnName, + Value: strings.ToLower(fnName), + }, + Args: []CodeNode{}, + } + return n, nil +} + +NoArgsFunctionName <- "now"i + +StringOperatorFunction <- fn:StringOperatorFunctionName _ s:String { + src := string(c.text) + fnName := src[:len(src)-2] + n := FunctionNode{ + Name: "function", + Function: CodeNode{ + Type: "function_name", + Source: fnName, + Value: strings.ToLower(fnName), + }, + Args: []CodeNode{s.(CodeNode)}, + } + return n, nil +} + +StringOperatorFunctionName <- "interval"i + + +// +// Constants +// + +Constant <- Bool / Number / String + +String <- `'` (`''` / [^'])* `'` { + src := string(c.text) + value := strings.ReplaceAll(src[1:len(src)-1], "''", "'") + n := CodeNode{ + Type: "string", + Source: src, + Value: value, + } + return n, nil +} + +Number <- '-'? Integer ( '.' DecimalDigit+ )? Exponent? { + // JSON numbers have the same syntax as Go's, and are parseable using + // strconv. + src := string(c.text) + n := CodeNode{ + Type: "number", + Source: src, + Value: src, + } + return n, nil +} + +Integer <- ('0' / NonZeroDecimalDigit DecimalDigit*) { + src := string(c.text) + n := CodeNode{ + Type: "number", + Source: src, + Value: src, + } + return n, nil +} + +Exponent <- 'e'i [+-]? DecimalDigit+ + +DecimalDigit <- [0-9] + +NonZeroDecimalDigit <- [1-9] + +Bool <- ("true"i / "false"i) { + src := string(c.text) + n := CodeNode{ + Type: "bool", + Source: src, + Value: strings.ToLower(src), + } + return n, nil +} + +Null <- "null"i { + src := string(c.text) + n := CodeNode{ + Type: "null", + Source: src, + Value: strings.ToLower(src), + } + return n, nil +} + + +// +// Layout +// + + +// Whitespace +_ <- [ \n\t\r]* { + src := string(c.text) + n := CodeNode{ + Type: "whitespace", + Source: src, + } + if len(src) > 0 { n.Value = " " } + return n, nil +} + +EOF <- !. 
{ + n := CodeNode{ + Type: "eof", + Source: "", + Value: "", + } + return n, nil +} \ No newline at end of file diff --git a/filter/filter_test.go b/filter/filter_test.go new file mode 100644 index 00000000..06f0d157 --- /dev/null +++ b/filter/filter_test.go @@ -0,0 +1,247 @@ +package filter + +import ( + "strings" + "testing" +) + +var validCases = map[string]string{ + + // Formatting + `foo = 'foo'`: `( "foo" = 'foo' )`, + `foo = 'foo'`: `( "foo" = 'foo' )`, + `foo='foo'`: `( "foo" = 'foo' )`, + `foo = 'foo'`: `( "foo" = 'foo' )`, + ` foo = 'foo'`: `( "foo" = 'foo' )`, + `foo = 'foo' `: `( "foo" = 'foo' )`, + ` foo = 'foo' `: `( "foo" = 'foo' )`, + "\n\nfoo\n=\n'foo'\n\n": `( "foo" = 'foo' )`, + `(foo = 'foo')`: `( "foo" = 'foo' )`, + `((foo = 'foo'))`: `( "foo" = 'foo' )`, + "foo = bar": `( "foo" = "bar" )`, + + // String constants + `foo = ''`: `( "foo" = '' )`, + `foo = ' '`: `( "foo" = ' ' )`, + `foo = 'with ''escaped'' quotes'`: `( "foo" = 'with ''escaped'' quotes' )`, + `foo = '''fully escaped'''`: `( "foo" = '''fully escaped''' )`, + `'foo' = foo`: `( 'foo' = "foo" )`, + + // Numbers + "foo = 123": `( "foo" = 123 )`, + "foo = -123": `( "foo" = -123 )`, + "foo = 0": `( "foo" = 0 )`, + "foo = 0.0": `( "foo" = 0.0 )`, + "foo = 1.23": `( "foo" = 1.23 )`, + "foo = -1.23": `( "foo" = -1.23 )`, + "123 = foo": `( 123 = "foo" )`, + + // Time calculations + "foo = now()": `( "foo" = now() )`, + "foo = now()+interval '1 hr'": `( "foo" = now() + interval '1 hr' )`, + "foo = now() + interval '1 hr'": `( "foo" = now() + interval '1 hr' )`, + "foo = now() - interval '1 day'": `( "foo" = now() - interval '1 day' )`, + "foo = now() - interval '2 weeks'": `( "foo" = now() - interval '2 weeks' )`, + "foo = NOW() - INTERVAL '2 weeks'": `( "foo" = now() - interval '2 weeks' )`, + "now() = foo": `( now() = "foo" )`, + + // Booleans + `foo`: `"foo"`, + `"FOO"`: `"FOO"`, + `"with ""escaped"" quotes"`: `"with ""escaped"" quotes"`, + `"""fully escaped"""`: `"""fully escaped"""`, + `foo is true`: `( "foo" is true )`, + `foo is TRue`: `( "foo" is true )`, + `foo is false`: `( "foo" is false )`, + `foo is FALSE`: `( "foo" is false )`, + `foo = true`: `( "foo" = true )`, + `foo = false`: `( "foo" = false )`, + `true = foo`: `( true = "foo" )`, + `foo is not false`: `( "foo" is not false )`, + `foo is not TRUE`: `( "foo" is not true )`, + + // Not + `not foo`: `( not "foo" )`, + `not "FOO"`: `( not "FOO" )`, + `not foo and not bar`: `( ( not "foo" ) and ( not "bar" ) )`, + `not foo = 'foo'`: `( not ( "foo" = 'foo' ) )`, + `foo = 'foo' and not bar = 'bar'`: `( ( "foo" = 'foo' ) and ( not ( "bar" = 'bar' ) ) )`, + `not foo = 'foo' or not bar = 'bar'`: `( ( not ( "foo" = 'foo' ) ) or ( not ( "bar" = 'bar' ) ) )`, + `not (foo = 'foo' and bar = 'bar')`: `( not ( ( "foo" = 'foo' ) and ( "bar" = 'bar' ) ) )`, + `foo = 'foo' and not (bar = 'bar' and baz = 'baz')`: `( ( "foo" = 'foo' ) and ( not ( ( "bar" = 'bar' ) and ( "baz" = 'baz' ) ) ) )`, + + // LIKE + `foo like ''`: `( "foo" like '' )`, + `foo like 'bar'`: `( "foo" like 'bar' )`, + `foo like 'bar%'`: `( "foo" like 'bar%' )`, + `foo like '%bar%'`: `( "foo" like '%bar%' )`, + `foo like 'bar''s baz'`: `( "foo" like 'bar''s baz' )`, + `foo like 'bar%baz'`: `( "foo" like 'bar%baz' )`, + `foo like 'bar_baz'`: `( "foo" like 'bar_baz' )`, + `foo LIKE 'bar'`: `( "foo" like 'bar' )`, + `foo LiKe 'bar'`: `( "foo" like 'bar' )`, + `foo not like 'bar'`: `( "foo" not like 'bar' )`, + `foo NoT LiKe 'bar'`: `( "foo" not like 'bar' )`, + + // ILIKE + `foo ilike ''`: `( "foo" 
ilike '' )`, + `foo ilike 'bar'`: `( "foo" ilike 'bar' )`, + `foo ilike 'bar%'`: `( "foo" ilike 'bar%' )`, + `foo ilike '%bar%'`: `( "foo" ilike '%bar%' )`, + `foo ilike 'bar''s baz'`: `( "foo" ilike 'bar''s baz' )`, + `foo ilike 'bar%baz'`: `( "foo" ilike 'bar%baz' )`, + `foo ilike 'bar_baz'`: `( "foo" ilike 'bar_baz' )`, + `foo iLIKE 'bar'`: `( "foo" ilike 'bar' )`, + `foo iLiKe 'bar'`: `( "foo" ilike 'bar' )`, + `foo not ilike 'bar'`: `( "foo" not ilike 'bar' )`, + `foo NoT iLiKe 'bar'`: `( "foo" not ilike 'bar' )`, + + // In + `foo in ()`: `( "foo" in ( ) )`, + `foo in (12)`: `( "foo" in ( 12 ) )`, + `foo in (12,23)`: `( "foo" in ( 12, 23 ) )`, + `foo in ( 12, 23)`: `( "foo" in ( 12, 23 ) )`, + `foo in (12, 23 )`: `( "foo" in ( 12, 23 ) )`, + `foo in (12,23 )`: `( "foo" in ( 12, 23 ) )`, + `foo in ( 12,23 )`: `( "foo" in ( 12, 23 ) )`, + `foo in ( 12,23)`: `( "foo" in ( 12, 23 ) )`, + `foo in ('foo', 'bar')`: `( "foo" in ( 'foo', 'bar' ) )`, + `foo IN (12)`: `( "foo" in ( 12 ) )`, + `foo not in ()`: `( "foo" not in ( ) )`, + `foo not in (12)`: `( "foo" not in ( 12 ) )`, + `foo not in ( 'foo' , 'bar' )`: `( "foo" not in ( 'foo', 'bar' ) )`, + `foo NoT In (12)`: `( "foo" not in ( 12 ) )`, + + // Null + `foo is null`: `( "foo" is null )`, + `foo is NULL`: `( "foo" is null )`, + `foo is not NULL`: `( "foo" is not null )`, + + // Comparison operators + `foo < 24`: `( "foo" < 24 )`, + `foo <= 24`: `( "foo" <= 24 )`, + `foo = 24`: `( "foo" = 24 )`, + `foo != 24`: `( "foo" != 24 )`, + `foo <> 24`: `( "foo" <> 24 )`, + `foo >= 24`: `( "foo" >= 24 )`, + `foo > 24`: `( "foo" > 24 )`, + + // Identifiers + `_ = 'foo'`: `( "_" = 'foo' )`, + `Foo = 'foo'`: `( "foo" = 'foo' )`, + `FoO = 'foo'`: `( "foo" = 'foo' )`, + `foo_bar = 'foo'`: `( "foo_bar" = 'foo' )`, + `f123 = 'foo'`: `( "f123" = 'foo' )`, + `foo__bar = 'foo'`: `( "foo__bar" = 'foo' )`, + `foo__bar__ = 'foo'`: `( "foo__bar__" = 'foo' )`, + `__foo__bar__ = 'foo'`: `( "__foo__bar__" = 'foo' )`, + `"foo" = 'foo'`: `( "foo" = 'foo' )`, + `"foo bar" = 'foo'`: `( "foo bar" = 'foo' )`, + `"FoO BaR" = 'foo'`: `( "FoO BaR" = 'foo' )`, + `" foo bar " = 'foo'`: `( " foo bar " = 'foo' )`, + `"123_foo_bar" = 'foo'`: `( "123_foo_bar" = 'foo' )`, + `"with ""escaped"" quotes" = 'foo'`: `( "with ""escaped"" quotes" = 'foo' )`, + `"""fully escaped""" = 'foo'`: `( """fully escaped""" = 'foo' )`, + + // JSONB + `foo ->> 'foo' = 'foo'`: `( "foo" ->> 'foo' = 'foo' )`, + `foo ->> 0 = 'foo'`: `( "foo" ->> 0 = 'foo' )`, + `foo = bar ->> 'bar'`: `( "foo" = "bar" ->> 'bar' )`, + `foo -> 'foo' -> 'bar'`: `"foo" -> 'foo' -> 'bar'`, + `foo -> 'foo' ->> 'bar' < "FOO" ->> 'foo'`: `( "foo" -> 'foo' ->> 'bar' < "FOO" ->> 'foo' )`, + `foo -> 'with ''escaped'' quotes' = bar ->> '''fully escaped'''`: `( "foo" -> 'with ''escaped'' quotes' = "bar" ->> '''fully escaped''' )`, + + // AND and OR + `foo and bar`: `( "foo" and "bar" )`, + `foo or bar`: `( "foo" or "bar" )`, + `foo and bar or baz`: `( ( "foo" and "bar" ) or "baz" )`, + `foo and (bar or baz)`: `( "foo" and ( "bar" or "baz" ) )`, + `foo = 'foo' and bar = 'bar'`: `( ( "foo" = 'foo' ) and ( "bar" = 'bar' ) )`, + `foo = 'foo' or bar = 'bar'`: `( ( "foo" = 'foo' ) or ( "bar" = 'bar' ) )`, + `"FOO" = 'foo' and "BAR" = 'bar'`: `( ( "FOO" = 'foo' ) and ( "BAR" = 'bar' ) )`, + `foo = 'foo' aNd bar = 'bar'`: `( ( "foo" = 'foo' ) and ( "bar" = 'bar' ) )`, + `foo = 12 AND bar = 34`: `( ( "foo" = 12 ) and ( "bar" = 34 ) )`, + `foo = 12 AND bar = 34 and baz > 24`: `( ( "foo" = 12 ) and ( "bar" = 34 ) and ( "baz" > 24 ) )`, + `foo = 
12 or bar = 34 or baz > 24`: `( ( "foo" = 12 ) or ( "bar" = 34 ) or ( "baz" > 24 ) )`, + `foo = 12 and bar = 34 or baz > 24`: `( ( ( "foo" = 12 ) and ( "bar" = 34 ) ) or ( "baz" > 24 ) )`, + `foo = 12 and (bar = 34 or baz > 24)`: `( ( "foo" = 12 ) and ( ( "bar" = 34 ) or ( "baz" > 24 ) ) )`, + `foo = 12 or (bar = 34 or baz > 24)`: `( ( "foo" = 12 ) or ( ( "bar" = 34 ) or ( "baz" > 24 ) ) )`, + `(foo = 12) and bar = 34`: `( ( "foo" = 12 ) and ( "bar" = 34 ) )`, + `(foo = 12) and (bar = 34)`: `( ( "foo" = 12 ) and ( "bar" = 34 ) )`, + `foo = 12 and (bar = 34)`: `( ( "foo" = 12 ) and ( "bar" = 34 ) )`, + + // Steampipe Cloud examples + `type = 'user'`: `( "type" = 'user' )`, + `type = 'org'`: `( "type" = 'org' )`, + `status = 'accepted'`: `( "status" = 'accepted' )`, + `status = 'invited'`: `( "status" = 'invited' )`, + `action_type = 'workspace.mod.variable.setting.create'`: `( "action_type" = 'workspace.mod.variable.setting.create' )`, + `created_at > now() - interval '7 days'`: `( "created_at" > now() - interval '7 days' )`, + `tags -> 'foo' is not null and created_at > now() - interval '7 days'`: `( ( "tags" -> 'foo' is not null ) and ( "created_at" > now() - interval '7 days' ) )`, + `tags ->> 'foo' = 'bar' and created_at > now() - interval '7 days'`: `( ( "tags" ->> 'foo' = 'bar' ) and ( "created_at" > now() - interval '7 days' ) )`, + `action_type = 'workspace.create' and identity_handle = 'jane' and created_at > '2022-07-14'`: `( ( "action_type" = 'workspace.create' ) and ( "identity_handle" = 'jane' ) and ( "created_at" > '2022-07-14' ) )`, +} + +func TestValidCases(t *testing.T) { + for tc, exp := range validCases { + got, err := Parse("", []byte(tc)) + if err != nil { + t.Errorf("%q: want no error, got %v", tc, err) + continue + } + sql, _, err := ComparisonToSQL(got.(ComparisonNode), []string{}) + if err != nil { + t.Errorf("SQL build error: %v", err) + continue + } + if exp != sql { + t.Errorf("%q: want %s, got %s", tc, exp, sql) + } + } +} + +var invalidCases = []string{ + + // Invalid SQL + "'foo", + "foo = ';delete from foo;", + + // Operators + "foo == 24", + "foo = = 24", + "foo => 24", + + // Identifiers + "123_foo_bar = 'foo'", + + // Operators type combinations + "foo is 24", + "foo is 'bar'", + "foo like 24", + "foo not like true", + "foo ilike false", + "foo not ilike 12", +} + +func TestInvalidCases(t *testing.T) { + for _, tc := range invalidCases { + got, err := Parse("", []byte(tc)) + if err == nil { + t.Errorf("%q: want error, got none (%v)", tc, got) + continue + } + el, ok := err.(errList) + if !ok { + t.Errorf("%q: want error type %T, got %T", tc, &errList{}, err) + continue + } + for _, e := range el { + if _, ok := e.(*parserError); !ok { + t.Errorf("%q: want all individual errors to be %T, got %T (%[3]v)", tc, &parserError{}, e) + } + } + if !strings.Contains(err.Error(), "no match found") { + t.Errorf("%q: wanted no match found, got \n%s\n", tc, err) + } + } +} diff --git a/filter/sql.go b/filter/sql.go new file mode 100644 index 00000000..a4effd4b --- /dev/null +++ b/filter/sql.go @@ -0,0 +1,98 @@ +package filter + +import ( + "fmt" + "strings" +) + +func appendIdentifier(identifiers []string, identifier string) []string { + for _, i := range identifiers { + if i == identifier { + return identifiers + } + } + return append(identifiers, identifier) +} + +// Record the requested identifiers so that we can compare to the ones supported by the API requesting this +func ComparisonToSQL(node ComparisonNode, identifiers []string) (string, []string, error) { + 
switch node.Type { + case "and", "or": + return LogicToSQL(node, identifiers) + case "compare", "is", "like": + return CompareToSQL(node, identifiers) + case "in": + sql, err := InToSQL(node) + return sql, identifiers, err + case "not": + return NotToSQL(node, identifiers) + case "identifier": + sql, err := IdentifierToSQL(node) + return sql, identifiers, err + } + return "", identifiers, nil +} + +func CodeToSQL(node CodeNode) (string, error) { + s := node.Value + switch node.Type { + case "quoted_identifier", "unquoted_identifier": + s = fmt.Sprintf(`"%s"`, strings.ReplaceAll(node.Value, `"`, `""`)) + for _, i := range node.JsonbSelector { + sql, _ := CodeToSQL(i) + s += fmt.Sprintf(" %s", sql) + } + case "string": + s = fmt.Sprintf(`'%s'`, strings.ReplaceAll(node.Value, `'`, `''`)) + } + return s, nil +} + +func OperatorSQL(node CodeNode) (string, error) { + return node.Value, nil +} + +func LogicToSQL(node ComparisonNode, identifiers []string) (string, []string, error) { + newIdentifiers := identifiers + parts := []string{} + for _, v := range toIfaceSlice(node.Values) { + s, i, _ := ComparisonToSQL(v.(ComparisonNode), newIdentifiers) + newIdentifiers = i + parts = append(parts, s) + } + return fmt.Sprintf("( %s )", strings.Join(parts, fmt.Sprintf(" %s ", node.Type))), newIdentifiers, nil +} + +func IdentifierToSQL(node ComparisonNode) (string, error) { + values := node.Values.([]CodeNode) + return CodeToSQL(values[0]) +} + +func NotToSQL(node ComparisonNode, identifiers []string) (string, []string, error) { + values := node.Values.([]ComparisonNode) + rightSQL, newIdentifiers, _ := ComparisonToSQL(values[0], identifiers) + return fmt.Sprintf(`( not %s )`, rightSQL), newIdentifiers, nil +} + +func CompareToSQL(node ComparisonNode, identifiers []string) (string, []string, error) { + values := node.Values.([]CodeNode) + leftCodeNode := values[0] + newIdentifiers := appendIdentifier(identifiers, leftCodeNode.Value) + rightCodeNode := values[1] + leftSQL, _ := CodeToSQL(leftCodeNode) + opSQL, _ := OperatorSQL(node.Operator) + rightSQL, _ := CodeToSQL(rightCodeNode) + return fmt.Sprintf("( %s %s %s )", leftSQL, opSQL, rightSQL), newIdentifiers, nil +} + +func InToSQL(node ComparisonNode) (string, error) { + values := node.Values.([]CodeNode) + leftSQL, _ := CodeToSQL(values[0]) + opSQL, _ := OperatorSQL(node.Operator) + inValues := []string{} + for _, v := range values[1:] { + s, _ := CodeToSQL(v) + inValues = append(inValues, s) + } + return fmt.Sprintf("( %s %s ( %s ) )", leftSQL, opSQL, strings.Join(inValues, ", ")), nil +} diff --git a/filter/test.sh b/filter/test.sh new file mode 100644 index 00000000..6dc5d673 --- /dev/null +++ b/filter/test.sh @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +WORKING_DIR=/Users/mike/Code/github.com/mna/pigeon +PIGEON=${WORKING_DIR}/bin/pigeon +SPC_DIR=. 
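+# Regenerate the Go parser from filter.peg with a local pigeon build (the
+# hard-coded path above is developer-specific), then run the package tests.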
+ +#echo $'package main\n' > ${SPC_DIR}/filter.go + +cat ${SPC_DIR}/filter.peg | ${PIGEON} > ${SPC_DIR}/filter.go + +go test -v diff --git a/go.mod b/go.mod index 6e9222dc..2b1c50ee 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,7 @@ require ( github.com/gertd/go-pluralize v0.2.1 github.com/ghodss/yaml v1.0.0 github.com/golang/protobuf v1.5.3 - github.com/hashicorp/go-hclog v1.4.0 + github.com/hashicorp/go-hclog v1.5.0 github.com/hashicorp/go-plugin v1.4.10 github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/hcl/v2 v2.15.0 @@ -16,7 +16,7 @@ require ( github.com/olekukonko/tablewriter v0.0.5 github.com/sethvargo/go-retry v0.2.4 github.com/stevenle/topsort v0.2.0 - github.com/turbot/go-kit v0.7.0 + github.com/turbot/go-kit v0.8.0-rc.0 github.com/zclconf/go-cty v1.13.2 go.opentelemetry.io/otel v1.16.0 go.opentelemetry.io/otel/metric v1.16.0 // indirect @@ -27,6 +27,7 @@ require ( require ( github.com/allegro/bigcache/v3 v3.1.0 + github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 github.com/eko/gocache/v3 v3.1.2 github.com/fsnotify/fsnotify v1.6.0 github.com/hashicorp/go-getter v1.7.2 @@ -37,6 +38,7 @@ require ( go.opentelemetry.io/otel/sdk/metric v0.39.0 golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 golang.org/x/sync v0.3.0 + golang.org/x/time v0.0.0-20191024005414-555d28b269f0 ) require ( @@ -55,10 +57,9 @@ require ( github.com/bradfitz/gomemcache v0.0.0-20221031212613-62deef7fc822 // indirect github.com/btubbs/datetime v0.1.1 // indirect github.com/cenkalti/backoff/v4 v4.2.1 // indirect - github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect github.com/dustin/go-humanize v1.0.0 // indirect - github.com/fatih/color v1.13.0 // indirect + github.com/fatih/color v1.15.0 // indirect github.com/go-logr/logr v1.2.4 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-redis/redis/v8 v8.11.5 // indirect @@ -74,8 +75,8 @@ require ( github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/klauspost/compress v1.15.11 // indirect - github.com/mattn/go-colorable v0.1.12 // indirect - github.com/mattn/go-isatty v0.0.14 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect github.com/mattn/go-runewidth v0.0.13 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect diff --git a/go.sum b/go.sum index f77819d4..96b04a08 100644 --- a/go.sum +++ b/go.sum @@ -277,8 +277,9 @@ github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go. 
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= +github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= @@ -429,8 +430,8 @@ github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9n github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-getter v1.7.2 h1:uJDtyXwEfalmp1PqdxuhZqrNkUyClZAhVeZYTArbqkg= github.com/hashicorp/go-getter v1.7.2/go.mod h1:W7TalhMmbPmsSMdNjD0ZskARur/9GJ17cfHTRtXV744= -github.com/hashicorp/go-hclog v1.4.0 h1:ctuWFGrhFha8BnnzxqeRGidlEcQkDyL5u8J8t5eA11I= -github.com/hashicorp/go-hclog v1.4.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= +github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-plugin v1.4.10 h1:xUbmA4jC6Dq163/fWcp8P3JuHilrHHMLNRxzGQJ9hNk= github.com/hashicorp/go-plugin v1.4.10/go.mod h1:6/1TEzT0eQznvI/gV2CM29DLSkAK/e58mUWKVsPaph0= github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= @@ -482,12 +483,15 @@ github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3v github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= @@ 
-598,11 +602,11 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/tkrajina/go-reflector v0.5.6 h1:hKQ0gyocG7vgMD2M3dRlYN6WBBOmdoOzJ6njQSepKdE= github.com/tkrajina/go-reflector v0.5.6/go.mod h1:ECbqLgccecY5kPmPmXg1MrHW585yMcDkVl6IvJe64T4= -github.com/turbot/go-kit v0.7.0 h1:Jiua34hnICSWzJgYmqup/sY7G/1jlMqcElJgDIM8uFU= -github.com/turbot/go-kit v0.7.0/go.mod h1:QIOX91BIxQ/1JEtM4rIHWDito3c3GUP2Z+dUT5F6z94= +github.com/turbot/go-kit v0.8.0-rc.0 h1:Vj1w5TmZWwdSwBTcOq6FKVlQQ+XwCd27BZVPZ9m1hT0= +github.com/turbot/go-kit v0.8.0-rc.0/go.mod h1:JkVKhR5XHK86aXY4WzB9Lr0jdnrsafjVh4yJA8ZS3Ck= github.com/ulikunitz/xz v0.5.10 h1:t92gobL9l3HE202wg3rlk19F6X+JOxl9BBrCCMYEYd8= github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -868,9 +872,11 @@ golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -891,6 +897,7 @@ golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/grpc/pluginClient.go b/grpc/pluginClient.go index aac3f10e..4fdaf828 100644 --- a/grpc/pluginClient.go +++ b/grpc/pluginClient.go @@ -114,6 +114,22 @@ func (c *PluginClient) SetCacheOptions(req *proto.SetCacheOptionsRequest) (*prot 
return resp, nil } +func (c *PluginClient) SetRateLimiters(req *proto.SetRateLimitersRequest) (*proto.SetRateLimitersResponse, error) { + resp, err := c.Stub.SetRateLimiters(req) + if err != nil { + return nil, HandleGrpcError(err, c.Name, "SetRateLimiters") + } + return resp, nil +} + +func (c *PluginClient) GetRateLimiters(req *proto.GetRateLimitersRequest) (*proto.GetRateLimitersResponse, error) { + resp, err := c.Stub.GetRateLimiters(req) + if err != nil { + return nil, HandleGrpcError(err, c.Name, "GetRateLimiters") + } + return resp, nil +} + func (c *PluginClient) GetSchema(connectionName string) (*proto.Schema, error) { resp, err := c.Stub.GetSchema(&proto.GetSchemaRequest{Connection: connectionName}) if err != nil { diff --git a/grpc/pluginServer.go b/grpc/pluginServer.go index 2443709f..1077cb6d 100644 --- a/grpc/pluginServer.go +++ b/grpc/pluginServer.go @@ -16,6 +16,8 @@ type SetConnectionConfigFunc func(string, string) error type SetAllConnectionConfigsFunc func([]*proto.ConnectionConfig, int) (map[string]error, error) type UpdateConnectionConfigsFunc func([]*proto.ConnectionConfig, []*proto.ConnectionConfig, []*proto.ConnectionConfig) (map[string]error, error) type SetCacheOptionsFunc func(*proto.SetCacheOptionsRequest) error +type SetRateLimitersFunc func(*proto.SetRateLimitersRequest) error +type GetRateLimitersFunc func() []*proto.RateLimiterDefinition type EstablishMessageStreamFunc func(stream proto.WrapperPlugin_EstablishMessageStreamServer) error // PluginServer is the server for a single plugin @@ -29,6 +31,8 @@ type PluginServer struct { getSchemaFunc GetSchemaFunc establishMessageStreamFunc EstablishMessageStreamFunc setCacheOptionsFunc SetCacheOptionsFunc + setRateLimitersFunc SetRateLimitersFunc + getRateLimitersFunc GetRateLimitersFunc } func NewPluginServer(pluginName string, @@ -39,6 +43,8 @@ func NewPluginServer(pluginName string, executeFunc ExecuteFunc, establishMessageStreamFunc EstablishMessageStreamFunc, setCacheOptionsFunc SetCacheOptionsFunc, + setRateLimitersFunc SetRateLimitersFunc, + getRateLimitersFunc GetRateLimitersFunc, ) *PluginServer { return &PluginServer{ @@ -50,6 +56,8 @@ func NewPluginServer(pluginName string, getSchemaFunc: getSchemaFunc, establishMessageStreamFunc: establishMessageStreamFunc, setCacheOptionsFunc: setCacheOptionsFunc, + setRateLimitersFunc: setRateLimitersFunc, + getRateLimitersFunc: getRateLimitersFunc, } } @@ -149,13 +157,25 @@ func (s PluginServer) GetSupportedOperations(*proto.GetSupportedOperationsReques MultipleConnections: true, MessageStream: true, SetCacheOptions: true, + RateLimiters: true, }, nil } + func (s PluginServer) SetCacheOptions(req *proto.SetCacheOptionsRequest) (*proto.SetCacheOptionsResponse, error) { err := s.setCacheOptionsFunc(req) return &proto.SetCacheOptionsResponse{}, err } +func (s PluginServer) SetRateLimiters(req *proto.SetRateLimitersRequest) (*proto.SetRateLimitersResponse, error) { + err := s.setRateLimitersFunc(req) + return &proto.SetRateLimitersResponse{}, err +} + +func (s PluginServer) GetRateLimiters(*proto.GetRateLimitersRequest) (*proto.GetRateLimitersResponse, error) { + rateLimiters := s.getRateLimitersFunc() + return &proto.GetRateLimitersResponse{Definitions: rateLimiters}, nil +} + func (s PluginServer) EstablishMessageStream(stream proto.WrapperPlugin_EstablishMessageStreamServer) error { return s.establishMessageStreamFunc(stream) } diff --git a/grpc/plugin_schema.go b/grpc/plugin_schema.go index 24c4da64..10002359 100644 --- a/grpc/plugin_schema.go +++ 
b/grpc/plugin_schema.go @@ -3,8 +3,9 @@ package grpc import "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" type PluginSchema struct { - Schema map[string]*proto.TableSchema - Mode string + Schema map[string]*proto.TableSchema + Mode string + RateLimiters []*proto.RateLimiterDefinition } func NewPluginSchema(mode string) *PluginSchema { diff --git a/grpc/proto/plugin.pb.go b/grpc/proto/plugin.pb.go index a3ec692e..ff5fa5d3 100644 --- a/grpc/proto/plugin.pb.go +++ b/grpc/proto/plugin.pb.go @@ -1332,7 +1332,8 @@ type GetSchemaResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Schema *Schema `protobuf:"bytes,1,opt,name=schema,proto3" json:"schema,omitempty"` + Schema *Schema `protobuf:"bytes,1,opt,name=schema,proto3" json:"schema,omitempty"` + RateLimiters []*RateLimiterDefinition `protobuf:"bytes,2,rep,name=rate_limiters,json=rateLimiters,proto3" json:"rate_limiters,omitempty"` } func (x *GetSchemaResponse) Reset() { @@ -1374,6 +1375,13 @@ func (x *GetSchemaResponse) GetSchema() *Schema { return nil } +func (x *GetSchemaResponse) GetRateLimiters() []*RateLimiterDefinition { + if x != nil { + return x.RateLimiters + } + return nil +} + type GetSupportedOperationsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1412,6 +1420,7 @@ func (*GetSupportedOperationsRequest) Descriptor() ([]byte, []int) { return file_plugin_proto_rawDescGZIP(), []int{17} } +// NOTE: this must be consistent with GetSupportedOperationsResponse in steampipe/pkg/pluginmanager_service/grpc/proto/plugin_manager.proto type GetSupportedOperationsResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1421,6 +1430,7 @@ type GetSupportedOperationsResponse struct { MultipleConnections bool `protobuf:"varint,2,opt,name=multiple_connections,json=multipleConnections,proto3" json:"multiple_connections,omitempty"` MessageStream bool `protobuf:"varint,3,opt,name=message_stream,json=messageStream,proto3" json:"message_stream,omitempty"` SetCacheOptions bool `protobuf:"varint,4,opt,name=set_cache_options,json=setCacheOptions,proto3" json:"set_cache_options,omitempty"` + RateLimiters bool `protobuf:"varint,5,opt,name=rate_limiters,json=rateLimiters,proto3" json:"rate_limiters,omitempty"` } func (x *GetSupportedOperationsResponse) Reset() { @@ -1483,6 +1493,13 @@ func (x *GetSupportedOperationsResponse) GetSetCacheOptions() bool { return false } +func (x *GetSupportedOperationsResponse) GetRateLimiters() bool { + if x != nil { + return x.RateLimiters + } + return false +} + // Deprecated: Do not use. 
type SetConnectionConfigRequest struct { state protoimpl.MessageState @@ -1799,7 +1816,6 @@ func (x *ConnectionConfig) GetType() string { return "" } - type SetConnectionConfigResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2760,6 +2776,93 @@ func (x *SetCacheOptionsRequest) GetMaxSizeMb() int64 { return 0 } +type RateLimiterDefinition struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + FillRate float32 `protobuf:"fixed32,2,opt,name=fill_rate,json=fillRate,proto3" json:"fill_rate,omitempty"` + BucketSize int64 `protobuf:"varint,3,opt,name=bucket_size,json=bucketSize,proto3" json:"bucket_size,omitempty"` + MaxConcurrency int64 `protobuf:"varint,4,opt,name=max_concurrency,json=maxConcurrency,proto3" json:"max_concurrency,omitempty"` + Scope []string `protobuf:"bytes,5,rep,name=scope,proto3" json:"scope,omitempty"` + Where string `protobuf:"bytes,6,opt,name=where,proto3" json:"where,omitempty"` +} + +func (x *RateLimiterDefinition) Reset() { + *x = RateLimiterDefinition{} + if protoimpl.UnsafeEnabled { + mi := &file_plugin_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RateLimiterDefinition) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RateLimiterDefinition) ProtoMessage() {} + +func (x *RateLimiterDefinition) ProtoReflect() protoreflect.Message { + mi := &file_plugin_proto_msgTypes[37] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RateLimiterDefinition.ProtoReflect.Descriptor instead. 
+func (*RateLimiterDefinition) Descriptor() ([]byte, []int) { + return file_plugin_proto_rawDescGZIP(), []int{37} +} + +func (x *RateLimiterDefinition) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *RateLimiterDefinition) GetFillRate() float32 { + if x != nil { + return x.FillRate + } + return 0 +} + +func (x *RateLimiterDefinition) GetBucketSize() int64 { + if x != nil { + return x.BucketSize + } + return 0 +} + +func (x *RateLimiterDefinition) GetMaxConcurrency() int64 { + if x != nil { + return x.MaxConcurrency + } + return 0 +} + +func (x *RateLimiterDefinition) GetScope() []string { + if x != nil { + return x.Scope + } + return nil +} + +func (x *RateLimiterDefinition) GetWhere() string { + if x != nil { + return x.Where + } + return "" +} + type SetCacheOptionsResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2769,7 +2872,7 @@ type SetCacheOptionsResponse struct { func (x *SetCacheOptionsResponse) Reset() { *x = SetCacheOptionsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_plugin_proto_msgTypes[37] + mi := &file_plugin_proto_msgTypes[38] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2782,7 +2885,7 @@ func (x *SetCacheOptionsResponse) String() string { func (*SetCacheOptionsResponse) ProtoMessage() {} func (x *SetCacheOptionsResponse) ProtoReflect() protoreflect.Message { - mi := &file_plugin_proto_msgTypes[37] + mi := &file_plugin_proto_msgTypes[38] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2795,7 +2898,177 @@ func (x *SetCacheOptionsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use SetCacheOptionsResponse.ProtoReflect.Descriptor instead. func (*SetCacheOptionsResponse) Descriptor() ([]byte, []int) { - return file_plugin_proto_rawDescGZIP(), []int{37} + return file_plugin_proto_rawDescGZIP(), []int{38} +} + +type SetRateLimitersRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Definitions []*RateLimiterDefinition `protobuf:"bytes,1,rep,name=definitions,proto3" json:"definitions,omitempty"` +} + +func (x *SetRateLimitersRequest) Reset() { + *x = SetRateLimitersRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_plugin_proto_msgTypes[39] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SetRateLimitersRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SetRateLimitersRequest) ProtoMessage() {} + +func (x *SetRateLimitersRequest) ProtoReflect() protoreflect.Message { + mi := &file_plugin_proto_msgTypes[39] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SetRateLimitersRequest.ProtoReflect.Descriptor instead. 
+func (*SetRateLimitersRequest) Descriptor() ([]byte, []int) { + return file_plugin_proto_rawDescGZIP(), []int{39} +} + +func (x *SetRateLimitersRequest) GetDefinitions() []*RateLimiterDefinition { + if x != nil { + return x.Definitions + } + return nil +} + +type SetRateLimitersResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SetRateLimitersResponse) Reset() { + *x = SetRateLimitersResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_plugin_proto_msgTypes[40] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SetRateLimitersResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SetRateLimitersResponse) ProtoMessage() {} + +func (x *SetRateLimitersResponse) ProtoReflect() protoreflect.Message { + mi := &file_plugin_proto_msgTypes[40] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SetRateLimitersResponse.ProtoReflect.Descriptor instead. +func (*SetRateLimitersResponse) Descriptor() ([]byte, []int) { + return file_plugin_proto_rawDescGZIP(), []int{40} +} + +type GetRateLimitersRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *GetRateLimitersRequest) Reset() { + *x = GetRateLimitersRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_plugin_proto_msgTypes[41] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetRateLimitersRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetRateLimitersRequest) ProtoMessage() {} + +func (x *GetRateLimitersRequest) ProtoReflect() protoreflect.Message { + mi := &file_plugin_proto_msgTypes[41] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetRateLimitersRequest.ProtoReflect.Descriptor instead. +func (*GetRateLimitersRequest) Descriptor() ([]byte, []int) { + return file_plugin_proto_rawDescGZIP(), []int{41} +} + +type GetRateLimitersResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Definitions []*RateLimiterDefinition `protobuf:"bytes,1,rep,name=definitions,proto3" json:"definitions,omitempty"` +} + +func (x *GetRateLimitersResponse) Reset() { + *x = GetRateLimitersResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_plugin_proto_msgTypes[42] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetRateLimitersResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetRateLimitersResponse) ProtoMessage() {} + +func (x *GetRateLimitersResponse) ProtoReflect() protoreflect.Message { + mi := &file_plugin_proto_msgTypes[42] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetRateLimitersResponse.ProtoReflect.Descriptor instead. 
+func (*GetRateLimitersResponse) Descriptor() ([]byte, []int) { + return file_plugin_proto_rawDescGZIP(), []int{42} +} + +func (x *GetRateLimitersResponse) GetDefinitions() []*RateLimiterDefinition { + if x != nil { + return x.Definitions + } + return nil } var File_plugin_proto protoreflect.FileDescriptor @@ -2947,293 +3220,336 @@ var file_plugin_proto_rawDesc = []byte{ 0x61, 0x63, 0x68, 0x65, 0x48, 0x69, 0x74, 0x22, 0x32, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3a, 0x0a, 0x11, 0x47, + 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x7d, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x25, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, - 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0x1f, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x53, 0x75, - 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xc7, 0x01, 0x0a, 0x1e, 0x47, 0x65, 0x74, - 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x71, - 0x75, 0x65, 0x72, 0x79, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, - 0x52, 0x0a, 0x71, 0x75, 0x65, 0x72, 0x79, 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, 0x31, 0x0a, 0x14, - 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x6d, 0x75, 0x6c, 0x74, - 0x69, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, - 0x25, 0x0a, 0x0e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, - 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x2a, 0x0a, 0x11, 0x73, 0x65, 0x74, 0x5f, 0x63, 0x61, - 0x63, 0x68, 0x65, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x0f, 0x73, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x22, 0x76, 0x0a, 0x1a, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x27, 0x0a, 0x0f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x3a, 0x02, 0x18, 0x01, 0x22, 0x6f, 0x0a, 0x17, 0x43, 0x6f, - 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x61, - 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x27, 0x0a, 0x0f, 
0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, - 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2b, - 0x0a, 0x11, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x6e, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x7e, 0x0a, 0x1e, 0x53, - 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, - 0x07, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, - 0x12, 0x29, 0x0a, 0x11, 0x6d, 0x61, 0x78, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, 0x69, - 0x7a, 0x65, 0x5f, 0x6d, 0x62, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0e, 0x6d, 0x61, 0x78, - 0x43, 0x61, 0x63, 0x68, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x4d, 0x62, 0x22, 0xb5, 0x01, 0x0a, 0x1e, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, - 0x0a, 0x05, 0x61, 0x64, 0x64, 0x65, 0x64, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x05, 0x61, 0x64, 0x64, 0x65, 0x64, 0x12, 0x31, 0x0a, - 0x07, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x07, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x64, - 0x12, 0x31, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x18, 0x03, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x07, 0x63, 0x68, 0x61, 0x6e, - 0x67, 0x65, 0x64, 0x22, 0xcf, 0x01, 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, - 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, - 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, - 0x12, 0x2a, 0x0a, 0x11, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x5f, 0x73, 0x68, 0x6f, 0x72, 0x74, - 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x70, 0x6c, 0x75, - 0x67, 0x69, 0x6e, 0x53, 0x68, 0x6f, 0x72, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x63, 0x6f, - 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x10, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 
0x69, 0x6f, 0x6e, - 0x73, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xcd, 0x01, 0x0a, 0x1b, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x68, 0x0a, 0x12, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x5f, - 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x39, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x66, 0x61, - 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, - 0x44, 0x0a, 0x16, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xd5, 0x01, 0x0a, 0x1f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x41, 0x0a, 0x0d, 0x72, 0x61, 0x74, 0x65, 0x5f, + 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, + 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x72, 0x61, + 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x22, 0x1f, 0x0a, 0x1d, 0x47, 0x65, + 0x74, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xec, 0x01, 0x0a, 0x1e, + 0x47, 0x65, 0x74, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1f, + 0x0a, 0x0b, 0x71, 0x75, 0x65, 0x72, 0x79, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x0a, 0x71, 0x75, 0x65, 0x72, 0x79, 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, + 0x31, 0x0a, 0x14, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x6d, + 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x2a, 0x0a, 0x11, 0x73, 0x65, 0x74, + 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x73, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x61, 0x74, 0x65, 0x5f, 0x6c, 0x69, + 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x72, 0x61, + 0x74, 
0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x22, 0x76, 0x0a, 0x1a, 0x53, 0x65, + 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x63, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, + 0x65, 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x3a, 0x02, + 0x18, 0x01, 0x22, 0x6f, 0x0a, 0x17, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x27, 0x0a, + 0x0f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x22, 0x7e, 0x0a, 0x1e, 0x53, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, + 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, + 0x07, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x29, 0x0a, 0x11, 0x6d, 0x61, 0x78, 0x5f, + 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x5f, 0x6d, 0x62, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x0e, 0x6d, 0x61, 0x78, 0x43, 0x61, 0x63, 0x68, 0x65, 0x53, 0x69, 0x7a, + 0x65, 0x4d, 0x62, 0x22, 0xb5, 0x01, 0x0a, 0x1e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x05, 0x61, 0x64, 0x64, 0x65, 0x64, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x05, + 0x61, 0x64, 0x64, 0x65, 0x64, 0x12, 0x31, 0x0a, 0x07, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x64, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, + 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, + 0x07, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x12, 0x31, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x64, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x52, 0x07, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x22, 0xcf, 0x01, 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x73, 0x52, 0x65, 0x73, 0x70, 
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6c, 0x0a, 0x12, 0x66, 0x61, 0x69, - 0x6c, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x61, + 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x2a, 0x0a, 0x11, 0x70, 0x6c, 0x75, 0x67, + 0x69, 0x6e, 0x5f, 0x73, 0x68, 0x6f, 0x72, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0f, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x68, 0x6f, 0x72, 0x74, + 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2b, 0x0a, 0x11, + 0x63, 0x68, 0x69, 0x6c, 0x64, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x10, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xcd, 0x01, + 0x0a, 0x1b, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x68, 0x0a, + 0x12, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x44, 0x0a, 0x16, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x83, 0x01, - 0x0a, 0x03, 0x52, 0x6f, 0x77, 0x12, 0x31, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, - 0x6f, 0x77, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x1a, 0x49, 0x0a, 0x0c, 0x43, 0x6f, 0x6c, 0x75, - 0x6d, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x23, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 
0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x22, 0xdc, 0x03, 0x0a, 0x0b, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x12, 0x31, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6c, - 0x75, 0x6d, 0x6e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x63, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x46, 0x0a, 0x11, 0x67, 0x65, 0x74, 0x43, - 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x65, 0x79, 0x43, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x53, 0x65, 0x74, 0x42, 0x02, 0x18, 0x01, 0x52, 0x11, 0x67, - 0x65, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, - 0x12, 0x48, 0x0a, 0x12, 0x6c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x53, - 0x65, 0x74, 0x42, 0x02, 0x18, 0x01, 0x52, 0x12, 0x6c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, - 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x58, 0x0a, 0x1a, 0x6c, 0x69, - 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4b, 0x65, - 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, - 0x73, 0x53, 0x65, 0x74, 0x42, 0x02, 0x18, 0x01, 0x52, 0x1a, 0x6c, 0x69, 0x73, 0x74, 0x43, 0x61, - 0x6c, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, - 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x44, 0x0a, 0x14, 0x67, 0x65, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, - 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4c, 0x69, 0x73, 0x74, 0x18, 0x06, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x6f, - 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x14, 0x67, 0x65, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, - 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x46, 0x0a, 0x15, 0x6c, 0x69, - 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4c, - 0x69, 0x73, 0x74, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x15, 0x6c, 0x69, 0x73, + 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xd5, 0x01, + 0x0a, 0x1f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x6c, 0x0a, 0x12, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3d, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 
0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x66, 0x61, + 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, + 0x44, 0x0a, 0x16, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x83, 0x01, 0x0a, 0x03, 0x52, 0x6f, 0x77, 0x12, 0x31, 0x0a, + 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x6f, 0x77, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, + 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, + 0x1a, 0x49, 0x0a, 0x0c, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x23, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x0d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xdc, 0x03, 0x0a, 0x0b, + 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x31, 0x0a, 0x07, 0x63, + 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x44, 0x65, 0x66, 0x69, 0x6e, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x20, + 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x46, 0x0a, 0x11, 0x67, 0x65, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, + 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x53, 0x65, + 0x74, 0x42, 0x02, 0x18, 0x01, 0x52, 0x11, 0x67, 0x65, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, + 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x48, 0x0a, 0x12, 0x6c, 0x69, 0x73, 0x74, + 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x65, 0x79, + 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x53, 0x65, 0x74, 0x42, 0x02, 0x18, 0x01, 0x52, 0x12, + 0x6c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, + 0x6e, 0x73, 0x12, 0x58, 0x0a, 0x1a, 0x6c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, + 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x53, 0x65, 0x74, 0x42, 0x02, 0x18, 0x01, + 0x52, 0x1a, 0x6c, 
0x69, 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x61, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x44, 0x0a, 0x14, + 0x67, 0x65, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, + 0x4c, 0x69, 0x73, 0x74, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x14, 0x67, 0x65, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4c, 0x69, - 0x73, 0x74, 0x22, 0x4f, 0x0a, 0x0d, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, - 0x53, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x61, - 0x6c, 0x6c, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, 0x61, 0x6c, 0x6c, 0x12, 0x10, 0x0a, - 0x03, 0x61, 0x6e, 0x79, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, 0x61, 0x6e, 0x79, 0x3a, - 0x02, 0x18, 0x01, 0x22, 0x78, 0x0a, 0x09, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, - 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, - 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, - 0x72, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x12, 0x1f, 0x0a, 0x0b, - 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0a, 0x63, 0x61, 0x63, 0x68, 0x65, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x22, 0xea, 0x01, - 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x31, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x1f, 0x0a, 0x0b, 0x73, - 0x64, 0x6b, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0a, 0x73, 0x64, 0x6b, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x29, 0x0a, 0x10, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x1a, 0x4d, 0x0a, 0x0b, 0x53, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xab, 0x03, 0x0a, 0x06, 0x43, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, 0x31, 0x0a, 0x0a, 0x6e, 0x75, 0x6c, 0x6c, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x4e, 0x75, 0x6c, 0x6c, 0x56, 
0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, 0x52, 0x09, 0x6e, - 0x75, 0x6c, 0x6c, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, 0x64, 0x6f, 0x75, 0x62, - 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, - 0x52, 0x0b, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, - 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, - 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, - 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x12, 0x1f, 0x0a, 0x0a, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x06, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 0x62, 0x6f, 0x6f, 0x6c, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0a, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x09, 0x6a, 0x73, 0x6f, 0x6e, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x12, 0x45, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x69, 0x70, - 0x5f, 0x61, 0x64, 0x64, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, - 0x09, 0x48, 0x00, 0x52, 0x0b, 0x69, 0x70, 0x41, 0x64, 0x64, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x12, 0x2a, 0x0a, 0x10, 0x63, 0x69, 0x64, 0x72, 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x69, - 0x64, 0x72, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0b, - 0x6c, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, - 0x09, 0x48, 0x00, 0x52, 0x0a, 0x6c, 0x74, 0x72, 0x65, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, - 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x6f, 0x0a, 0x10, 0x43, 0x6f, 0x6c, 0x75, - 0x6d, 0x6e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x25, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x54, 0x79, 0x70, - 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x2d, 0x0a, 0x0b, 0x51, 0x75, 0x65, - 0x72, 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1e, 0x0a, 0x04, 0x72, 0x6f, 0x77, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, - 0x6f, 0x77, 0x52, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x22, 0x35, 0x0a, 0x0b, 0x49, 0x6e, 0x64, 0x65, - 0x78, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x26, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 
0x6f, 0x74, 0x6f, 0x2e, 0x49, - 0x6e, 0x64, 0x65, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x22, - 0xaa, 0x02, 0x0a, 0x09, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x10, 0x0a, - 0x03, 0x4b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x4b, 0x65, 0x79, 0x12, - 0x31, 0x0a, 0x05, 0x71, 0x75, 0x61, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x49, 0x74, 0x65, 0x6d, - 0x2e, 0x51, 0x75, 0x61, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x05, 0x71, 0x75, 0x61, - 0x6c, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x04, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x14, 0x0a, 0x05, - 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, - 0x69, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x43, 0x6f, 0x75, 0x6e, - 0x74, 0x12, 0x41, 0x0a, 0x0e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, - 0x69, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x69, 0x6f, 0x6e, - 0x54, 0x69, 0x6d, 0x65, 0x1a, 0x46, 0x0a, 0x0a, 0x51, 0x75, 0x61, 0x6c, 0x73, 0x45, 0x6e, 0x74, + 0x73, 0x74, 0x12, 0x46, 0x0a, 0x15, 0x6c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, + 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4c, 0x69, 0x73, 0x74, 0x18, 0x07, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x6f, 0x6c, + 0x75, 0x6d, 0x6e, 0x52, 0x15, 0x6c, 0x69, 0x73, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x4b, 0x65, 0x79, + 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4c, 0x69, 0x73, 0x74, 0x22, 0x4f, 0x0a, 0x0d, 0x4b, 0x65, + 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x53, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, + 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x69, 0x6e, + 0x67, 0x6c, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x61, 0x6c, 0x6c, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x03, 0x61, 0x6c, 0x6c, 0x12, 0x10, 0x0a, 0x03, 0x61, 0x6e, 0x79, 0x18, 0x03, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x03, 0x61, 0x6e, 0x79, 0x3a, 0x02, 0x18, 0x01, 0x22, 0x78, 0x0a, 0x09, 0x4b, + 0x65, 0x79, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x09, + 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x65, + 0x71, 0x75, 0x69, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x72, 0x65, 0x71, + 0x75, 0x69, 0x72, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x6d, 0x61, + 0x74, 0x63, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x61, 0x63, 0x68, 0x65, + 0x4d, 0x61, 0x74, 0x63, 0x68, 0x22, 0xea, 0x01, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x12, 0x31, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 
0x2e, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x73, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x64, 0x6b, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x64, 0x6b, 0x56, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x29, 0x0a, 0x10, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x12, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6d, + 0x6f, 0x64, 0x65, 0x1a, 0x4d, 0x0a, 0x0b, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x22, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x51, 0x75, 0x61, 0x6c, - 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x64, 0x0a, 0x16, - 0x53, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, - 0x12, 0x10, 0x0a, 0x03, 0x74, 0x74, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x03, 0x74, - 0x74, 0x6c, 0x12, 0x1e, 0x0a, 0x0b, 0x6d, 0x61, 0x78, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x5f, 0x6d, - 0x62, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x53, 0x69, 0x7a, 0x65, - 0x4d, 0x62, 0x22, 0x19, 0x0a, 0x17, 0x53, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x27, 0x0a, - 0x11, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x54, 0x79, - 0x70, 0x65, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x43, 0x48, 0x45, 0x4d, 0x41, 0x5f, 0x55, 0x50, 0x44, - 0x41, 0x54, 0x45, 0x44, 0x10, 0x00, 0x2a, 0x1b, 0x0a, 0x09, 0x4e, 0x75, 0x6c, 0x6c, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x12, 0x0e, 0x0a, 0x0a, 0x4e, 0x55, 0x4c, 0x4c, 0x5f, 0x56, 0x41, 0x4c, 0x55, - 0x45, 0x10, 0x00, 0x2a, 0x9f, 0x01, 0x0a, 0x0a, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x54, 0x79, - 0x70, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, - 0x49, 0x4e, 0x54, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x4f, 0x55, 0x42, 0x4c, 0x45, 0x10, - 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x03, 0x12, 0x08, 0x0a, - 0x04, 0x4a, 0x53, 0x4f, 0x4e, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x41, 0x54, 0x45, 0x54, - 0x49, 0x4d, 0x45, 0x10, 0x05, 0x12, 0x0a, 0x0a, 0x06, 0x49, 0x50, 0x41, 0x44, 0x44, 0x52, 0x10, - 0x06, 0x12, 0x08, 0x0a, 0x04, 0x43, 0x49, 0x44, 0x52, 0x10, 0x07, 0x12, 0x0d, 0x0a, 0x09, 0x54, - 0x49, 0x4d, 0x45, 0x53, 0x54, 0x41, 0x4d, 0x50, 0x10, 0x08, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, - 0x45, 0x54, 0x10, 0x09, 0x12, 0x09, 0x0a, 0x05, 0x4c, 0x54, 0x52, 0x45, 0x45, 0x10, 0x0a, 0x12, - 0x14, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0xff, 0xff, 0xff, 0xff, 0xff, - 0xff, 0xff, 0xff, 0xff, 0x01, 0x32, 0xca, 0x05, 0x0a, 0x0d, 0x57, 0x72, 0x61, 0x70, 0x70, 0x65, - 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x56, 0x0a, 0x16, 0x45, 0x73, 0x74, 0x61, 0x62, - 0x6c, 0x69, 0x73, 
0x68, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, - 0x6d, 0x12, 0x24, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x73, 0x74, 0x61, 0x62, 0x6c, - 0x69, 0x73, 0x68, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, - 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x30, 0x01, 0x12, - 0x3e, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x17, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, - 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x3a, 0x0a, 0x07, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x12, 0x15, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x5c, 0x0a, 0x13, 0x53, - 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x12, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x6f, - 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x54, 0x61, 0x62, 0x6c, + 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0xab, 0x03, 0x0a, 0x06, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, 0x31, 0x0a, + 0x0a, 0x6e, 0x75, 0x6c, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4e, 0x75, 0x6c, 0x6c, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x48, 0x00, 0x52, 0x09, 0x6e, 0x75, 0x6c, 0x6c, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x12, 0x23, 0x0a, 0x0c, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x0b, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0a, 0x62, 0x6f, 0x6f, + 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, + 0x09, 0x62, 0x6f, 0x6f, 0x6c, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0a, 0x6a, 0x73, + 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, + 0x52, 0x09, 0x6a, 0x73, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x45, 0x0a, 0x0f, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x08, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x48, 0x00, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x69, 0x70, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x69, 0x70, 0x41, + 0x64, 0x64, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2a, 0x0a, 0x10, 0x63, 0x69, 0x64, 0x72, + 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x0a, 0x20, 0x01, + 0x28, 0x09, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x69, 0x64, 0x72, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0b, 0x6c, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x6c, 0x74, 0x72, + 0x65, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0x6f, 0x0a, 0x10, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x25, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, + 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, + 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x2d, 0x0a, 0x0b, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x12, 0x1e, 0x0a, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0a, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x6f, 0x77, 0x52, 0x04, 0x72, 0x6f, 0x77, 0x73, + 0x22, 0x35, 0x0a, 0x0b, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, + 0x26, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x49, 0x74, 0x65, 0x6d, + 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x22, 0xaa, 0x02, 0x0a, 0x09, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x10, 0x0a, 0x03, 0x4b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x4b, 0x65, 0x79, 0x12, 0x31, 0x0a, 0x05, 0x71, 0x75, 0x61, 0x6c, 0x73, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x2e, 0x51, 0x75, 0x61, 0x6c, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x05, 0x71, 0x75, 0x61, 0x6c, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, + 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6c, + 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, + 0x67, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, + 0x70, 0x61, 0x67, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x41, 0x0a, 0x0e, 0x69, 0x6e, 0x73, + 0x65, 0x72, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 
0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0d, 0x69, + 0x6e, 0x73, 0x65, 0x72, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x1a, 0x46, 0x0a, 0x0a, + 0x51, 0x75, 0x61, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x22, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2e, 0x51, 0x75, 0x61, 0x6c, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x02, 0x38, 0x01, 0x22, 0x64, 0x0a, 0x16, 0x53, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, + 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, + 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x74, 0x74, 0x6c, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x03, 0x74, 0x74, 0x6c, 0x12, 0x1e, 0x0a, 0x0b, 0x6d, 0x61, + 0x78, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x5f, 0x6d, 0x62, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x09, 0x6d, 0x61, 0x78, 0x53, 0x69, 0x7a, 0x65, 0x4d, 0x62, 0x22, 0xbe, 0x01, 0x0a, 0x15, 0x52, + 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x6c, + 0x5f, 0x72, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x08, 0x66, 0x69, 0x6c, + 0x6c, 0x52, 0x61, 0x74, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x5f, + 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x62, 0x75, 0x63, 0x6b, + 0x65, 0x74, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x6d, 0x61, 0x78, 0x5f, 0x63, 0x6f, + 0x6e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0e, 0x6d, 0x61, 0x78, 0x43, 0x6f, 0x6e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, + 0x73, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x77, 0x68, 0x65, 0x72, 0x65, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x77, 0x68, 0x65, 0x72, 0x65, 0x22, 0x19, 0x0a, 0x17, 0x53, + 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x58, 0x0a, 0x16, 0x53, 0x65, 0x74, 0x52, 0x61, 0x74, + 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x3e, 0x0a, 0x0b, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x61, + 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x22, 0x19, 0x0a, 0x17, 0x53, 0x65, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, + 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x18, 0x0a, 0x16, 0x47, + 0x65, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x59, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x52, 0x61, 0x74, 
0x65, + 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x3e, 0x0a, 0x0b, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x61, + 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2a, 0x27, 0x0a, 0x11, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x43, 0x48, 0x45, 0x4d, 0x41, 0x5f, + 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x44, 0x10, 0x00, 0x2a, 0x1b, 0x0a, 0x09, 0x4e, 0x75, 0x6c, + 0x6c, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x0e, 0x0a, 0x0a, 0x4e, 0x55, 0x4c, 0x4c, 0x5f, 0x56, + 0x41, 0x4c, 0x55, 0x45, 0x10, 0x00, 0x2a, 0x9f, 0x01, 0x0a, 0x0a, 0x43, 0x6f, 0x6c, 0x75, 0x6d, + 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x00, 0x12, + 0x07, 0x0a, 0x03, 0x49, 0x4e, 0x54, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x4f, 0x55, 0x42, + 0x4c, 0x45, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x03, + 0x12, 0x08, 0x0a, 0x04, 0x4a, 0x53, 0x4f, 0x4e, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x41, + 0x54, 0x45, 0x54, 0x49, 0x4d, 0x45, 0x10, 0x05, 0x12, 0x0a, 0x0a, 0x06, 0x49, 0x50, 0x41, 0x44, + 0x44, 0x52, 0x10, 0x06, 0x12, 0x08, 0x0a, 0x04, 0x43, 0x49, 0x44, 0x52, 0x10, 0x07, 0x12, 0x0d, + 0x0a, 0x09, 0x54, 0x49, 0x4d, 0x45, 0x53, 0x54, 0x41, 0x4d, 0x50, 0x10, 0x08, 0x12, 0x08, 0x0a, + 0x04, 0x49, 0x4e, 0x45, 0x54, 0x10, 0x09, 0x12, 0x09, 0x0a, 0x05, 0x4c, 0x54, 0x52, 0x45, 0x45, + 0x10, 0x0a, 0x12, 0x14, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x32, 0xee, 0x06, 0x0a, 0x0d, 0x57, 0x72, 0x61, + 0x70, 0x70, 0x65, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x56, 0x0a, 0x16, 0x45, 0x73, + 0x74, 0x61, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x53, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x12, 0x24, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x73, 0x74, + 0x61, 0x62, 0x6c, 0x69, 0x73, 0x68, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x53, 0x74, 0x72, + 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x30, 0x01, 0x12, 0x3e, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, + 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x47, 0x65, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x3a, 0x0a, 0x07, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x12, 0x15, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x78, 0x65, + 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x5c, + 0x0a, 0x13, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x6f, 
0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x64, 0x0a, 0x17, 0x53, 0x65, 0x74, - 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x73, 0x12, 0x25, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x74, - 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x68, 0x0a, 0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x25, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x65, 0x0a, 0x16, 0x47, 0x65, 0x74, - 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x12, 0x24, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x53, - 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, - 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x50, 0x0a, 0x0f, 0x53, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x74, 0x43, - 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x61, - 0x63, 0x68, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x42, 0x09, 0x5a, 0x07, 0x2e, 0x3b, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x64, 0x0a, 0x17, + 0x53, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x25, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, + 0x53, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 
0x12, 0x68, 0x0a, 0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x25, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x65, 0x0a, 0x16, + 0x47, 0x65, 0x74, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x24, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, + 0x65, 0x74, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x64, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x50, 0x0a, 0x0f, 0x53, 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, + 0x65, 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x65, + 0x74, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x50, 0x0a, 0x0f, 0x53, 0x65, 0x74, 0x52, 0x61, 0x74, 0x65, + 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x53, 0x65, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, + 0x53, 0x65, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x50, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x52, 0x61, + 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, + 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x09, 0x5a, 0x07, 0x2e, 0x3b, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -3249,7 +3565,7 @@ func file_plugin_proto_rawDescGZIP() []byte { } var file_plugin_proto_enumTypes = make([]protoimpl.EnumInfo, 4) -var file_plugin_proto_msgTypes = make([]protoimpl.MessageInfo, 45) +var file_plugin_proto_msgTypes = make([]protoimpl.MessageInfo, 50) var file_plugin_proto_goTypes = []interface{}{ (PluginMessageType)(0), // 0: proto.PluginMessageType (NullValue)(0), // 1: proto.NullValue @@ -3292,15 +3608,20 @@ var file_plugin_proto_goTypes = []interface{}{ (*IndexBucket)(nil), // 38: proto.IndexBucket (*IndexItem)(nil), // 39: proto.IndexItem (*SetCacheOptionsRequest)(nil), // 40: proto.SetCacheOptionsRequest - 
(*SetCacheOptionsResponse)(nil), // 41: proto.SetCacheOptionsResponse - nil, // 42: proto.QueryContext.QualsEntry - nil, // 43: proto.ExecuteRequest.ExecuteConnectionDataEntry - nil, // 44: proto.SetConnectionConfigResponse.FailedConnectionsEntry - nil, // 45: proto.UpdateConnectionConfigsResponse.FailedConnectionsEntry - nil, // 46: proto.Row.ColumnsEntry - nil, // 47: proto.Schema.SchemaEntry - nil, // 48: proto.IndexItem.QualsEntry - (*timestamppb.Timestamp)(nil), // 49: google.protobuf.Timestamp + (*RateLimiterDefinition)(nil), // 41: proto.RateLimiterDefinition + (*SetCacheOptionsResponse)(nil), // 42: proto.SetCacheOptionsResponse + (*SetRateLimitersRequest)(nil), // 43: proto.SetRateLimitersRequest + (*SetRateLimitersResponse)(nil), // 44: proto.SetRateLimitersResponse + (*GetRateLimitersRequest)(nil), // 45: proto.GetRateLimitersRequest + (*GetRateLimitersResponse)(nil), // 46: proto.GetRateLimitersResponse + nil, // 47: proto.QueryContext.QualsEntry + nil, // 48: proto.ExecuteRequest.ExecuteConnectionDataEntry + nil, // 49: proto.SetConnectionConfigResponse.FailedConnectionsEntry + nil, // 50: proto.UpdateConnectionConfigsResponse.FailedConnectionsEntry + nil, // 51: proto.Row.ColumnsEntry + nil, // 52: proto.Schema.SchemaEntry + nil, // 53: proto.IndexItem.QualsEntry + (*timestamppb.Timestamp)(nil), // 54: google.protobuf.Timestamp } var file_plugin_proto_depIdxs = []int32{ 0, // 0: proto.PluginMessage.messageType:type_name -> proto.PluginMessageType @@ -3309,65 +3630,72 @@ var file_plugin_proto_depIdxs = []int32{ 9, // 3: proto.Qual.value:type_name -> proto.QualValue 9, // 4: proto.QualValueList.values:type_name -> proto.QualValue 10, // 5: proto.QualValue.inet_value:type_name -> proto.Inet - 49, // 6: proto.QualValue.timestamp_value:type_name -> google.protobuf.Timestamp + 54, // 6: proto.QualValue.timestamp_value:type_name -> google.protobuf.Timestamp 8, // 7: proto.QualValue.list_value:type_name -> proto.QualValueList 7, // 8: proto.Quals.quals:type_name -> proto.Qual - 42, // 9: proto.QueryContext.quals:type_name -> proto.QueryContext.QualsEntry + 47, // 9: proto.QueryContext.quals:type_name -> proto.QueryContext.QualsEntry 13, // 10: proto.QueryContext.limit:type_name -> proto.NullableInt 12, // 11: proto.ExecuteRequest.query_context:type_name -> proto.QueryContext 14, // 12: proto.ExecuteRequest.trace_context:type_name -> proto.TraceContext - 43, // 13: proto.ExecuteRequest.executeConnectionData:type_name -> proto.ExecuteRequest.ExecuteConnectionDataEntry + 48, // 13: proto.ExecuteRequest.executeConnectionData:type_name -> proto.ExecuteRequest.ExecuteConnectionDataEntry 13, // 14: proto.ExecuteConnectionData.limit:type_name -> proto.NullableInt 30, // 15: proto.ExecuteResponse.row:type_name -> proto.Row 18, // 16: proto.ExecuteResponse.metadata:type_name -> proto.QueryMetadata 34, // 17: proto.GetSchemaResponse.schema:type_name -> proto.Schema - 27, // 18: proto.SetAllConnectionConfigsRequest.configs:type_name -> proto.ConnectionConfig - 27, // 19: proto.UpdateConnectionConfigsRequest.added:type_name -> proto.ConnectionConfig - 27, // 20: proto.UpdateConnectionConfigsRequest.deleted:type_name -> proto.ConnectionConfig - 27, // 21: proto.UpdateConnectionConfigsRequest.changed:type_name -> proto.ConnectionConfig - 44, // 22: proto.SetConnectionConfigResponse.failed_connections:type_name -> proto.SetConnectionConfigResponse.FailedConnectionsEntry - 45, // 23: proto.UpdateConnectionConfigsResponse.failed_connections:type_name -> 
proto.UpdateConnectionConfigsResponse.FailedConnectionsEntry - 46, // 24: proto.Row.columns:type_name -> proto.Row.ColumnsEntry - 36, // 25: proto.TableSchema.columns:type_name -> proto.ColumnDefinition - 32, // 26: proto.TableSchema.getCallKeyColumns:type_name -> proto.KeyColumnsSet - 32, // 27: proto.TableSchema.listCallKeyColumns:type_name -> proto.KeyColumnsSet - 32, // 28: proto.TableSchema.listCallOptionalKeyColumns:type_name -> proto.KeyColumnsSet - 33, // 29: proto.TableSchema.getCallKeyColumnList:type_name -> proto.KeyColumn - 33, // 30: proto.TableSchema.listCallKeyColumnList:type_name -> proto.KeyColumn - 47, // 31: proto.Schema.schema:type_name -> proto.Schema.SchemaEntry - 1, // 32: proto.Column.null_value:type_name -> proto.NullValue - 49, // 33: proto.Column.timestamp_value:type_name -> google.protobuf.Timestamp - 2, // 34: proto.ColumnDefinition.type:type_name -> proto.ColumnType - 30, // 35: proto.QueryResult.rows:type_name -> proto.Row - 39, // 36: proto.IndexBucket.items:type_name -> proto.IndexItem - 48, // 37: proto.IndexItem.quals:type_name -> proto.IndexItem.QualsEntry - 49, // 38: proto.IndexItem.insertion_time:type_name -> google.protobuf.Timestamp - 11, // 39: proto.QueryContext.QualsEntry.value:type_name -> proto.Quals - 16, // 40: proto.ExecuteRequest.ExecuteConnectionDataEntry.value:type_name -> proto.ExecuteConnectionData - 35, // 41: proto.Row.ColumnsEntry.value:type_name -> proto.Column - 31, // 42: proto.Schema.SchemaEntry.value:type_name -> proto.TableSchema - 11, // 43: proto.IndexItem.QualsEntry.value:type_name -> proto.Quals - 4, // 44: proto.WrapperPlugin.EstablishMessageStream:input_type -> proto.EstablishMessageStreamRequest - 19, // 45: proto.WrapperPlugin.GetSchema:input_type -> proto.GetSchemaRequest - 15, // 46: proto.WrapperPlugin.Execute:input_type -> proto.ExecuteRequest - 23, // 47: proto.WrapperPlugin.SetConnectionConfig:input_type -> proto.SetConnectionConfigRequest - 25, // 48: proto.WrapperPlugin.SetAllConnectionConfigs:input_type -> proto.SetAllConnectionConfigsRequest - 26, // 49: proto.WrapperPlugin.UpdateConnectionConfigs:input_type -> proto.UpdateConnectionConfigsRequest - 21, // 50: proto.WrapperPlugin.GetSupportedOperations:input_type -> proto.GetSupportedOperationsRequest - 40, // 51: proto.WrapperPlugin.SetCacheOptions:input_type -> proto.SetCacheOptionsRequest - 5, // 52: proto.WrapperPlugin.EstablishMessageStream:output_type -> proto.PluginMessage - 20, // 53: proto.WrapperPlugin.GetSchema:output_type -> proto.GetSchemaResponse - 17, // 54: proto.WrapperPlugin.Execute:output_type -> proto.ExecuteResponse - 28, // 55: proto.WrapperPlugin.SetConnectionConfig:output_type -> proto.SetConnectionConfigResponse - 28, // 56: proto.WrapperPlugin.SetAllConnectionConfigs:output_type -> proto.SetConnectionConfigResponse - 29, // 57: proto.WrapperPlugin.UpdateConnectionConfigs:output_type -> proto.UpdateConnectionConfigsResponse - 22, // 58: proto.WrapperPlugin.GetSupportedOperations:output_type -> proto.GetSupportedOperationsResponse - 41, // 59: proto.WrapperPlugin.SetCacheOptions:output_type -> proto.SetCacheOptionsResponse - 52, // [52:60] is the sub-list for method output_type - 44, // [44:52] is the sub-list for method input_type - 44, // [44:44] is the sub-list for extension type_name - 44, // [44:44] is the sub-list for extension extendee - 0, // [0:44] is the sub-list for field type_name + 41, // 18: proto.GetSchemaResponse.rate_limiters:type_name -> proto.RateLimiterDefinition + 27, // 19: 
proto.SetAllConnectionConfigsRequest.configs:type_name -> proto.ConnectionConfig + 27, // 20: proto.UpdateConnectionConfigsRequest.added:type_name -> proto.ConnectionConfig + 27, // 21: proto.UpdateConnectionConfigsRequest.deleted:type_name -> proto.ConnectionConfig + 27, // 22: proto.UpdateConnectionConfigsRequest.changed:type_name -> proto.ConnectionConfig + 49, // 23: proto.SetConnectionConfigResponse.failed_connections:type_name -> proto.SetConnectionConfigResponse.FailedConnectionsEntry + 50, // 24: proto.UpdateConnectionConfigsResponse.failed_connections:type_name -> proto.UpdateConnectionConfigsResponse.FailedConnectionsEntry + 51, // 25: proto.Row.columns:type_name -> proto.Row.ColumnsEntry + 36, // 26: proto.TableSchema.columns:type_name -> proto.ColumnDefinition + 32, // 27: proto.TableSchema.getCallKeyColumns:type_name -> proto.KeyColumnsSet + 32, // 28: proto.TableSchema.listCallKeyColumns:type_name -> proto.KeyColumnsSet + 32, // 29: proto.TableSchema.listCallOptionalKeyColumns:type_name -> proto.KeyColumnsSet + 33, // 30: proto.TableSchema.getCallKeyColumnList:type_name -> proto.KeyColumn + 33, // 31: proto.TableSchema.listCallKeyColumnList:type_name -> proto.KeyColumn + 52, // 32: proto.Schema.schema:type_name -> proto.Schema.SchemaEntry + 1, // 33: proto.Column.null_value:type_name -> proto.NullValue + 54, // 34: proto.Column.timestamp_value:type_name -> google.protobuf.Timestamp + 2, // 35: proto.ColumnDefinition.type:type_name -> proto.ColumnType + 30, // 36: proto.QueryResult.rows:type_name -> proto.Row + 39, // 37: proto.IndexBucket.items:type_name -> proto.IndexItem + 53, // 38: proto.IndexItem.quals:type_name -> proto.IndexItem.QualsEntry + 54, // 39: proto.IndexItem.insertion_time:type_name -> google.protobuf.Timestamp + 41, // 40: proto.SetRateLimitersRequest.definitions:type_name -> proto.RateLimiterDefinition + 41, // 41: proto.GetRateLimitersResponse.definitions:type_name -> proto.RateLimiterDefinition + 11, // 42: proto.QueryContext.QualsEntry.value:type_name -> proto.Quals + 16, // 43: proto.ExecuteRequest.ExecuteConnectionDataEntry.value:type_name -> proto.ExecuteConnectionData + 35, // 44: proto.Row.ColumnsEntry.value:type_name -> proto.Column + 31, // 45: proto.Schema.SchemaEntry.value:type_name -> proto.TableSchema + 11, // 46: proto.IndexItem.QualsEntry.value:type_name -> proto.Quals + 4, // 47: proto.WrapperPlugin.EstablishMessageStream:input_type -> proto.EstablishMessageStreamRequest + 19, // 48: proto.WrapperPlugin.GetSchema:input_type -> proto.GetSchemaRequest + 15, // 49: proto.WrapperPlugin.Execute:input_type -> proto.ExecuteRequest + 23, // 50: proto.WrapperPlugin.SetConnectionConfig:input_type -> proto.SetConnectionConfigRequest + 25, // 51: proto.WrapperPlugin.SetAllConnectionConfigs:input_type -> proto.SetAllConnectionConfigsRequest + 26, // 52: proto.WrapperPlugin.UpdateConnectionConfigs:input_type -> proto.UpdateConnectionConfigsRequest + 21, // 53: proto.WrapperPlugin.GetSupportedOperations:input_type -> proto.GetSupportedOperationsRequest + 40, // 54: proto.WrapperPlugin.SetCacheOptions:input_type -> proto.SetCacheOptionsRequest + 43, // 55: proto.WrapperPlugin.SetRateLimiters:input_type -> proto.SetRateLimitersRequest + 45, // 56: proto.WrapperPlugin.GetRateLimiters:input_type -> proto.GetRateLimitersRequest + 5, // 57: proto.WrapperPlugin.EstablishMessageStream:output_type -> proto.PluginMessage + 20, // 58: proto.WrapperPlugin.GetSchema:output_type -> proto.GetSchemaResponse + 17, // 59: proto.WrapperPlugin.Execute:output_type -> 
proto.ExecuteResponse + 28, // 60: proto.WrapperPlugin.SetConnectionConfig:output_type -> proto.SetConnectionConfigResponse + 28, // 61: proto.WrapperPlugin.SetAllConnectionConfigs:output_type -> proto.SetConnectionConfigResponse + 29, // 62: proto.WrapperPlugin.UpdateConnectionConfigs:output_type -> proto.UpdateConnectionConfigsResponse + 22, // 63: proto.WrapperPlugin.GetSupportedOperations:output_type -> proto.GetSupportedOperationsResponse + 42, // 64: proto.WrapperPlugin.SetCacheOptions:output_type -> proto.SetCacheOptionsResponse + 44, // 65: proto.WrapperPlugin.SetRateLimiters:output_type -> proto.SetRateLimitersResponse + 46, // 66: proto.WrapperPlugin.GetRateLimiters:output_type -> proto.GetRateLimitersResponse + 57, // [57:67] is the sub-list for method output_type + 47, // [47:57] is the sub-list for method input_type + 47, // [47:47] is the sub-list for extension type_name + 47, // [47:47] is the sub-list for extension extendee + 0, // [0:47] is the sub-list for field type_name } func init() { file_plugin_proto_init() } @@ -3821,6 +4149,18 @@ func file_plugin_proto_init() { } } file_plugin_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RateLimiterDefinition); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_plugin_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SetCacheOptionsResponse); i { case 0: return &v.state @@ -3832,6 +4172,54 @@ func file_plugin_proto_init() { return nil } } + file_plugin_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SetRateLimitersRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_plugin_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SetRateLimitersResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_plugin_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetRateLimitersRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_plugin_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetRateLimitersResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } file_plugin_proto_msgTypes[3].OneofWrappers = []interface{}{ (*Qual_StringValue)(nil), @@ -3866,7 +4254,7 @@ func file_plugin_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_plugin_proto_rawDesc, NumEnums: 4, - NumMessages: 45, + NumMessages: 50, NumExtensions: 0, NumServices: 1, }, diff --git a/grpc/proto/plugin.proto b/grpc/proto/plugin.proto index 2db389cd..068f041b 100644 --- a/grpc/proto/plugin.proto +++ b/grpc/proto/plugin.proto @@ -12,6 +12,8 @@ service WrapperPlugin { rpc UpdateConnectionConfigs(UpdateConnectionConfigsRequest) returns (UpdateConnectionConfigsResponse); rpc GetSupportedOperations(GetSupportedOperationsRequest) returns (GetSupportedOperationsResponse); rpc SetCacheOptions(SetCacheOptionsRequest) returns (SetCacheOptionsResponse); + rpc SetRateLimiters(SetRateLimitersRequest) returns (SetRateLimitersResponse); + rpc 
GetRateLimiters(GetRateLimitersRequest) returns (GetRateLimitersResponse); } message EstablishMessageStreamRequest{ @@ -125,15 +127,18 @@ message GetSchemaRequest { message GetSchemaResponse { Schema schema = 1; + repeated RateLimiterDefinition rate_limiters = 2; } message GetSupportedOperationsRequest{} +// NOTE: this must be consistent with GetSupportedOperationsResponse in steampipe/pkg/pluginmanager_service/grpc/proto/plugin_manager.proto message GetSupportedOperationsResponse{ bool query_cache = 1; bool multiple_connections = 2; bool message_stream = 3; bool set_cache_options = 4; + bool rate_limiters = 5; } message SetConnectionConfigRequest{ @@ -296,5 +301,30 @@ message SetCacheOptionsRequest { int64 max_size_mb = 4; } +message RateLimiterDefinition { + string name = 1; + float fill_rate = 2; + int64 bucket_size = 3; + int64 max_concurrency = 4; + repeated string scope = 5; + string where = 6; +} + message SetCacheOptionsResponse { } + +message SetRateLimitersRequest { + repeated RateLimiterDefinition definitions = 1; +} + + +message SetRateLimitersResponse { +} + +message GetRateLimitersRequest { +} + + +message GetRateLimitersResponse { + repeated RateLimiterDefinition definitions = 1; +} diff --git a/grpc/proto/plugin_grpc.pb.go b/grpc/proto/plugin_grpc.pb.go index 93d0b8f6..e72d76ea 100644 --- a/grpc/proto/plugin_grpc.pb.go +++ b/grpc/proto/plugin_grpc.pb.go @@ -30,6 +30,8 @@ type WrapperPluginClient interface { UpdateConnectionConfigs(ctx context.Context, in *UpdateConnectionConfigsRequest, opts ...grpc.CallOption) (*UpdateConnectionConfigsResponse, error) GetSupportedOperations(ctx context.Context, in *GetSupportedOperationsRequest, opts ...grpc.CallOption) (*GetSupportedOperationsResponse, error) SetCacheOptions(ctx context.Context, in *SetCacheOptionsRequest, opts ...grpc.CallOption) (*SetCacheOptionsResponse, error) + SetRateLimiters(ctx context.Context, in *SetRateLimitersRequest, opts ...grpc.CallOption) (*SetRateLimitersResponse, error) + GetRateLimiters(ctx context.Context, in *GetRateLimitersRequest, opts ...grpc.CallOption) (*GetRateLimitersResponse, error) } type wrapperPluginClient struct { @@ -158,6 +160,24 @@ func (c *wrapperPluginClient) SetCacheOptions(ctx context.Context, in *SetCacheO return out, nil } +func (c *wrapperPluginClient) SetRateLimiters(ctx context.Context, in *SetRateLimitersRequest, opts ...grpc.CallOption) (*SetRateLimitersResponse, error) { + out := new(SetRateLimitersResponse) + err := c.cc.Invoke(ctx, "/proto.WrapperPlugin/SetRateLimiters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *wrapperPluginClient) GetRateLimiters(ctx context.Context, in *GetRateLimitersRequest, opts ...grpc.CallOption) (*GetRateLimitersResponse, error) { + out := new(GetRateLimitersResponse) + err := c.cc.Invoke(ctx, "/proto.WrapperPlugin/GetRateLimiters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // WrapperPluginServer is the server API for WrapperPlugin service. 
// All implementations must embed UnimplementedWrapperPluginServer // for forward compatibility @@ -170,6 +190,8 @@ type WrapperPluginServer interface { UpdateConnectionConfigs(context.Context, *UpdateConnectionConfigsRequest) (*UpdateConnectionConfigsResponse, error) GetSupportedOperations(context.Context, *GetSupportedOperationsRequest) (*GetSupportedOperationsResponse, error) SetCacheOptions(context.Context, *SetCacheOptionsRequest) (*SetCacheOptionsResponse, error) + SetRateLimiters(context.Context, *SetRateLimitersRequest) (*SetRateLimitersResponse, error) + GetRateLimiters(context.Context, *GetRateLimitersRequest) (*GetRateLimitersResponse, error) mustEmbedUnimplementedWrapperPluginServer() } @@ -201,6 +223,12 @@ func (UnimplementedWrapperPluginServer) GetSupportedOperations(context.Context, func (UnimplementedWrapperPluginServer) SetCacheOptions(context.Context, *SetCacheOptionsRequest) (*SetCacheOptionsResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method SetCacheOptions not implemented") } +func (UnimplementedWrapperPluginServer) SetRateLimiters(context.Context, *SetRateLimitersRequest) (*SetRateLimitersResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SetRateLimiters not implemented") +} +func (UnimplementedWrapperPluginServer) GetRateLimiters(context.Context, *GetRateLimitersRequest) (*GetRateLimitersResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetRateLimiters not implemented") +} func (UnimplementedWrapperPluginServer) mustEmbedUnimplementedWrapperPluginServer() {} // UnsafeWrapperPluginServer may be embedded to opt out of forward compatibility for this service. @@ -364,6 +392,42 @@ func _WrapperPlugin_SetCacheOptions_Handler(srv interface{}, ctx context.Context return interceptor(ctx, in, info, handler) } +func _WrapperPlugin_SetRateLimiters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetRateLimitersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WrapperPluginServer).SetRateLimiters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/proto.WrapperPlugin/SetRateLimiters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WrapperPluginServer).SetRateLimiters(ctx, req.(*SetRateLimitersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WrapperPlugin_GetRateLimiters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRateLimitersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WrapperPluginServer).GetRateLimiters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/proto.WrapperPlugin/GetRateLimiters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WrapperPluginServer).GetRateLimiters(ctx, req.(*GetRateLimitersRequest)) + } + return interceptor(ctx, in, info, handler) +} + // WrapperPlugin_ServiceDesc is the grpc.ServiceDesc for WrapperPlugin service. 
// It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -395,6 +459,14 @@ var WrapperPlugin_ServiceDesc = grpc.ServiceDesc{ MethodName: "SetCacheOptions", Handler: _WrapperPlugin_SetCacheOptions_Handler, }, + { + MethodName: "SetRateLimiters", + Handler: _WrapperPlugin_SetRateLimiters_Handler, + }, + { + MethodName: "GetRateLimiters", + Handler: _WrapperPlugin_GetRateLimiters_Handler, + }, }, Streams: []grpc.StreamDesc{ { diff --git a/grpc/quals.go b/grpc/quals.go index cd9f45f1..ef04eef0 100644 --- a/grpc/quals.go +++ b/grpc/quals.go @@ -122,6 +122,13 @@ func GetQualValue(v *proto.QualValue) interface{} { values = append(values, GetQualValue(l)) } qv = values + default: + // not expected + qv = "" } return qv } + +func GetQualValueString(v *proto.QualValue) string { + return fmt.Sprintf("%v", GetQualValue(v)) +} diff --git a/grpc/shared/grpc.go b/grpc/shared/grpc.go index f96171ff..f2facec7 100644 --- a/grpc/shared/grpc.go +++ b/grpc/shared/grpc.go @@ -49,6 +49,14 @@ func (c *GRPCClient) SetCacheOptions(req *proto.SetCacheOptionsRequest) (*proto. return c.client.SetCacheOptions(c.ctx, req) } +func (c *GRPCClient) SetRateLimiters(req *proto.SetRateLimitersRequest) (*proto.SetRateLimitersResponse, error) { + return c.client.SetRateLimiters(c.ctx, req) +} + +func (c *GRPCClient) GetRateLimiters(req *proto.GetRateLimitersRequest) (*proto.GetRateLimitersResponse, error) { + return c.client.GetRateLimiters(c.ctx, req) +} + // GRPCServer is the gRPC server that GRPCClient talks to. type GRPCServer struct { proto.UnimplementedWrapperPluginServer @@ -85,6 +93,13 @@ func (m *GRPCServer) SetCacheOptions(_ context.Context, req *proto.SetCacheOptio return m.Impl.SetCacheOptions(req) } +func (m *GRPCServer) SetRateLimiters(_ context.Context, req *proto.SetRateLimitersRequest) (*proto.SetRateLimitersResponse, error) { + return m.Impl.SetRateLimiters(req) +} +func (m *GRPCServer) GetRateLimiters(_ context.Context, req *proto.GetRateLimitersRequest) (*proto.GetRateLimitersResponse, error) { + return m.Impl.GetRateLimiters(req) +} + func (m *GRPCServer) EstablishMessageStream(_ *proto.EstablishMessageStreamRequest, server proto.WrapperPlugin_EstablishMessageStreamServer) error { return m.Impl.EstablishMessageStream(server) } diff --git a/grpc/shared/interface.go b/grpc/shared/interface.go index d051c081..ae132b8b 100644 --- a/grpc/shared/interface.go +++ b/grpc/shared/interface.go @@ -29,6 +29,8 @@ type WrapperPluginServer interface { UpdateConnectionConfigs(req *proto.UpdateConnectionConfigsRequest) (*proto.UpdateConnectionConfigsResponse, error) GetSupportedOperations(req *proto.GetSupportedOperationsRequest) (*proto.GetSupportedOperationsResponse, error) SetCacheOptions(req *proto.SetCacheOptionsRequest) (*proto.SetCacheOptionsResponse, error) + SetRateLimiters(req *proto.SetRateLimitersRequest) (*proto.SetRateLimitersResponse, error) + GetRateLimiters(req *proto.GetRateLimitersRequest) (*proto.GetRateLimitersResponse, error) EstablishMessageStream(server proto.WrapperPlugin_EstablishMessageStreamServer) error } @@ -40,6 +42,8 @@ type WrapperPluginClient interface { UpdateConnectionConfigs(req *proto.UpdateConnectionConfigsRequest) (*proto.UpdateConnectionConfigsResponse, error) GetSupportedOperations(req *proto.GetSupportedOperationsRequest) (*proto.GetSupportedOperationsResponse, error) SetCacheOptions(req *proto.SetCacheOptionsRequest) (*proto.SetCacheOptionsResponse, error) + SetRateLimiters(req *proto.SetRateLimitersRequest) 
(*proto.SetRateLimitersResponse, error) + GetRateLimiters(req *proto.GetRateLimitersRequest) (*proto.GetRateLimitersResponse, error) EstablishMessageStream() (proto.WrapperPlugin_EstablishMessageStreamClient, error) } diff --git a/plugin/concurrency.go b/plugin/concurrency.go index f3a52dc2..f7b61f18 100644 --- a/plugin/concurrency.go +++ b/plugin/concurrency.go @@ -1,176 +1,9 @@ package plugin -import ( - "log" - "sync" -) - -/* -DefaultConcurrencyConfig sets the default maximum number of concurrent [HydrateFunc] calls. - -Limit total concurrent hydrate calls: - - DefaultConcurrency: &plugin.DefaultConcurrencyConfig{ - TotalMaxConcurrency: 500, - } - -Limit concurrent hydrate calls to any single HydrateFunc which does not have a [HydrateConfig]: - - DefaultConcurrency: &plugin.DefaultConcurrencyConfig{ - DefaultMaxConcurrency: 100, - } - -Do both: - - DefaultConcurrency: &plugin.DefaultConcurrencyConfig{ - TotalMaxConcurrency: 500, - DefaultMaxConcurrency: 200, - } - -Plugin examples: - - [hackernews] - -[hackernews]: https://github.com/turbot/steampipe-plugin-hackernews/blob/bbfbb12751ad43a2ca0ab70901cde6a88e92cf44/hackernews/plugin.go#L18-L21 -*/ +// Deprecated type DefaultConcurrencyConfig struct { // sets how many HydrateFunc calls can run concurrently in total TotalMaxConcurrency int // sets the default for how many calls to each HydrateFunc can run concurrently DefaultMaxConcurrency int } - -// concurrencyManager struct ensures that hydrate functions stay within concurrency limits -type concurrencyManager struct { - mut sync.RWMutex - // the maximum number of all hydrate calls which can run concurrently - maxConcurrency int - // the maximum concurrency for a single hydrate call - // (this may be overridden by the HydrateConfig for the call) - defaultMaxConcurrencyPerCall int - // total number of hydrate calls in progress - callsInProgress int - // map of the number of instances of each call in progress - callMap map[string]int - // instrumentaton properties - maxCallsInProgress int - maxCallMap map[string]int -} - -func newConcurrencyManager(t *Table) *concurrencyManager { - // if plugin does not define max concurrency, use default - var totalMax int - // if hydrate calls do not define max concurrency, use default - var maxPerCall int - if config := t.Plugin.DefaultConcurrency; config != nil { - if config.TotalMaxConcurrency != 0 { - totalMax = config.TotalMaxConcurrency - } - if config.DefaultMaxConcurrency != 0 { - maxPerCall = config.DefaultMaxConcurrency - } else if totalMax < maxPerCall { - // if the default call concurrency is greater than the total max concurrency, clamp to total - maxPerCall = totalMax - } - } - return &concurrencyManager{ - maxConcurrency: totalMax, - defaultMaxConcurrencyPerCall: maxPerCall, - callMap: make(map[string]int), - maxCallMap: make(map[string]int), - } -} - -// StartIfAllowed checks whether the named hydrate call is permitted to start -// based on the number of running instances of that call, and the total calls in progress -func (c *concurrencyManager) StartIfAllowed(name string, maxCallConcurrency int) (res bool) { - // acquire a Read lock - c.mut.RLock() - // how many concurrent executions of this function are in progress right now? 
- currentExecutions := c.callMap[name] - // ensure we unlock - c.mut.RUnlock() - - if !c.canStart(currentExecutions, maxCallConcurrency) { - return false - } - - // upgrade the mutex to a Write lock - c.mut.Lock() - // ensure we unlock - defer c.mut.Unlock() - - // check again in case another thread grabbed the Write lock before us - currentExecutions = c.callMap[name] - if !c.canStart(currentExecutions, maxCallConcurrency) { - return false - } - - // to get here we are allowed to execute - increment the call counters - c.callMap[name] = currentExecutions + 1 - c.callsInProgress++ - - // update instrumentation - if c.callMap[name] > c.maxCallMap[name] { - c.maxCallMap[name] = c.callMap[name] - } - if c.callsInProgress > c.maxCallsInProgress { - c.maxCallsInProgress = c.callsInProgress - } - - return true -} - -func (c *concurrencyManager) canStart(currentExecutions int, maxCallConcurrency int) bool { - // is the total call limit exceeded? - if c.maxConcurrency > 0 && c.callsInProgress == c.maxConcurrency { - return false - } - - // if there is no config or empty config, the maxCallConcurrency will be 0 - // - use defaultMaxConcurrencyPerCall set on the concurrencyManager - if maxCallConcurrency == 0 { - maxCallConcurrency = c.defaultMaxConcurrencyPerCall - } - - // if we at the call limit return - if maxCallConcurrency > 0 && currentExecutions == maxCallConcurrency { - return false - } - return true -} - -// Finished decrements the counter for the named function -func (c *concurrencyManager) Finished(name string) { - defer func() { - if r := recover(); r != nil { - log.Printf("[WARN] concurrencyManager Finished caught a panic %v", r) - } - }() - // acquire a Write lock - c.mut.Lock() - c.callMap[name]-- - c.callsInProgress-- - c.mut.Unlock() -} - -// Close executes when the query is complete and dumps out the concurrency stats -func (c *concurrencyManager) Close() { - c.DisplayConcurrencyStats() -} - -// DisplayConcurrencyStats displays the summary of all the concurrent hydrate calls -func (c *concurrencyManager) DisplayConcurrencyStats() { - if len(c.maxCallMap) == 0 { - return - } - log.Printf("[TRACE] ------------------------------------") - log.Printf("[TRACE] Concurrency Summary") - log.Printf("[TRACE] ------------------------------------") - for call, concurrency := range c.maxCallMap { - log.Printf("[TRACE] %-30s: %d", call, concurrency) - } - log.Printf("[TRACE] ------------------------------------") - log.Printf("[TRACE] %-30s: %d", "Total", c.maxCallsInProgress) - - log.Printf("[TRACE] ------------------------------------") -} diff --git a/plugin/context.go b/plugin/context.go index 2f3bd3ae..952eb486 100644 --- a/plugin/context.go +++ b/plugin/context.go @@ -31,9 +31,7 @@ func Logger(ctx context.Context) hclog.Logger { return ctx.Value(context_key.Logger).(hclog.Logger) } -/* -Deprecated: Please use [plugin.Table.GetMatrixItemFunc] instead. 
-*/ +// GetMatrixItem retrieves the matrix item from the context func GetMatrixItem(ctx context.Context) map[string]interface{} { value := ctx.Value(context_key.MatrixItem) diff --git a/plugin/fetch_call_rate_limiters.go b/plugin/fetch_call_rate_limiters.go new file mode 100644 index 00000000..fdab2a31 --- /dev/null +++ b/plugin/fetch_call_rate_limiters.go @@ -0,0 +1,32 @@ +package plugin + +import ( + "context" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" + "time" +) + +// a struct defining the rate limiting config the for fetch (list/get) call +type fetchCallRateLimiters struct { + // rate limiter for the get/single-level-list/parent-list call + rateLimiter *rate_limiter.MultiLimiter + + // rate limiters for the child list call - populated if this is a list call and the list has a parent hydrate + childListRateLimiter *rate_limiter.MultiLimiter +} + +// if there is a fetch call rate limiter, wait for it +func (l fetchCallRateLimiters) wait(ctx context.Context) time.Duration { + if l.rateLimiter != nil { + return l.rateLimiter.Wait(ctx) + } + return 0 +} + +// if there is a 'childList' rate limiter, wait for it +func (l fetchCallRateLimiters) childListWait(ctx context.Context) time.Duration { + if l.childListRateLimiter != nil { + return l.childListRateLimiter.Wait(ctx) + } + return 0 +} diff --git a/plugin/funcs.go b/plugin/funcs.go index c3ff38e5..714deb53 100644 --- a/plugin/funcs.go +++ b/plugin/funcs.go @@ -23,7 +23,7 @@ by making an additional API call. However the SDK does all this for you. type HydrateFunc func(context.Context, *QueryData, *HydrateData) (interface{}, error) /* -Deprecated +Deprecated use MatrixItemMapFunc */ type MatrixItemFunc func(context.Context, *Connection) []map[string]interface{} diff --git a/plugin/get_config.go b/plugin/get_config.go index fc24ef8b..78a0d95e 100644 --- a/plugin/get_config.go +++ b/plugin/get_config.go @@ -61,18 +61,20 @@ Plugin examples: [when the get call is used as a column hydrate func]: https://github.com/turbot/steampipe-plugin-hackernews/blob/d14efdd3f2630f0146e575fe07666eda4e126721/hackernews/item.go#L14-L35 */ type GetConfig struct { - // key or keys which are used to uniquely identify rows - used to determine whether a query is a 'get' call - KeyColumns KeyColumnSlice // the hydrate function which is called first when performing a 'get' call. 
// if this returns 'not found', no further hydrate functions are called Hydrate HydrateFunc + // key or keys which are used to uniquely identify rows - used to determine whether a query is a 'get' call + KeyColumns KeyColumnSlice // a function which will return whenther to ignore a given error - // deprecated - use IgnoreConfig + IgnoreConfig *IgnoreConfig + // a function which will return whenther to retry the call if an error is returned + RetryConfig *RetryConfig + Tags map[string]string + + // Deprecated: use IgnoreConfig ShouldIgnoreError ErrorPredicate - IgnoreConfig *IgnoreConfig - RetryConfig *RetryConfig - // max concurrency - this applies when the get function is ALSO used as a column hydrate function - MaxConcurrency int + MaxConcurrency int } // initialise the GetConfig @@ -96,6 +98,12 @@ func (c *GetConfig) initialise(table *Table) { if c.IgnoreConfig == nil { c.IgnoreConfig = &IgnoreConfig{} } + + // create empty tags if needed + if c.Tags == nil { + c.Tags = map[string]string{} + } + // copy the (deprecated) top level ShouldIgnoreError property into the ignore config if c.IgnoreConfig.ShouldIgnoreError == nil { c.IgnoreConfig.ShouldIgnoreError = c.ShouldIgnoreError @@ -131,14 +139,17 @@ func (c *GetConfig) Validate(table *Table) []string { if c.IgnoreConfig != nil { validationErrors = append(validationErrors, c.IgnoreConfig.validate(table)...) } - // ensure there is no explicit hydrate config for the get config + // ensure that if there is an explicit hydrate config for the get hydrate, it does not declare dependencies getHydrateName := helpers.GetFunctionName(table.Get.Hydrate) for _, h := range table.HydrateConfig { if helpers.GetFunctionName(h.Func) == getHydrateName { - validationErrors = append(validationErrors, fmt.Sprintf("table '%s' Get hydrate function '%s' also has an explicit hydrate config declared in `HydrateConfig`", table.Name, getHydrateName)) + if len(h.Depends) > 0 { + validationErrors = append(validationErrors, fmt.Sprintf("table '%s' Get hydrate function '%s' defines dependendencies in its `HydrateConfig`", table.Name, getHydrateName)) + } break } } + // ensure there is no hydrate dependency declared for the get hydrate for _, h := range table.HydrateDependencies { if helpers.GetFunctionName(h.Func) == getHydrateName { diff --git a/plugin/hydrate_call.go b/plugin/hydrate_call.go index 7f4f9f9a..7bf7bade 100644 --- a/plugin/hydrate_call.go +++ b/plugin/hydrate_call.go @@ -2,9 +2,11 @@ package plugin import ( "context" - "sync/atomic" - "github.com/turbot/go-kit/helpers" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" + "log" + "sync/atomic" + "time" ) // hydrateCall struct encapsulates a hydrate call, its config and dependencies @@ -14,40 +16,90 @@ type hydrateCall struct { Depends []string Config *HydrateConfig Name string + + queryData *QueryData + rateLimiter *rate_limiter.MultiLimiter + // the time when we _could_ start the call, if comncurrency limits allowed + potentialStartTime time.Time + concurrencyDelay time.Duration } -func newHydrateCall( config *HydrateConfig) *hydrateCall { +func newHydrateCall(config *HydrateConfig, d *QueryData) (*hydrateCall, error) { res := &hydrateCall{ - Name: helpers.GetFunctionName(config.Func), - Func: config.Func, - Config: config, + Name: helpers.GetFunctionName(config.Func), + Func: config.Func, + Config: config, + queryData: d, } for _, f := range config.Depends { res.Depends = append(res.Depends, helpers.GetFunctionName(f)) } - return res + return res, nil +} + +func (h *hydrateCall) shallowCopy() 
*hydrateCall { + return &hydrateCall{ + Func: h.Func, + Depends: h.Depends, + Config: h.Config, + Name: h.Name, + queryData: h.queryData, + rateLimiter: h.rateLimiter, + } +} + +// identify any rate limiters which apply to this hydrate call +func (h *hydrateCall) initialiseRateLimiter() error { + log.Printf("[INFO] hydrateCall %s initialiseRateLimiter (%s)", h.Name, h.queryData.connectionCallId) + + // ask plugin to build a rate limiter for us + p := h.queryData.plugin + + // now try to construct a multi rate limiter for this call + rateLimiter, err := p.getHydrateCallRateLimiter(h.Config.Tags, h.queryData) + if err != nil { + log.Printf("[WARN] hydrateCall %s getHydrateCallRateLimiter failed: %s (%s)", h.Name, err.Error(), h.queryData.connectionCallId) + return err + } + + h.rateLimiter = rateLimiter + + return nil } // CanStart returns whether this hydrate call can execute // - check whether all dependency hydrate functions have been completed // - check whether the concurrency limits would be exceeded -func (h hydrateCall) canStart(rowData *rowData, name string, concurrencyManager *concurrencyManager) bool { +func (h *hydrateCall) canStart(rowData *rowData) bool { // check whether all hydrate functions we depend on have saved their results for _, dep := range h.Depends { if !helpers.StringSliceContains(rowData.getHydrateKeys(), dep) { return false } } - // ask the concurrency manager whether the call can start - // NOTE: if the call is allowed to start, the concurrency manager ASSUMES THE CALL WILL START - // and increments the counters - // it may seem more logical to do this in the Start() function below, but we need to check and increment the counters - // within the same mutex lock to ensure another call does not start between checking and starting - return concurrencyManager.StartIfAllowed(name, h.Config.MaxConcurrency) + // so all dependencies have been satisfied - if a rate limiting config is defined, + // check whether we satisfy the concurrency limits + if h.rateLimiter == nil { + return true + } + + // if no potential start time is set, set it now + if h.potentialStartTime.IsZero() { + h.potentialStartTime = time.Now() + } + + canStart := h.rateLimiter.TryToAcquireSemaphore() + if canStart { + // record the delay in starting due to concurrency limits + h.concurrencyDelay = time.Since(h.potentialStartTime) + } + return canStart } // Start starts a hydrate call -func (h *hydrateCall) start(ctx context.Context, r *rowData, d *QueryData, concurrencyManager *concurrencyManager) { +func (h *hydrateCall) start(ctx context.Context, r *rowData, d *QueryData) time.Duration { + rateLimitDelay := h.rateLimit(ctx, d) + // tell the rowdata to wait for this call to complete r.wg.Add(1) // update the hydrate count @@ -56,7 +108,29 @@ func (h *hydrateCall) start(ctx context.Context, r *rowData, d *QueryData, concu // call callHydrate async, ignoring return values go func() { r.callHydrate(ctx, d, h.Func, h.Name, h.Config) - // decrement number of hydrate functions running - concurrencyManager.Finished(h.Name) + h.onFinished() }() + return rateLimitDelay + h.concurrencyDelay +} + +func (h *hydrateCall) rateLimit(ctx context.Context, d *QueryData) time.Duration { + // not expected - if there are no rate limiters we should have an empty MultiLimiter + if h.rateLimiter == nil { + log.Printf("[WARN] hydrate call %s has a nil rateLimiter - not expected", h.Name) + return 0 + } + log.Printf("[TRACE] ****** start hydrate call %s, wait for rate limiter (%s)", h.Name, d.connectionCallId) + + // wait
until we can execute + delay := h.rateLimiter.Wait(ctx) + + log.Printf("[TRACE] ****** AFTER rate limiter %s (%dms) (%s)", h.Name, delay.Milliseconds(), d.connectionCallId) + + return delay +} + +func (h *hydrateCall) onFinished() { + if h.rateLimiter != nil { + h.rateLimiter.ReleaseSemaphore() + } } diff --git a/plugin/hydrate_config.go b/plugin/hydrate_config.go index cbc1ec98..ecca921e 100644 --- a/plugin/hydrate_config.go +++ b/plugin/hydrate_config.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/turbot/go-kit/helpers" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" ) /* @@ -91,31 +92,47 @@ Examples: [oci]: https://github.com/turbot/steampipe-plugin-oci/blob/27ddf689f7606009cf26b2716e1634fc91d53585/oci/table_oci_identity_tenancy.go#L23-L27 */ type HydrateConfig struct { - Func HydrateFunc + Func HydrateFunc + // a function which will return whether to ignore a given error + IgnoreConfig *IgnoreConfig + // a function which will return whether to retry the call if an error is returned + RetryConfig *RetryConfig + Depends []HydrateFunc + + // tags - used to resolve the rate limiter for this hydrate call + // for example: + // "service": "s3" + // + // when resolving a rate limiter for a hydrate call, a map of scope values is automatically populated: + // - the table and connection name + // - values specified in the hydrate config + // - matrix quals (with values as string) + // this map is then used to find a rate limiter + Tags map[string]string + MaxConcurrency int - RetryConfig *RetryConfig - IgnoreConfig *IgnoreConfig - // deprecated - use IgnoreConfig - ShouldIgnoreError ErrorPredicate - Depends []HydrateFunc + // Deprecated: use IgnoreConfig + ShouldIgnoreError ErrorPredicate } -func (c *HydrateConfig) String() interface{} { +func (c *HydrateConfig) String() string { var dependsStrings = make([]string, len(c.Depends)) for i, dep := range c.Depends { dependsStrings[i] = helpers.GetFunctionName(dep) } - return fmt.Sprintf(`Func: %s -MaxConcurrency: %d + str := fmt.Sprintf(`Func: %s RetryConfig: %s IgnoreConfig: %s -Depends: %s`, +Depends: %s +ScopeValues: %s`, helpers.GetFunctionName(c.Func), - c.MaxConcurrency, - c.RetryConfig.String(), - c.IgnoreConfig.String(), - strings.Join(dependsStrings, ",")) + c.RetryConfig, + c.IgnoreConfig, + strings.Join(dependsStrings, ","), + rate_limiter.FormatStringMap(c.Tags)) + + return str } func (c *HydrateConfig) initialise(table *Table) { @@ -130,6 +147,12 @@ func (c *HydrateConfig) initialise(table *Table) { if c.IgnoreConfig == nil { c.IgnoreConfig = &IgnoreConfig{} } + + // create empty Tags if needed + if c.Tags == nil { + c.Tags = map[string]string{} + } + // copy the (deprecated) top level ShouldIgnoreError property into the ignore config if c.IgnoreConfig.ShouldIgnoreError == nil { c.IgnoreConfig.ShouldIgnoreError = c.ShouldIgnoreError @@ -154,5 +177,6 @@ func (c *HydrateConfig) Validate(table *Table) []string { if c.IgnoreConfig != nil { validationErrors = append(validationErrors, c.IgnoreConfig.validate(table)...)
} + return validationErrors } diff --git a/plugin/ignore_error_config.go b/plugin/ignore_error_config.go index dc2cd0f3..9a517ff5 100644 --- a/plugin/ignore_error_config.go +++ b/plugin/ignore_error_config.go @@ -61,11 +61,11 @@ Plugin examples: */ type IgnoreConfig struct { ShouldIgnoreErrorFunc ErrorPredicateWithContext - // deprecated, used ShouldIgnoreErrorFunc + // Deprecated: use ShouldIgnoreErrorFunc ShouldIgnoreError ErrorPredicate } -func (c *IgnoreConfig) String() interface{} { +func (c *IgnoreConfig) String() string { var s strings.Builder if c.ShouldIgnoreError != nil { s.WriteString(fmt.Sprintf("ShouldIgnoreError: %s\n", helpers.GetFunctionName(c.ShouldIgnoreError))) diff --git a/plugin/list_config.go b/plugin/list_config.go index d85735bd..2861524e 100644 --- a/plugin/list_config.go +++ b/plugin/list_config.go @@ -2,10 +2,9 @@ package plugin import ( "fmt" - "log" - "github.com/gertd/go-pluralize" "github.com/turbot/go-kit/helpers" + "log" ) /* @@ -36,15 +35,22 @@ Examples: [hackernews]: https://github.com/turbot/steampipe-plugin-hackernews/blob/bbfbb12751ad43a2ca0ab70901cde6a88e92cf44/hackernews/table_hackernews_item.go#L14 */ type ListConfig struct { - KeyColumns KeyColumnSlice // the list function, this should stream the list results back using the QueryData object and return nil Hydrate HydrateFunc + // key or keys which are used to uniquely identify rows - used to optimise the list call + KeyColumns KeyColumnSlice // the parent list function - if we list items with a parent-child relationship, this will list the parent items ParentHydrate HydrateFunc - // deprecated - use IgnoreConfig + // a function which will return whether to ignore a given error + IgnoreConfig *IgnoreConfig + // a function which will return whether to retry the call if an error is returned + RetryConfig *RetryConfig + + Tags map[string]string + ParentTags map[string]string + + // Deprecated: Use IgnoreConfig ShouldIgnoreError ErrorPredicate - IgnoreConfig *IgnoreConfig - RetryConfig *RetryConfig } func (c *ListConfig) initialise(table *Table) { @@ -59,6 +65,14 @@ func (c *ListConfig) initialise(table *Table) { if c.IgnoreConfig == nil { c.IgnoreConfig = &IgnoreConfig{} } + + if c.Tags == nil { + c.Tags = map[string]string{} + } + if c.ParentTags == nil { + c.ParentTags = map[string]string{} + } + // copy the (deprecated) top level ShouldIgnoreError property into the ignore config if c.IgnoreConfig.ShouldIgnoreError == nil { c.IgnoreConfig.ShouldIgnoreError = c.ShouldIgnoreError @@ -83,11 +97,13 @@ func (c *ListConfig) Validate(table *Table) []string { validationErrors = append(validationErrors, c.IgnoreConfig.validate(table)...)
} - // ensure there is no explicit hydrate config for the list config + // ensure that if there is an explicit hydrate config for the list hydrate, it does not declare dependencies listHydrateName := helpers.GetFunctionName(table.List.Hydrate) for _, h := range table.HydrateConfig { if helpers.GetFunctionName(h.Func) == listHydrateName { - validationErrors = append(validationErrors, fmt.Sprintf("table '%s' List hydrate function '%s' also has an explicit hydrate config declared in `HydrateConfig`", table.Name, listHydrateName)) + if len(h.Depends) > 0 { + validationErrors = append(validationErrors, fmt.Sprintf("table '%s' List hydrate function '%s' defines dependencies in its `HydrateConfig`", table.Name, listHydrateName)) + } break } } diff --git a/plugin/plugin.go b/plugin/plugin.go index bedbe8f1..f8e01c99 100644 --- a/plugin/plugin.go +++ b/plugin/plugin.go @@ -11,12 +11,11 @@ import ( "sync/atomic" "time" - "github.com/gertd/go-pluralize" - "github.com/dgraph-io/ristretto" "github.com/eko/gocache/v3/cache" "github.com/eko/gocache/v3/store" "github.com/fsnotify/fsnotify" + "github.com/gertd/go-pluralize" "github.com/hashicorp/go-hclog" "github.com/turbot/go-kit/helpers" connectionmanager "github.com/turbot/steampipe-plugin-sdk/v5/connection" @@ -26,6 +25,7 @@ import ( "github.com/turbot/steampipe-plugin-sdk/v5/plugin/context_key" "github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform" "github.com/turbot/steampipe-plugin-sdk/v5/query_cache" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" "github.com/turbot/steampipe-plugin-sdk/v5/telemetry" "github.com/turbot/steampipe-plugin-sdk/v5/version" "go.opentelemetry.io/otel/attribute" @@ -69,13 +69,18 @@ type Plugin struct { Logger hclog.Logger // TableMap is a map of all the tables in the plugin, keyed by the table name // NOTE: it must be NULL for plugins with dynamic schema - TableMap map[string]*Table - TableMapFunc TableMapFunc - DefaultTransform *transform.ColumnTransforms + TableMap map[string]*Table + TableMapFunc TableMapFunc + DefaultTransform *transform.ColumnTransforms + // deprecated - use RateLimiters to control concurrency DefaultConcurrency *DefaultConcurrencyConfig DefaultRetryConfig *RetryConfig DefaultIgnoreConfig *IgnoreConfig + // rate limiter definitions - these are (optionally) defined by the plugin author + // and do NOT include any config overrides + RateLimiters []*rate_limiter.Definition + // deprecated - use DefaultRetryConfig and DefaultIgnoreConfig DefaultGetConfig *GetConfig // deprecated - use DefaultIgnoreConfig @@ -108,6 +113,15 @@ type Plugin struct { // stream used to send messages back to plugin manager messageStream proto.WrapperPlugin_EstablishMessageStreamServer + // map of rate limiter INSTANCES - these are lazy loaded + // keyed by stringified scope values + rateLimiterInstances *rate_limiter.LimiterMap + // map of rate limiter definitions, keyed by limiter name + // NOTE: this includes limiters defined/overridden in config + resolvedRateLimiterDefs map[string]*rate_limiter.Definition + // lock for this map + rateLimiterDefsMut sync.RWMutex + // map of call ids to avoid duplicates callIdLookup map[string]struct{} callIdLookupMut sync.RWMutex @@ -122,6 +136,8 @@ func (p *Plugin) initialise(logger hclog.Logger) { p.Logger = logger log.Printf("[INFO] initialise plugin '%s', using sdk version %s", p.Name, version.String()) + p.initialiseRateLimits() + // default the schema mode to static if p.SchemaMode == "" { log.Println("[TRACE] defaulting SchemaMode to SchemaModeStatic") @@ -174,6 
+190,25 @@ func (p *Plugin) initialise(logger hclog.Logger) { p.callIdLookup = make(map[string]struct{}) } +func (p *Plugin) initialiseRateLimits() { + p.rateLimiterInstances = rate_limiter.NewLimiterMap() + p.populatePluginRateLimiters() + return +} + +// populate resolvedRateLimiterDefs map with plugin rate limiter definitions +func (p *Plugin) populatePluginRateLimiters() { + p.resolvedRateLimiterDefs = make(map[string]*rate_limiter.Definition, len(p.RateLimiters)) + for _, d := range p.RateLimiters { + // NOTE: we have not validated the limiter definitions yet + // (this is done from initialiseTables, after setting the connection config), + // so just ignore limiters with no name (validation will fail later if this occurs) + if d.Name != "" { + p.resolvedRateLimiterDefs[d.Name] = d + } + } +} + func (p *Plugin) shutdown() { // iterate through the connections in the plugin and // stop the file watchers for each @@ -353,7 +388,7 @@ func (p *Plugin) executeForConnection(streamContext context.Context, req *proto. if table.GetMatrixItemFunc != nil { matrixItem = table.GetMatrixItemFunc(ctx, queryData) } - queryData.setMatrixItem(matrixItem) + queryData.setMatrix(matrixItem) log.Printf("[TRACE] creating query data") @@ -470,9 +505,10 @@ func (p *Plugin) startExecuteSpan(ctx context.Context, req *proto.ExecuteRequest return ctx, span } -// initialiseTables does 2 things: +// initialiseTables does 3 things: // 1) if a TableMapFunc factory function was provided by the plugin, call it // 2) call initialise on the table, passing the plugin pointer which the table stores +// 3) validate the plugin func (p *Plugin) initialiseTables(ctx context.Context, connection *Connection) (tableMap map[string]*Table, err error) { log.Printf("[TRACE] Plugin %s initialiseTables", p.Name) @@ -503,7 +539,7 @@ func (p *Plugin) initialiseTables(ctx context.Context, connection *Connection) ( table.initialise(p) } - // now validate the plugin + // NOW finally validate the plugin // NOTE: must do this after calling TableMapFunc validationWarnings, validationErrors := p.validate(tableMap) diff --git a/plugin/plugin_connection_config.go b/plugin/plugin_connection_config.go index 3b207fae..e193340f 100644 --- a/plugin/plugin_connection_config.go +++ b/plugin/plugin_connection_config.go @@ -138,7 +138,7 @@ func (p *Plugin) setConnectionData(config *proto.ConnectionConfig, updateData *c func (p *Plugin) getConnectionSchema(c *Connection) (map[string]*Table, *grpc.PluginSchema, error) { ctx := context.WithValue(context.Background(), context_key.Logger, p.Logger) - // if the plugin defines a CreateTables func, call it now + // initialiseTables - if the plugin defines a TableMapFunc func, call it now tableMap, err := p.initialiseTables(ctx, c) if err != nil { return nil, nil, err diff --git a/plugin/plugin_grpc.go b/plugin/plugin_grpc.go index b7e9d36f..ebffd01d 100644 --- a/plugin/plugin_grpc.go +++ b/plugin/plugin_grpc.go @@ -10,10 +10,11 @@ import ( "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" "github.com/turbot/steampipe-plugin-sdk/v5/plugin/context_key" "github.com/turbot/steampipe-plugin-sdk/v5/query_cache" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" + "github.com/turbot/steampipe-plugin-sdk/v5/sperr" "golang.org/x/exp/maps" "golang.org/x/sync/semaphore" "log" - "runtime/debug" "sync" ) @@ -52,6 +53,7 @@ func (p *Plugin) setAllConnectionConfigs(configs []*proto.ConnectionConfig, maxC p.Logger.Debug("setAllConnectionConfigs finished") } }() + // create a struct to populate with exemplar schema and 
connection failures // this will be passed into update functions and may be mutated updateData := NewConnectionUpdateData() @@ -193,11 +195,6 @@ func (p *Plugin) execute(req *proto.ExecuteRequest, stream proto.WrapperPlugin_E log.Printf("[INFO] Plugin execute table: %s quals: %s (%s)", req.Table, grpc.QualMapToLogLine(req.QueryContext.Quals), req.CallId) defer log.Printf("[INFO] Plugin execute complete (%s)", req.CallId) - // limit the plugin memory - newLimit := GetMaxMemoryBytes() - debug.SetMemoryLimit(newLimit) - log.Printf("[INFO] Plugin execute, setting memory limit to %dMb", newLimit/(1024*1024)) - outputChan := make(chan *proto.ExecuteResponse, len(req.ExecuteConnectionData)) errorChan := make(chan error, len(req.ExecuteConnectionData)) @@ -313,6 +310,71 @@ func (p *Plugin) establishMessageStream(stream proto.WrapperPlugin_EstablishMess return nil } -func (p *Plugin) setCacheOptions(request *proto.SetCacheOptionsRequest) error { +func (p *Plugin) setCacheOptions(request *proto.SetCacheOptionsRequest) (err error) { + defer func() { + if r := recover(); r != nil { + msg := fmt.Sprintf("setCacheOptions experienced unhandled exception: %s", helpers.ToError(r).Error()) + log.Println("[WARN]", msg) + err = fmt.Errorf(msg) + } + }() + return p.ensureCache(p.buildConnectionSchemaMap(), query_cache.NewQueryCacheOptions(request)) } + +// clear current rate limiter definitions and instances and repopulate resolvedRateLimiterDefs using the +// plugin defined rate limiters and any config defined rate limiters +func (p *Plugin) setRateLimiters(request *proto.SetRateLimitersRequest) (err error) { + log.Printf("[INFO] setRateLimiters") + + defer func() { + if r := recover(); r != nil { + msg := fmt.Sprintf("setRateLimiters experienced unhandled exception: %s", helpers.ToError(r).Error()) + log.Println("[WARN]", msg) + err = fmt.Errorf(msg) + } + }() + var errors []error + // clear all current rate limiters + p.rateLimiterDefsMut.Lock() + defer p.rateLimiterDefsMut.Unlock() + + // clear the map of instantiated rate limiters + p.rateLimiterInstances.Clear() + // repopulate the map of resolved definitions from the plugin defs + p.populatePluginRateLimiters() + + // now add in any limiters from config + for _, pd := range request.Definitions { + d, err := rate_limiter.DefinitionFromProto(pd) + if err != nil { + errors = append(errors, sperr.WrapWithMessage(err, "failed to create rate limiter %s from config", err)) + continue + } + + // is this overriding an existing limiter? + if _, ok := p.resolvedRateLimiterDefs[d.Name]; ok { + log.Printf("[INFO] overriding plugin defined rate limiter '%s' with one defined in config: %s", d.Name, d) + } else { + log.Printf("[INFO] adding rate limiter '%s' defined in config: %s", d.Name, d) + } + + // in any case, store to map + p.resolvedRateLimiterDefs[d.Name] = d + } + + return error_helpers.CombineErrors(errors...) 
+} + +// return the rate limiter defintions defined by the plugin +func (p *Plugin) getRateLimiters() []*proto.RateLimiterDefinition { + if len(p.RateLimiters) == 0 { + return nil + } + res := make([]*proto.RateLimiterDefinition, len(p.RateLimiters)) + for i, d := range p.RateLimiters { + res[i] = d.ToProto() + + } + return res +} diff --git a/plugin/plugin_rate_limiter.go b/plugin/plugin_rate_limiter.go new file mode 100644 index 00000000..a29527cd --- /dev/null +++ b/plugin/plugin_rate_limiter.go @@ -0,0 +1,92 @@ +package plugin + +import ( + "github.com/gertd/go-pluralize" + "github.com/turbot/go-kit/helpers" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" + "golang.org/x/exp/maps" + "log" + "strings" +) + +func (p *Plugin) getHydrateCallRateLimiter(hydrateCallScopeValues map[string]string, queryData *QueryData) (*rate_limiter.MultiLimiter, error) { + log.Printf("[INFO] getHydrateCallRateLimiter (%s)", queryData.connectionCallId) + + // now build the set of all tag values which applies to this call + rateLimiterScopeValues := queryData.resolveRateLimiterScopeValues(hydrateCallScopeValues) + + // add scope values _even for an empty rate limiter_ so they appear in the _ctx field + res := &rate_limiter.MultiLimiter{ + ScopeValues: rateLimiterScopeValues, + } + // short circuit if there are no defs + if len(p.resolvedRateLimiterDefs) == 0 { + log.Printf("[INFO] resolvedRateLimiterConfig: no rate limiters (%s)", queryData.connectionCallId) + return res, nil + } + + log.Printf("[INFO] rateLimiterScopeValues: %s", rateLimiterScopeValues) + + // build a list of all the limiters which match these tags + limiters, err := p.getRateLimitersForScopeValues(rateLimiterScopeValues) + if err != nil { + return nil, err + } + + log.Printf("[INFO] found %d matching %s", + len(limiters), + pluralize.NewClient().Pluralize("limiter", len(limiters), false)) + + // finally package them into a multi-limiter + res = rate_limiter.NewMultiLimiter(limiters, rateLimiterScopeValues) + + log.Printf("[INFO] returning multi limiter: %s", res) + + return res, nil +} + +func (p *Plugin) getRateLimitersForScopeValues(scopeValues map[string]string) ([]*rate_limiter.HydrateLimiter, error) { + h := helpers.GetMD5Hash(rate_limiter.FormatStringMap(scopeValues)) + h = h[len(h)-4:] + log.Printf("[INFO] getRateLimitersForScopeValues (%s)", h) + log.Printf("[INFO] scope values: %v (%s)", scopeValues, h) + log.Printf("[INFO] resolvedRateLimiterDefs: %s (%s)", strings.Join(maps.Keys(p.resolvedRateLimiterDefs), ","), h) + + // put limiters in map to dedupe + var limiters = make(map[string]*rate_limiter.HydrateLimiter) + // lock the map + p.rateLimiterDefsMut.RLock() + defer p.rateLimiterDefsMut.RUnlock() + + // NOTE: use rateLimiterLookup NOT the public RateLimiter property. + // This is to ensure config overrides are respected + for _, l := range p.resolvedRateLimiterDefs { + // build a filtered map of just the scope values required for this limiter + requiredScopeValues := helpers.FilterMap(scopeValues, l.Scope) + // do we have all the required values? 
+ if len(requiredScopeValues) < len(l.Scope) { + log.Printf("[INFO] we DO NOT have scope values required by limiter '%s' - it requires: %s (%s)", l.Name, strings.Join(l.Scope, ","), h) + // this rate limiter does not apply + continue + } + + // now check whether the tag values satisfy any filters the limiter definition has + if !l.SatisfiesFilters(requiredScopeValues) { + log.Printf("[INFO] we DO NOT satisfy the filter for limiter '%s' - filter: %s (%s)", l.Name, l.Where, h) + continue + } + + // this limiter DOES apply to us, get or create a limiter instance + log.Printf("[INFO] limiter '%s' DOES apply to us (%s)", l.Name, h) + + limiter, err := p.rateLimiterInstances.GetOrCreate(l, requiredScopeValues) + if err != nil { + return nil, err + } + // this limiter DOES apply to us, get or create a limiter instance + log.Printf("[INFO] got limiter instance for '%s'(%s)", limiter.Name, h) + + limiters[limiter.Name] = limiter + } + return maps.Values(limiters), nil +} diff --git a/plugin/plugin_test.go b/plugin/plugin_test.go index f1d182ff..25a1461d 100644 --- a/plugin/plugin_test.go +++ b/plugin/plugin_test.go @@ -2,6 +2,8 @@ package plugin import ( "context" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" + "log" "strings" "testing" @@ -76,6 +78,50 @@ var testCasesValidate = map[string]validateTest{ }, expected: []string{""}, }, + "invalid limiter name": { + plugin: Plugin{ + Name: "plugin", + TableMap: map[string]*Table{ + "table": { + Name: "table", + Columns: []*Column{ + { + Name: "name", + Type: proto.ColumnType_STRING, + }, + { + Name: "c1", + Type: proto.ColumnType_STRING, + Hydrate: hydrate1, + }, + { + Name: "c2", + Type: proto.ColumnType_STRING, + Hydrate: hydrate2, + }, + }, + List: &ListConfig{ + Hydrate: listHydrate, + }, + Get: &GetConfig{ + KeyColumns: SingleColumn("name"), + Hydrate: getHydrate, + ShouldIgnoreError: isNotFound, + }, + HydrateDependencies: []HydrateDependencies{{Func: hydrate2, Depends: []HydrateFunc{hydrate1}}}, + }, + }, + RequiredColumns: []*Column{{Name: "name", Type: proto.ColumnType_STRING}}, + RateLimiters: []*rate_limiter.Definition{ + { + Name: "1invalid", + MaxConcurrency: 10, + }, + }, + }, + + expected: []string{"invalid rate limiter name '1invalid' - names can contain letters, digits, underscores (_), and hyphens (-), and cannot start with a digit"}, + }, "get with hydrate dependency": { plugin: Plugin{ Name: "plugin", @@ -138,7 +184,7 @@ var testCasesValidate = map[string]validateTest{ }, RequiredColumns: []*Column{{Name: "name", Type: proto.ColumnType_STRING}}, }, - expected: []string{"table 'table' Get hydrate function 'getHydrate' also has an explicit hydrate config declared in `HydrateConfig`"}, + expected: []string{"table 'table' Get hydrate function 'getHydrate' defines dependendencies in its `HydrateConfig`"}, }, "list with hydrate dependency": { plugin: Plugin{ @@ -202,7 +248,7 @@ var testCasesValidate = map[string]validateTest{ }, RequiredColumns: []*Column{{Name: "name", Type: proto.ColumnType_STRING}}, }, - expected: []string{"table 'table' List hydrate function 'listHydrate' also has an explicit hydrate config declared in `HydrateConfig`"}, + expected: []string{"table 'table' List hydrate function 'listHydrate' defines dependencies in its `HydrateConfig`"}, }, // non deterministic - skip //"circular dep": { @@ -460,7 +506,10 @@ var testCasesValidate = map[string]validateTest{ func TestValidate(t *testing.T) { for name, test := range testCasesValidate { - test.plugin.initialise(hclog.NewNullLogger()) + logger := 
hclog.NewNullLogger() + log.SetOutput(logger.StandardWriter(&hclog.StandardLoggerOptions{InferLevels: true})) + + test.plugin.initialise(logger) test.plugin.initialiseTables(context.Background(), &Connection{Name: "test"}) _, validationErrors := test.plugin.validate(test.plugin.TableMap) diff --git a/plugin/plugin_validate.go b/plugin/plugin_validate.go index 799666e1..0e27964f 100644 --- a/plugin/plugin_validate.go +++ b/plugin/plugin_validate.go @@ -3,6 +3,7 @@ package plugin import ( "fmt" "github.com/gertd/go-pluralize" + "github.com/turbot/go-kit/helpers" "log" ) @@ -31,12 +32,19 @@ func (p *Plugin) validate(tableMap map[string]*Table) (validationWarnings, valid log.Printf("[TRACE] validate table names") validationErrors = append(validationErrors, p.validateTableNames()...) + log.Printf("[TRACE] validate rate limiters") + validationErrors = append(validationErrors, p.validateRateLimiters()...) + log.Printf("[INFO] plugin validation result: %d %s %d %s", len(validationWarnings), pluralize.NewClient().Pluralize("warning", len(validationWarnings), false), len(validationErrors), pluralize.NewClient().Pluralize("error", len(validationErrors), false)) + // dedupe the errors and warnings + validationWarnings = helpers.SortedMapKeys(helpers.SliceToLookup(validationWarnings)) + validationErrors = helpers.SortedMapKeys(helpers.SliceToLookup(validationErrors)) + return validationWarnings, validationErrors } @@ -50,3 +58,23 @@ func (p *Plugin) validateTableNames() []string { } return validationErrors } + +// validate all rate limiters +func (p *Plugin) validateRateLimiters() []string { + log.Printf("[INFO] validateRateLimiters") + var validationErrors []string + // initialise and validate each limiter + // NOTE: we do not need to validate any limiters defined in config and set via SetRateLimiters GRPC call + // as these are validated when added + // So we can use RateLimiters property, not resolvedRateLimiterDefs + for _, l := range p.RateLimiters { + if err := l.Initialise(); err != nil { + validationErrors = append(validationErrors, err.Error()) + } else { + // initialised ok, now validate + validationErrors = append(validationErrors, l.Validate()...)
+ } + } + + return validationErrors +} diff --git a/plugin/query_data.go b/plugin/query_data.go index b1b67d9c..2536ef9b 100644 --- a/plugin/query_data.go +++ b/plugin/query_data.go @@ -4,7 +4,6 @@ import ( "context" "encoding/json" "fmt" - "golang.org/x/exp/maps" "log" "runtime/debug" "sync" @@ -19,7 +18,10 @@ import ( "github.com/turbot/steampipe-plugin-sdk/v5/logging" "github.com/turbot/steampipe-plugin-sdk/v5/plugin/quals" "github.com/turbot/steampipe-plugin-sdk/v5/query_cache" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" "github.com/turbot/steampipe-plugin-sdk/v5/telemetry" + "golang.org/x/exp/maps" + "golang.org/x/sync/semaphore" ) // how may rows do we cache in the rowdata channel @@ -88,6 +90,7 @@ type QueryData struct { StreamLeafListItem func(context.Context, ...interface{}) // internal + // the status of the in-progress query queryStatus *queryStatus // the callId for this connection @@ -95,21 +98,27 @@ type QueryData struct { plugin *Plugin // a list of the required hydrate calls (EXCLUDING the fetch call) hydrateCalls []*hydrateCall + // the rate limiter(s) which apply to the fetch call + fetchLimiters *fetchCallRateLimiters // all the columns that will be returned by this query - columns map[string]*QueryColumn - concurrencyManager *concurrencyManager - rowDataChan chan *rowData - errorChan chan error + columns map[string]*QueryColumn + rowDataChan chan *rowData + errorChan chan error // channel to send results outputChan chan *proto.ExecuteResponse // wait group used to synchronise parent-child list fetches - each child hydrate function increments this wait group listWg *sync.WaitGroup // when executing parent child list calls, we cache the parent list result in the query data passed to the child list call - parentItem interface{} + parentItem interface{} + filteredMatrix []map[string]interface{} // column quals which were used to filter the matrix filteredMatrixColumns []string + // lookup keyed by matrix property names - used to add matrix quals to scope values + matrixColLookup map[string]struct{} + // the set of matrix vals we are executing for + matrixItem map[string]interface{} // ttl for the execute call cacheTtl int64 @@ -134,6 +143,15 @@ type QueryData struct { // cancel the execution context // (this is only used if the cache is enabled - if a set request has no subscribers) cancel context.CancelFunc + + // auto populated tags used to resolve a rate limiter for each hydrate call + // (hydrate-call specific tags will be added when we resolve the limiter) + rateLimiterScopeValues map[string]string + + fetchMetadata *hydrateMetadata + parentHydrateMetadata *hydrateMetadata + listHydrate HydrateFunc + childHydrate HydrateFunc } func newQueryData(connectionCallId string, p *Plugin, queryContext *QueryContext, table *Table, connectionData *ConnectionData, executeData *proto.ExecuteConnectionData, outputChan chan *proto.ExecuteResponse) (*QueryData, error) { @@ -168,24 +186,29 @@ func newQueryData(connectionCallId string, p *Plugin, queryContext *QueryContext // temporary dir for this connection // this will only created if getSourceFiles is used - tempDir: getConnectionTempDir(p.tempDir, connectionData.Connection.Name), + tempDir: getConnectionTempDir(p.tempDir, connectionData.Connection.Name), + matrixColLookup: make(map[string]struct{}), } d.StreamListItem = d.streamListItem // for legacy compatibility - plugins should no longer call StreamLeafListItem directly d.StreamLeafListItem = d.streamLeafListItem + + // is this a get or a list fetch? 
d.setFetchType(table) + // for count(*) queries, there will be no columns - add in 1 column so that we have some data to return queryContext.ensureColumns(table) // build list of required hydrate calls, based on requested columns d.populateRequiredHydrateCalls() + // build list of all columns returned by these hydrate calls (and the fetch call) d.populateColumns() - d.concurrencyManager = newConcurrencyManager(table) // populate the query status // if a limit is set, use this to set rows required - otherwise just set to MaxInt32 d.queryStatus = newQueryStatus(d.QueryContext.Limit) + return d, nil } @@ -200,10 +223,10 @@ func getReservedColumns(table *Table) map[string]struct{} { return res } -// ShallowCopy creates a shallow copy of the QueryData, i.e. most pointer properties are copied +// shallowCopy creates a shallow copy of the QueryData, i.e. most pointer properties are copied // this is used to pass different quals to multiple list/get calls, when an 'in' clause is specified -func (d *QueryData) ShallowCopy() *QueryData { - clone := &QueryData{ +func (d *QueryData) shallowCopy() *QueryData { + copyQueryData := &QueryData{ Table: d.Table, EqualsQuals: make(map[string]*proto.QualValue), Quals: make(KeyColumnQualMap), @@ -213,34 +236,50 @@ func (d *QueryData) ShallowCopy() *QueryData { ConnectionManager: d.ConnectionManager, ConnectionCache: d.ConnectionCache, Matrix: d.Matrix, + connectionCallId: d.connectionCallId, plugin: d.plugin, cacheTtl: d.cacheTtl, cacheEnabled: d.cacheEnabled, - filteredMatrix: d.filteredMatrix, - hydrateCalls: d.hydrateCalls, - concurrencyManager: d.concurrencyManager, - rowDataChan: d.rowDataChan, - errorChan: d.errorChan, - outputChan: d.outputChan, - listWg: d.listWg, - columns: d.columns, - queryStatus: d.queryStatus, + fetchLimiters: d.fetchLimiters, + filteredMatrix: d.filteredMatrix, + + rowDataChan: d.rowDataChan, + errorChan: d.errorChan, + outputChan: d.outputChan, + listWg: d.listWg, + columns: d.columns, + queryStatus: d.queryStatus, + matrixColLookup: d.matrixColLookup, + listHydrate: d.listHydrate, + childHydrate: d.childHydrate, + rateLimiterScopeValues: make(map[string]string), } // NOTE: we create a deep copy of the keyColumnQuals // - this is so they can be updated in the copied QueryData without mutating the original for k, v := range d.EqualsQuals { - clone.EqualsQuals[k] = v + copyQueryData.EqualsQuals[k] = v } for k, v := range d.Quals { - clone.Quals[k] = v + copyQueryData.Quals[k] = v + } + for k, v := range d.rateLimiterScopeValues { + copyQueryData.rateLimiterScopeValues[k] = v + } + // shallow copy the hydrate call but **change the query data to the cloned version** + // this is important as the cloned query data may have the matrix item set - required to resolve the rate limiter + copyQueryData.hydrateCalls = make([]*hydrateCall, len(d.hydrateCalls)) + for i, c := range d.hydrateCalls { + clonedCall := c.shallowCopy() + clonedCall.queryData = copyQueryData + copyQueryData.hydrateCalls[i] = clonedCall } // NOTE: point the public streaming endpoints to their internal implementations IN THIS OBJECT - clone.StreamListItem = clone.streamListItem - clone.StreamLeafListItem = clone.streamLeafListItem - return clone + copyQueryData.StreamListItem = copyQueryData.streamListItem + copyQueryData.StreamLeafListItem = copyQueryData.streamLeafListItem + return copyQueryData } // RowsRemaining returns how many rows are required to complete the query @@ -270,12 +309,76 @@ func (d *QueryData) GetSourceFiles(source string) ([]string, error) { return
getSourceFiles(source, d.tempDir) } -func (d *QueryData) setMatrixItem(matrix []map[string]interface{}) { +func (d *QueryData) setMatrix(matrix []map[string]interface{}) { d.Matrix = matrix // if we have key column quals for any matrix properties, filter the matrix // to exclude items which do not satisfy the quals // this populates the property filteredMatrix d.filterMatrixItems() + // build list of the matrix property names + d.populateMatrixPropertyNames() +} + +func (d *QueryData) filterMatrixItems() { + if len(d.Matrix) == 0 { + return + } + log.Printf("[TRACE] filterMatrixItems - there are %d matrix items", len(d.Matrix)) + log.Printf("[TRACE] unfiltered matrix: %v", d.Matrix) + var filteredMatrix []map[string]interface{} + + // build a keycolumn slice from the matrix items + var matrixKeyColumns KeyColumnSlice + for column := range d.Matrix[0] { + matrixKeyColumns = append(matrixKeyColumns, &KeyColumn{ + Name: column, + Operators: []string{"="}, + }) + } + // now see which of these key columns are satisfied by the provided quals + matrixQualMap := NewKeyColumnQualValueMap(d.QueryContext.UnsafeQuals, matrixKeyColumns) + + for _, m := range d.Matrix { + log.Printf("[TRACE] matrix item %v", m) + // do all key columns which exist for this matrix item match the matrix values? + includeMatrixItem := true + + for col, val := range m { + log.Printf("[TRACE] col %s val %s", col, val) + // is there a quals for this matrix column? + + if matrixQuals, ok := matrixQualMap[col]; ok { + log.Printf("[TRACE] quals found for matrix column: %v", matrixQuals) + // if there IS a single equals qual which DOES NOT match this matrix item, exclude the matrix item + if matrixQuals.SingleEqualsQual() { + includeMatrixItem = d.shouldIncludeMatrixItem(matrixQuals, val) + // store this column - we will need this when building a cache key + if !includeMatrixItem { + d.filteredMatrixColumns = append(d.filteredMatrixColumns, col) + } + } + } else { + log.Printf("[TRACE] quals found for matrix column: %s", col) + } + } + + if includeMatrixItem { + log.Printf("[TRACE] INCLUDE matrix item") + filteredMatrix = append(filteredMatrix, m) + } else { + log.Printf("[TRACE] EXCLUDE matrix item") + } + } + d.filteredMatrix = filteredMatrix + log.Printf("[TRACE] filtered matrix: %v", d.Matrix) +} + +func (d *QueryData) populateMatrixPropertyNames() { + for _, m := range d.Matrix { + for prop := range m { + d.matrixColLookup[prop] = struct{}{} + } + } } // build a list of required hydrate function calls which must be executed, based on the columns which have been requested @@ -294,7 +397,7 @@ func (d *QueryData) populateRequiredHydrateCalls() { // initialise hydrateColumnMap d.hydrateColumnMap = make(map[string][]string) - requiredCallBuilder := newRequiredHydrateCallBuilder(t, fetchCallName) + requiredCallBuilder := newRequiredHydrateCallBuilder(d, fetchCallName) // populate a map keyed by function name to ensure we only store each hydrate function once for _, column := range t.Columns { @@ -317,7 +420,7 @@ func (d *QueryData) populateRequiredHydrateCalls() { hydrateName = helpers.GetFunctionName(hydrateFunc) // if this column was requested in query, add the hydrate call to required calls if helpers.StringSliceContains(colsUsed, column.Name) { - requiredCallBuilder.Add(hydrateFunc) + requiredCallBuilder.Add(hydrateFunc, d.connectionCallId) } } @@ -328,7 +431,6 @@ func (d *QueryData) populateRequiredHydrateCalls() { // now we have all the hydrate calls, build a list of all the columns that will be returned by the hydrate 
functions. // these will be used for the cache - } // build list of all columns returned by the fetch call and required hydrate calls @@ -353,19 +455,23 @@ func (d *QueryData) addColumnsForHydrate(hydrateName string) { } } +// set the specific matrix item we are executin gfor // add matrix item into KeyColumnQuals and Quals -func (d *QueryData) updateQualsWithMatrixItem(matrixItem map[string]interface{}) { +func (d *QueryData) setMatrixItem(matrixItem map[string]interface{}) { + d.matrixItem = matrixItem + log.Printf("[INFO] setMatrixItem %s", matrixItem) for col, value := range matrixItem { qualValue := proto.NewQualValue(value) - // replace any existing entry for both Quals and KeyColumnQuals + // replace any existing entry for both Quals and EqualsQuals d.EqualsQuals[col] = qualValue - d.Quals[col] = &KeyColumnQuals{Name: col, Quals: []*quals.Qual{{Column: col, Value: qualValue}}} + d.Quals[col] = &KeyColumnQuals{Name: col, Quals: []*quals.Qual{{Column: col, Operator: quals.QualOperatorEqual, Value: qualValue}}} } } // setFetchType determines whether this is a get or a list call, and populates the keyColumnQualValues map func (d *QueryData) setFetchType(table *Table) { log.Printf("[TRACE] setFetchType %v", d.QueryContext.UnsafeQuals) + if table.Get != nil { // default to get, even before checking the quals // this handles the case of a get call only @@ -377,9 +483,7 @@ func (d *QueryData) setFetchType(table *Table) { if unsatisfiedColumns := qualMap.GetUnsatisfiedKeyColumns(table.Get.KeyColumns); len(unsatisfiedColumns) == 0 { // so this IS a get call - all quals are satisfied log.Printf("[TRACE] Set fetchType to fetchTypeGet") - d.EqualsQuals = qualMap.ToEqualsQualValueMap() - d.Quals = qualMap - d.logQualMaps() + d.setQuals(qualMap) return } } @@ -391,68 +495,17 @@ func (d *QueryData) setFetchType(table *Table) { if len(table.List.KeyColumns) > 0 { // build a qual map from List key columns qualMap := NewKeyColumnQualValueMap(d.QueryContext.UnsafeQuals, table.List.KeyColumns) - // assign to the map of all key column quals - d.Quals = qualMap - // convert to a map of equals quals to populate legacy `KeyColumnQuals` map - d.EqualsQuals = d.Quals.ToEqualsQualValueMap() + d.setQuals(qualMap) } - d.logQualMaps() } } -func (d *QueryData) filterMatrixItems() { - if len(d.Matrix) == 0 { - return - } - log.Printf("[TRACE] filterMatrixItems - there are %d matrix items", len(d.Matrix)) - log.Printf("[TRACE] unfiltered matrix: %v", d.Matrix) - var filteredMatrix []map[string]interface{} - - // build a keycolumn slice from the matrix items - var matrixKeyColumns KeyColumnSlice - for column := range d.Matrix[0] { - matrixKeyColumns = append(matrixKeyColumns, &KeyColumn{ - Name: column, - Operators: []string{"="}, - }) - } - // now see which of these key columns are satisfied by the provided quals - matrixQualMap := NewKeyColumnQualValueMap(d.QueryContext.UnsafeQuals, matrixKeyColumns) - - for _, m := range d.Matrix { - log.Printf("[TRACE] matrix item %v", m) - // do all key columns which exist for this matrix item match the matrix values? - includeMatrixItem := true - - for col, val := range m { - log.Printf("[TRACE] col %s val %s", col, val) - // is there a quals for this matrix column? 
- - if matrixQuals, ok := matrixQualMap[col]; ok { - log.Printf("[TRACE] quals found for matrix column: %v", matrixQuals) - // if there IS a single equals qual which DOES NOT match this matrix item, exclude the matrix item - if matrixQuals.SingleEqualsQual() { - includeMatrixItem = d.shouldIncludeMatrixItem(matrixQuals, val) - // store this column - we will need this when building a cache key - if !includeMatrixItem { - d.filteredMatrixColumns = append(d.filteredMatrixColumns, col) - } - } - } else { - log.Printf("[TRACE] quals found for matrix column: %s", col) - } - } - - if includeMatrixItem { - log.Printf("[TRACE] INCLUDE matrix item") - filteredMatrix = append(filteredMatrix, m) - } else { - log.Printf("[TRACE] EXCLUDE matrix item") - } - } - d.filteredMatrix = filteredMatrix - log.Printf("[TRACE] filtered matrix: %v", d.Matrix) - +func (d *QueryData) setQuals(qualMap KeyColumnQualMap) { + // convert to a map of equals quals to populate legacy `KeyColumnQuals` map + d.EqualsQuals = qualMap.ToEqualsQualValueMap() + // assign to the map of all key column quals + d.Quals = qualMap + d.logQualMaps() } func (d *QueryData) shouldIncludeMatrixItem(quals *KeyColumnQuals, matrixVal interface{}) bool { @@ -522,9 +575,15 @@ func (d *QueryData) callChildListHydrate(ctx context.Context, parentItem interfa if helpers.IsNil(parentItem) { return } + + // wait for any configured child ListCall rate limiters + rateLimitDelay := d.fetchLimiters.childListWait(ctx) + + // populate delay in metadata + d.fetchMetadata.DelayMs = rateLimitDelay.Milliseconds() + callingFunction := helpers.GetCallingFunction(1) d.listWg.Add(1) - go func() { defer func() { if r := recover(); r != nil { @@ -538,11 +597,11 @@ func (d *QueryData) callChildListHydrate(ctx context.Context, parentItem interfa }() defer d.listWg.Done() // create a copy of query data with the stream function set to streamLeafListItem - childQueryData := d.ShallowCopy() + childQueryData := d.shallowCopy() childQueryData.StreamListItem = childQueryData.streamLeafListItem // set parent list result so that it can be stored in rowdata hydrate results in streamLeafListItem childQueryData.parentItem = parentItem - // now call the parent list + // now call the child list _, err := d.Table.List.Hydrate(ctx, childQueryData, &HydrateData{Item: parentItem}) if err != nil { d.streamError(err) @@ -576,7 +635,7 @@ func (d *QueryData) streamLeafListItem(ctx context.Context, items ...interface{} debug.FreeOSMemory() } - // do a deep nil check on item - if nil, just skipthis item + // do a deep nil check on item - if nil, just skip this item if helpers.IsNil(item) { log.Printf("[TRACE] streamLeafListItem received nil item, skipping") continue @@ -586,8 +645,8 @@ func (d *QueryData) streamLeafListItem(ctx context.Context, items ...interface{} // create rowData, passing matrixItem from context rd := newRowData(d, item) - - rd.matrixItem = GetMatrixItem(ctx) + // set the matrix item + rd.matrixItem = d.matrixItem // set the parent item on the row data rd.parentItem = d.parentItem // NOTE: add the item as the hydrate data for the list call @@ -616,6 +675,11 @@ func (d *QueryData) buildRowsAsync(ctx context.Context, rowChan chan *proto.Row, // we need to use a wait group for rows we cannot close the row channel when the item channel is closed // as getRow is executing asyncronously var rowWg sync.WaitGroup + maxConcurrentRows := rate_limiter.GetMaxConcurrentRows() + var rowSemaphore *semaphore.Weighted + if maxConcurrentRows > 0 { + rowSemaphore = 
semaphore.NewWeighted(int64(maxConcurrentRows)) + } // start goroutine to read items from item chan and generate row data go func() { @@ -637,9 +701,21 @@ func (d *QueryData) buildRowsAsync(ctx context.Context, rowChan chan *proto.Row, // rowData channel closed - nothing more to do return } - + if rowSemaphore != nil { + t := time.Now() + //log.Printf("[INFO] buildRowsAsync acquire semaphore (%s)", d.connectionCallId) + if err := rowSemaphore.Acquire(ctx, 1); err != nil { + log.Printf("[INFO] SEMAPHORE ERROR %s", err) + // TODO KAI does this quit?? + d.errorChan <- err + return + } + if time.Since(t) > 1*time.Millisecond { + log.Printf("[INFO] buildRowsAsync waited %dms to hydrate row (%s)", time.Since(t).Milliseconds(), d.connectionCallId) + } + } rowWg.Add(1) - d.buildRowAsync(ctx, rowData, rowChan, &rowWg) + d.buildRowAsync(ctx, rowData, rowChan, &rowWg, rowSemaphore) } } }() @@ -653,8 +729,6 @@ func (d *QueryData) streamRows(ctx context.Context, rowChan chan *proto.Row, don log.Printf("[INFO] QueryData streamRows (%s)", d.connectionCallId) defer func() { - // tell the concurrency manage we are done (it may log the concurrency stats) - d.concurrencyManager.Close() log.Printf("[INFO] QueryData streamRows DONE (%s)", d.connectionCallId) // if there is an error or cancellation, abort the pending set @@ -754,8 +828,9 @@ func (d *QueryData) streamError(err error) { d.errorChan <- err } +// TODO KAI this seems to get called even after cancellation // execute necessary hydrate calls to populate row data -func (d *QueryData) buildRowAsync(ctx context.Context, rowData *rowData, rowChan chan *proto.Row, wg *sync.WaitGroup) { +func (d *QueryData) buildRowAsync(ctx context.Context, rowData *rowData, rowChan chan *proto.Row, wg *sync.WaitGroup, sem *semaphore.Weighted) { go func() { defer func() { if r := recover(); r != nil { @@ -763,6 +838,7 @@ func (d *QueryData) buildRowAsync(ctx context.Context, rowData *rowData, rowChan d.streamError(helpers.ToError(r)) } wg.Done() + sem.Release(1) }() if rowData == nil { log.Printf("[INFO] buildRowAsync nil rowData - streaming nil row (%s)", d.connectionCallId) @@ -776,18 +852,26 @@ func (d *QueryData) buildRowAsync(ctx context.Context, rowData *rowData, rowChan log.Printf("[WARN] getRow failed with error %v", err) d.streamError(err) } else { - // remove reserved columns - d.removeReservedColumns(row) - // NOTE: add the Steampipecontext data to the row - d.addContextData(row) - + if row != nil { + // remove reserved columns + d.removeReservedColumns(row) + // NOTE: add the Steampipecontext data to the row + d.addContextData(row, rowData) + } rowChan <- row } }() } -func (d *QueryData) addContextData(row *proto.Row) { - jsonValue, _ := json.Marshal(map[string]string{"connection_name": d.Connection.Name}) +func (d *QueryData) addContextData(row *proto.Row, rowData *rowData) { + // NOTE: we use the rowdata QueryData, rather than ourselves + // this may be a child QueryData if there is a matrix + rowCtxData := newRowCtxData(rowData) + jsonValue, err := json.Marshal(rowCtxData) + if err != nil { + log.Printf("[WARN] failed to marshal JSON for row context data: %s", err.Error()) + return + } row.Columns[contextColumnName] = &proto.Column{Value: &proto.Column_JsonValue{JsonValue: jsonValue}} } @@ -822,3 +906,8 @@ func (d *QueryData) removeReservedColumns(row *proto.Row) { delete(row.Columns, c) } } + +func (d *QueryData) setListCalls(listCall, childHydrate HydrateFunc) { + d.listHydrate = listCall + d.childHydrate = childHydrate +} diff --git 
a/plugin/query_data_rate_limiters.go b/plugin/query_data_rate_limiters.go new file mode 100644 index 00000000..ac0b19ed --- /dev/null +++ b/plugin/query_data_rate_limiters.go @@ -0,0 +1,182 @@ +package plugin + +import ( + "context" + "github.com/turbot/go-kit/helpers" + "github.com/turbot/steampipe-plugin-sdk/v5/grpc" + "github.com/turbot/steampipe-plugin-sdk/v5/plugin/quals" + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" + "log" + "time" +) + +func (d *QueryData) WaitForListRateLimit(ctx context.Context) { + d.fetchLimiters.wait(ctx) +} + +func (d *QueryData) initialiseRateLimiters() { + log.Printf("[INFO] initialiseRateLimiters for query data %p (%s)", d, d.connectionCallId) + // build the base set of scope values used to resolve a rate limiter + d.populateRateLimitScopeValues() + + // populate the rate limiters for the fetch call(s) (get/list/parent-list) + d.resolveFetchRateLimiters() + + // populate the rate limiters for the hydrate calls + d.resolveHydrateRateLimiters() +} + +// resolve the scope values for a given hydrate call +func (d *QueryData) resolveRateLimiterScopeValues(hydrateCallScopeValues map[string]string) map[string]string { + log.Printf("[INFO] resolveRateLimiterScopeValues (%s)", d.connectionCallId) + log.Printf("[INFO] HydrateCall tags %v", hydrateCallScopeValues) + log.Printf("[INFO] Table tags %v", d.Table.Tags) + log.Printf("[INFO] QueryData rateLimiterScopeValues %v", d.rateLimiterScopeValues) + // build list of source value maps which we will merge + // this is in order of DECREASING precedence, i.e. highest first + scopeValueList := []map[string]string{ + // static scope values defined by hydrate config + hydrateCallScopeValues, + // static scope values defined by table config + d.Table.Tags, + // scope values for this scan (static and column values) + d.rateLimiterScopeValues, + } + + // merge these in precedence order + res := rate_limiter.MergeScopeValues(scopeValueList) + log.Printf("[INFO] merged scope values %v", res) + return res +} + +/* + build the base set of scope used to resolve a rate limiter + +this will consist of: +- plugin, connection and table name +- quals (with value as string) +*/ +func (d *QueryData) populateRateLimitScopeValues() { + d.rateLimiterScopeValues = map[string]string{} + + // add the connection + d.rateLimiterScopeValues[rate_limiter.RateLimiterScopeConnection] = d.Connection.Name + + // add matrix quals + for column, qualsForColumn := range d.Quals { + if _, isMatrixQual := d.matrixColLookup[column]; isMatrixQual { + for _, qual := range qualsForColumn.Quals { + if qual.Operator == quals.QualOperatorEqual { + qualValueString := grpc.GetQualValueString(qual.Value) + d.rateLimiterScopeValues[column] = qualValueString + } + } + } + } +} + +func (d *QueryData) resolveFetchRateLimiters() error { + d.fetchLimiters = &fetchCallRateLimiters{} + // is it a get + if d.FetchType == fetchTypeGet { + return d.resolveGetRateLimiters() + } + + // otherwise this is a list + + // is there a parent-child hydrate? 
+ if d.Table.List.ParentHydrate != nil { + // it is a parent child list + return d.resolveParentChildRateLimiters() + } + + // ok it's just a single level list hydrate + return d.resolveListRateLimiters() +} +func (d *QueryData) resolveGetRateLimiters() error { + // NOTE: RateLimit cannot be nil as it is initialized to an empty struct if needed + getLimiter, err := d.plugin.getHydrateCallRateLimiter(d.Table.Get.Tags, d) + if err != nil { + log.Printf("[WARN] get call %s getHydrateCallRateLimiter failed: %s (%s)", helpers.GetFunctionName(d.Table.Get.Hydrate), err.Error(), d.connectionCallId) + return err + } + + d.fetchLimiters.rateLimiter = getLimiter + return nil +} + +func (d *QueryData) resolveParentChildRateLimiters() error { + + // NOTE: RateLimit and ParentRateLimit cannot be nil as they are initialized to an empty struct if needed + + // resolve the parent hydrate rate limiter + parentRateLimiter, err := d.plugin.getHydrateCallRateLimiter(d.Table.List.ParentTags, d) + if err != nil { + log.Printf("[WARN] resolveParentChildRateLimiters: %s: getHydrateCallRateLimiter failed: %s (%s)", helpers.GetFunctionName(d.Table.List.ParentHydrate), err.Error(), d.connectionCallId) + return err + } + // assign the parent rate limiter to d.fetchLimiters + d.fetchLimiters.rateLimiter = parentRateLimiter + + // resolve the child hydrate rate limiter + childRateLimiter, err := d.plugin.getHydrateCallRateLimiter(d.Table.List.Tags, d) + if err != nil { + log.Printf("[WARN] resolveParentChildRateLimiters: %s: getHydrateCallRateLimiter failed: %s (%s)", helpers.GetFunctionName(d.Table.List.Hydrate), err.Error(), d.connectionCallId) + return err + } + d.fetchLimiters.childListRateLimiter = childRateLimiter + + return nil +} + +func (d *QueryData) resolveListRateLimiters() error { + // NOTE: RateLimit cannot be nil as it is initialized to an empty struct if needed + listLimiter, err := d.plugin.getHydrateCallRateLimiter(d.Table.List.Tags, d) + if err != nil { + log.Printf("[WARN] get call %s getHydrateCallRateLimiter failed: %s (%s)", helpers.GetFunctionName(d.Table.Get.Hydrate), err.Error(), d.connectionCallId) + return err + } + d.fetchLimiters.rateLimiter = listLimiter + return nil +} + +func (d *QueryData) setListLimiterMetadata(fetchDelay time.Duration) { + fetchMetadata := &hydrateMetadata{ + FuncName: helpers.GetFunctionName(d.listHydrate), + RateLimiters: d.fetchLimiters.rateLimiter.LimiterNames(), + ScopeValues: d.fetchLimiters.rateLimiter.ScopeValues, + DelayMs: fetchDelay.Milliseconds(), + } + if d.childHydrate == nil { + fetchMetadata.Type = string(fetchTypeList) + d.fetchMetadata = fetchMetadata + } else { + d.fetchMetadata = &hydrateMetadata{ + Type: string(fetchTypeList), + FuncName: helpers.GetFunctionName(d.childHydrate), + RateLimiters: d.fetchLimiters.childListRateLimiter.LimiterNames(), + ScopeValues: d.fetchLimiters.childListRateLimiter.ScopeValues, + } + fetchMetadata.Type = "parentHydrate" + d.parentHydrateMetadata = fetchMetadata + } +} + +func (d *QueryData) setGetLimiterMetadata(fetchDelay time.Duration) { + d.fetchMetadata = &hydrateMetadata{ + Type: string(fetchTypeGet), + FuncName: helpers.GetFunctionName(d.Table.Get.Hydrate), + RateLimiters: d.fetchLimiters.rateLimiter.LimiterNames(), + ScopeValues: d.fetchLimiters.rateLimiter.ScopeValues, + DelayMs: fetchDelay.Milliseconds(), + } +} + +func (d *QueryData) resolveHydrateRateLimiters() error { + for _, h := range d.hydrateCalls { + if err := h.initialiseRateLimiter(); err != nil { + return err + } + } + return nil +} diff --git 
a/plugin/required_hydrate_calls.go b/plugin/required_hydrate_calls.go index 541e64bc..4201904b 100644 --- a/plugin/required_hydrate_calls.go +++ b/plugin/required_hydrate_calls.go @@ -2,42 +2,52 @@ package plugin import ( "github.com/turbot/go-kit/helpers" + "log" ) // helper class to build list of required hydrate calls type requiredHydrateCallBuilder struct { fetchCallName string requiredHydrateCalls map[string]*hydrateCall - table *Table + queryData *QueryData } -func newRequiredHydrateCallBuilder(t *Table, fetchCallName string) *requiredHydrateCallBuilder { +func newRequiredHydrateCallBuilder(d *QueryData, fetchCallName string) *requiredHydrateCallBuilder { return &requiredHydrateCallBuilder{ - table: t, + queryData: d, fetchCallName: fetchCallName, requiredHydrateCalls: make(map[string]*hydrateCall), } } -func (c requiredHydrateCallBuilder) Add(hydrateFunc HydrateFunc) { +func (c requiredHydrateCallBuilder) Add(hydrateFunc HydrateFunc, callId string) error { hydrateName := helpers.GetFunctionName(hydrateFunc) // if the resolved hydrate call is NOT the same as the fetch call, add to the map of hydrate functions to call if hydrateName != c.fetchCallName { if _, ok := c.requiredHydrateCalls[hydrateName]; ok { - return + return nil } // get the config for this hydrate function - config := c.table.hydrateConfigMap[hydrateName] + config := c.queryData.Table.hydrateConfigMap[hydrateName] - c.requiredHydrateCalls[hydrateName] = newHydrateCall(config) + call, err := newHydrateCall(config, c.queryData) + if err != nil { + log.Printf("[WARN] failed to add a hydrate call for %s: %s", hydrateName, err.Error()) + return err + } + c.requiredHydrateCalls[hydrateName] = call // now add dependencies (we have already checked for circular dependencies so recursion is fine for _, dep := range config.Depends { - c.Add(dep) + if err := c.Add(dep, callId); err != nil { + log.Printf("[WARN] failed to add a hydrate call for %s, which is a dependency of %s: %s", helpers.GetFunctionName(dep), hydrateName, err.Error()) + return err + } } } + return nil } func (c requiredHydrateCallBuilder) Get() []*hydrateCall { @@ -45,5 +55,6 @@ func (c requiredHydrateCallBuilder) Get() []*hydrateCall { for _, call := range c.requiredHydrateCalls { res = append(res, call) } + return res } diff --git a/plugin/retry_config.go b/plugin/retry_config.go index 76832968..2156656d 100644 --- a/plugin/retry_config.go +++ b/plugin/retry_config.go @@ -69,7 +69,7 @@ type RetryConfig struct { MaxDuration int64 } -func (c *RetryConfig) String() interface{} { +func (c *RetryConfig) String() string { if c.ShouldRetryError != nil { return fmt.Sprintf("ShouldRetryError: %s", helpers.GetFunctionName(c.ShouldRetryError)) } diff --git a/plugin/row_data.go b/plugin/row_data.go index 82b51673..4e6c6f63 100644 --- a/plugin/row_data.go +++ b/plugin/row_data.go @@ -22,16 +22,17 @@ type rowData struct { // the output of the get/list call which is passed to all other hydrate calls item interface{} // if there was a parent-child list call, store the parent list item - parentItem interface{} - matrixItem map[string]interface{} - hydrateResults map[string]interface{} - hydrateErrors map[string]error - mut sync.RWMutex - waitChan chan bool - wg sync.WaitGroup - table *Table - errorChan chan error - queryData *QueryData + parentItem interface{} + matrixItem map[string]interface{} + hydrateResults map[string]interface{} + hydrateErrors map[string]error + hydrateMetadata []*hydrateMetadata + mut sync.RWMutex + waitChan chan bool + wg sync.WaitGroup + table *Table 
+ errorChan chan error + queryData *QueryData } // newRowData creates an empty rowData object @@ -56,14 +57,9 @@ func (r *rowData) getRow(ctx context.Context) (*proto.Row, error) { // (this is a data structure containing fetch specific data, e.g. region) // store this in the context for use by the transform functions rowDataCtx := context.WithValue(ctx, context_key.MatrixItem, r.matrixItem) - // clone the query data and add the matrix properties to quals - rowQueryData := r.queryData.ShallowCopy() - rowQueryData.updateQualsWithMatrixItem(r.matrixItem) - // make any required hydrate function calls // - these populate the row with data entries corresponding to the hydrate function name - - if err := r.startAllHydrateCalls(rowDataCtx, rowQueryData); err != nil { + if err := r.startAllHydrateCalls(rowDataCtx, r.queryData); err != nil { log.Printf("[WARN] startAllHydrateCalls failed with error %v", err) return nil, err } @@ -76,6 +72,7 @@ func (r *rowData) startAllHydrateCalls(rowDataCtx context.Context, rowQueryData // make a map of started hydrate calls for this row - this is used to determine which calls have not started yet var callsStarted = map[string]bool{} + // TODO use retry.DO for { var allStarted = true for _, call := range r.queryData.hydrateCalls { @@ -86,9 +83,18 @@ func (r *rowData) startAllHydrateCalls(rowDataCtx context.Context, rowQueryData } // so call needs to start - can it? - if call.canStart(r, hydrateFuncName, r.queryData.concurrencyManager) { + if call.canStart(r) { // execute the hydrate call asynchronously - call.start(rowDataCtx, r, rowQueryData, r.queryData.concurrencyManager) + rateLimitDelay := call.start(rowDataCtx, r, rowQueryData) + // store the call metadata + r.hydrateMetadata = append(r.hydrateMetadata, &hydrateMetadata{ + Type: "hydrate", + FuncName: hydrateFuncName, + ScopeValues: call.rateLimiter.ScopeValues, + RateLimiters: call.rateLimiter.LimiterNames(), + DelayMs: rateLimitDelay.Milliseconds(), + }) + callsStarted[hydrateFuncName] = true } else { allStarted = false diff --git a/plugin/row_with_metadata.go b/plugin/row_with_metadata.go new file mode 100644 index 00000000..39058c07 --- /dev/null +++ b/plugin/row_with_metadata.go @@ -0,0 +1,52 @@ +package plugin + +import ( + "os" + "strings" +) + +const ( + EnvDiagnosticsLevel = "STEAMPIPE_DIAGNOSTICS_LEVEL" + DiagnosticsAll = "ALL" + DiagnosticsNone = "NONE" +) + +var ValidDiagnosticsLevels = map[string]struct{}{ + DiagnosticsAll: {}, + DiagnosticsNone: {}, +} + +type hydrateMetadata struct { + Type string `json:"type"` + FuncName string `json:"function_name"` + ScopeValues map[string]string `json:"scope_values,omitempty"` + RateLimiters []string `json:"rate_limiters,omitempty"` + DelayMs int64 `json:"rate_limiter_delay_ms,omitempty"` +} + +type rowCtxData struct { + Connection string `json:"connection"` + Diagnostics *rowCtxDiagnostics `json:"diagnostics,omitempty"` +} +type rowCtxDiagnostics struct { + Calls []*hydrateMetadata `json:"calls"` +} + +func newRowCtxData(rd *rowData) *rowCtxData { + d := rd.queryData + res := &rowCtxData{ + Connection: d.Connection.Name, + } + + if strings.ToUpper(os.Getenv(EnvDiagnosticsLevel)) == DiagnosticsAll { + calls := append([]*hydrateMetadata{d.fetchMetadata}, rd.hydrateMetadata...) + if d.parentHydrateMetadata != nil { + calls = append([]*hydrateMetadata{d.parentHydrateMetadata}, calls...) 
+ } + + res.Diagnostics = &rowCtxDiagnostics{ + Calls: calls, + } + } + return res +} diff --git a/plugin/serve.go b/plugin/serve.go index f3214d24..dc50f409 100644 --- a/plugin/serve.go +++ b/plugin/serve.go @@ -2,11 +2,11 @@ package plugin import ( "context" + "fmt" "log" "net/http" _ "net/http/pprof" "os" - "fmt" "github.com/hashicorp/go-hclog" "github.com/turbot/go-kit/helpers" @@ -49,15 +49,15 @@ passing callback functions to implement each of the plugin interface functions: const ( UnrecognizedRemotePluginMessage = "Unrecognized remote plugin message:" UnrecognizedRemotePluginMessageSuffix = "\nThis usually means" - StartupPanicMessage = "Unhandled exception starting plugin: " + PluginStartupFailureMessage = "Plugin startup failed: " ) func Serve(opts *ServeOpts) { defer func() { if r := recover(); r != nil { - msg := fmt.Sprintf("%s%s", StartupPanicMessage, helpers.ToError(r).Error()) + msg := fmt.Sprintf("%s%s", PluginStartupFailureMessage, helpers.ToError(r).Error()) log.Println("[WARN]", msg) - // write to stdout so the plugin manager can extract the panic message + // write to stdout so the plugin manager can extract the error message fmt.Println(msg) } }() @@ -90,7 +90,17 @@ func Serve(opts *ServeOpts) { } // TODO add context into all of these handlers - grpc.NewPluginServer(p.Name, p.setConnectionConfig, p.setAllConnectionConfigs, p.updateConnectionConfigs, p.getSchema, p.execute, p.establishMessageStream, p.setCacheOptions).Serve() + grpc.NewPluginServer(p.Name, + p.setConnectionConfig, + p.setAllConnectionConfigs, + p.updateConnectionConfigs, + p.getSchema, + p.execute, + p.establishMessageStream, + p.setCacheOptions, + p.setRateLimiters, + p.getRateLimiters, + ).Serve() } func setupLogger() hclog.Logger { diff --git a/plugin/table.go b/plugin/table.go index 3f556a36..f1c21f93 100644 --- a/plugin/table.go +++ b/plugin/table.go @@ -1,6 +1,7 @@ package plugin import ( + "github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter" "log" "github.com/turbot/go-kit/helpers" @@ -19,6 +20,7 @@ type TableCacheOptions struct { } /* +| Table defines the properties of a plugin table: - The columns that are returned: [plugin.Table.Columns]. 
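The diagnostics types added above in `row_with_metadata.go` are easiest to understand from the payload they produce. A minimal sketch (not part of this change set, assuming it sits inside the `plugin` package so the unexported types are visible; the connection name, function name, limiter name and delay are illustrative only):

```go
package plugin

import (
	"encoding/json"
	"fmt"
)

// sketch: build a rowCtxData by hand and print the JSON that would be attached
// to the row when STEAMPIPE_DIAGNOSTICS_LEVEL=ALL is set
func exampleRowCtxDiagnostics() {
	d := &rowCtxData{
		Connection: "aws_prod", // illustrative connection name
		Diagnostics: &rowCtxDiagnostics{
			Calls: []*hydrateMetadata{
				{
					Type:         "hydrate",
					FuncName:     "getS3AccessPoint",
					ScopeValues:  map[string]string{"connection": "aws_prod", "table": "aws_s3_access_point"},
					RateLimiters: []string{"connection-region-service"},
					DelayMs:      12,
				},
			},
		},
	}
	b, _ := json.MarshalIndent(d, "", "  ")
	fmt.Println(string(b))
}
```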
@@ -62,6 +64,10 @@ type Table struct { // cache options - allows disabling of cache for this table Cache *TableCacheOptions + // tags used to provide scope values for all child hydrate calls + // (may be used for more in future) + Tags map[string]string + // deprecated - use DefaultIgnoreConfig DefaultShouldIgnoreError ErrorPredicate @@ -89,6 +95,13 @@ func (t *Table) initialise(p *Plugin) { t.DefaultIgnoreConfig = &IgnoreConfig{} } + // create Tags if needed + if t.Tags == nil { + t.Tags = make(map[string]string) + } + // populate tags with table name + t.Tags[rate_limiter.RateLimiterScopeTable] = t.Name + if t.DefaultShouldIgnoreError != nil && t.DefaultIgnoreConfig.ShouldIgnoreError == nil { // copy the (deprecated) top level ShouldIgnoreError property into the ignore config t.DefaultIgnoreConfig.ShouldIgnoreError = t.DefaultShouldIgnoreError @@ -171,9 +184,10 @@ func (t *Table) buildHydrateConfigMap() { hydrateName := helpers.GetFunctionName(get.Hydrate) t.hydrateConfigMap[hydrateName] = &HydrateConfig{ Func: get.Hydrate, - ShouldIgnoreError: get.ShouldIgnoreError, IgnoreConfig: get.IgnoreConfig, RetryConfig: get.RetryConfig, + Tags: get.Tags, + ShouldIgnoreError: get.ShouldIgnoreError, MaxConcurrency: get.MaxConcurrency, } } diff --git a/plugin/table_fetch.go b/plugin/table_fetch.go index d4dedbeb..21043802 100644 --- a/plugin/table_fetch.go +++ b/plugin/table_fetch.go @@ -3,10 +3,6 @@ package plugin import ( "context" "fmt" - "log" - "strings" - "sync" - "github.com/gertd/go-pluralize" "github.com/turbot/go-kit/helpers" "github.com/turbot/steampipe-plugin-sdk/v5/grpc" @@ -17,13 +13,16 @@ import ( "github.com/turbot/steampipe-plugin-sdk/v5/telemetry" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" + "log" + "strings" + "sync" ) type fetchType string const ( fetchTypeList fetchType = "list" - fetchTypeGet = "get" + fetchTypeGet fetchType = "get" ) // call either 'get' or 'list'. 
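To illustrate how the `Tags` field added above feeds rate limiter scoping, here is a small sketch (not part of this change set) combining call-level tags, table-level tags and implicit scope values with `MergeScopeValues` from `rate_limiter/scope_values.go`, added later in this change. The precedence order shown (call tags first, then table tags, then implicit values) and the tag values themselves are assumptions for illustration:

```go
package main

import (
	"fmt"

	"github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter"
)

func main() {
	// earlier maps take precedence in MergeScopeValues, so call-level tags win here
	callTags := map[string]string{"service": "s3"}                                            // e.g. HydrateConfig.Tags
	tableTags := map[string]string{rate_limiter.RateLimiterScopeTable: "aws_s3_access_point"} // set in Table.initialise
	implicit := map[string]string{rate_limiter.RateLimiterScopeConnection: "aws_prod"}        // populated by the SDK

	scopeValues := rate_limiter.MergeScopeValues([]map[string]string{callTags, tableTags, implicit})
	fmt.Println(rate_limiter.ScopeValuesString(scopeValues))
	// connection=aws_prod,service=s3,table=aws_s3_access_point
}
```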
@@ -149,7 +148,7 @@ func (t *Table) doGetForQualValues(ctx context.Context, queryData *QueryData, ke // we will make a copy of queryData and update KeyColumnQuals to replace the list value with a single qual value for _, qv := range qualValueList.Values { // make a shallow copy of the query data and modify the quals - queryDataCopy := queryData.ShallowCopy() + queryDataCopy := queryData.shallowCopy() queryDataCopy.EqualsQuals[keyColumnName] = qv queryDataCopy.Quals[keyColumnName] = &KeyColumnQuals{Name: keyColumnName, Quals: quals.QualSlice{{Column: keyColumnName, Operator: "=", Value: qv}}} @@ -195,12 +194,19 @@ func (t *Table) doGet(ctx context.Context, queryData *QueryData, hydrateItem int var getItem interface{} if len(queryData.Matrix) == 0 { + // now we know there is no matrix, initialise the rate limiters for this query data + queryData.initialiseRateLimiters() + // now wait for any configured 'get' rate limiters + fetchDelay := queryData.fetchLimiters.wait(ctx) + // set the metadata + queryData.setGetLimiterMetadata(fetchDelay) + // just invoke callHydrateWithRetries() getItem, err = rd.callHydrateWithRetries(ctx, queryData, t.Get.Hydrate, t.Get.IgnoreConfig, t.Get.RetryConfig) } else { // the table has a matrix - we will invoke get for each matrix item - getItem, err = t.getForEach(ctx, queryData, rd) + getItem, err = t.getForEachMatrixItem(ctx, queryData, rd) } if err != nil { @@ -223,11 +229,10 @@ func (t *Table) doGet(ctx context.Context, queryData *QueryData, hydrateItem int return nil } -// getForEach executes the provided get call for each of a set of matrixItem +// getForEachMatrixItem executes the provided get call for each of a set of matrixItem // enables multi-partition fetching -func (t *Table) getForEach(ctx context.Context, queryData *QueryData, rd *rowData) (interface{}, error) { - - log.Printf("[TRACE] getForEach, matrixItem list: %v\n", queryData.filteredMatrix) +func (t *Table) getForEachMatrixItem(ctx context.Context, queryData *QueryData, rd *rowData) (interface{}, error) { + log.Printf("[TRACE] getForEachMatrixItem, matrixItem list: %v\n", queryData.filteredMatrix) var wg sync.WaitGroup errorChan := make(chan error, len(queryData.Matrix)) @@ -264,8 +269,14 @@ func (t *Table) getForEach(ctx context.Context, queryData *QueryData, rd *rowDat fetchContext := context.WithValue(ctx, context_key.MatrixItem, matrixItem) // clone the query data and add the matrix properties to quals - matrixQueryData := queryData.ShallowCopy() - matrixQueryData.updateQualsWithMatrixItem(matrixItem) + matrixQueryData := queryData.shallowCopy() + matrixQueryData.setMatrixItem(matrixItem) + // now we have set the matrix item, initialise the rate limiters for this query data + matrixQueryData.initialiseRateLimiters() + // now wait for any configured 'get' rate limiters + fetchDelay := matrixQueryData.fetchLimiters.wait(ctx) + // set the metadata + matrixQueryData.setGetLimiterMetadata(fetchDelay) item, err := rd.callHydrateWithRetries(fetchContext, matrixQueryData, t.Get.Hydrate, t.Get.IgnoreConfig, t.Get.RetryConfig) @@ -306,6 +317,7 @@ func (t *Table) getForEach(ctx context.Context, queryData *QueryData, rd *rowDat } var item interface{} if len(results) == 1 { + // TODO KAI what???? 
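+ // exactly one matrix item returned a get result - use that result's matrix item and data for this row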
// set the matrix item on the row data rd.matrixItem = results[0].matrixItem item = results[0].item @@ -357,64 +369,79 @@ func (t *Table) executeListCall(ctx context.Context, queryData *QueryData) { } // invoke list call - hydrateResults is nil as list call does not use it (it must comply with HydrateFunc signature) + var childHydrate HydrateFunc = nil listCall := t.List.Hydrate // if there is a parent hydrate function, call that // - the child 'Hydrate' function will be called by QueryData.StreamListItem, if t.List.ParentHydrate != nil { listCall = t.List.ParentHydrate + childHydrate = t.List.Hydrate } + // store the list call and child hydrate call - these will be used later when we call setListLimiterMetadata + queryData.setListCalls(listCall, childHydrate) + // NOTE: if there is an IN qual, the qual value will be a list of values // in this case we call list for each value - if len(t.List.KeyColumns) > 0 { - log.Printf("[TRACE] list config defines key columns, checking for list qual values") + if listQual := t.getListCallQualValueList(queryData); listQual != nil { - // we can support IN calls for key columns if only 1 qual has a list value + log.Printf("[TRACE] one qual with list value will be processed: %v", *listQual) + qualValueList := listQual.Value.GetListValue() + t.doListForQualValues(ctx, queryData, listQual.Column, qualValueList, listCall) + return + } - // first determine whether more than 1 qual has a list value - qualsWithListValues := queryData.Quals.GetListQualValues() + t.doList(ctx, queryData, listCall) +} - numQualsWithListValues := len(qualsWithListValues) - if numQualsWithListValues > 0 { - log.Printf("[TRACE] %d %s have list values", - numQualsWithListValues, - pluralize.NewClient().Pluralize("qual", numQualsWithListValues, false)) +// if this table defines key columns, and if there is a SINGLE qual with a list value +// return that qual +func (t *Table) getListCallQualValueList(queryData *QueryData) *quals.Qual { + if len(t.List.KeyColumns) == 0 { + return nil + } - // if we have more than one qual with list values, extract the required ones - // if more than one of these is required, this is an error - // - we do not support multiple required quals with list values - var requiredListQuals quals.QualSlice - if numQualsWithListValues > 1 { - log.Printf("[TRACE] more than 1 qual has a list value - counting required quals with list value") + log.Printf("[TRACE] list config defines key columns, checking for list qual values") - for _, listQual := range qualsWithListValues { + // we can support IN calls for key columns if only 1 qual has a list value - // find key column - if c := t.List.KeyColumns.Find(listQual.Column); c.Require == Required { - requiredListQuals = append(requiredListQuals, listQual) - } - } - if len(requiredListQuals) > 1 { - log.Printf("[WARN] more than 1 required qual has a list value - we cannot call list for each so passing quals through to plugin unaltered") - qualsWithListValues = nil - } else { - log.Printf("[TRACE] after removing optional quals %d required remain", len(requiredListQuals)) - qualsWithListValues = requiredListQuals + // first determine whether more than 1 qual has a list value + qualsWithListValues := queryData.Quals.GetListQualValues() + + numQualsWithListValues := len(qualsWithListValues) + if numQualsWithListValues > 0 { + log.Printf("[TRACE] %d %s have list values", + numQualsWithListValues, + pluralize.NewClient().Pluralize("qual", numQualsWithListValues, false)) + + // if we have more than one qual with list values, 
extract the required ones + // if more than one of these is required, this is an error + // - we do not support multiple required quals with list values + var requiredListQuals quals.QualSlice + if numQualsWithListValues > 1 { + log.Printf("[TRACE] more than 1 qual has a list value - counting required quals with list value") + + for _, listQual := range qualsWithListValues { + + // find key column + if c := t.List.KeyColumns.Find(listQual.Column); c.Require == Required { + requiredListQuals = append(requiredListQuals, listQual) } } + if len(requiredListQuals) > 1 { + log.Printf("[WARN] more than 1 required qual has a list value - we cannot call list for each so passing quals through to plugin unaltered") + qualsWithListValues = nil + } else { + log.Printf("[TRACE] after removing optional quals %d required remain", len(requiredListQuals)) + qualsWithListValues = requiredListQuals + } } - // list are there any list quals left to process? - if len(qualsWithListValues) == 1 { - listQual := qualsWithListValues[0] - log.Printf("[TRACE] one qual with list value will be processed: %v", *listQual) - qualValueList := listQual.Value.GetListValue() - t.doListForQualValues(ctx, queryData, listQual.Column, qualValueList, listCall) - return - } - } - - t.doList(ctx, queryData, listCall) + // list are there any list quals left to process? + if len(qualsWithListValues) == 1 { + return qualsWithListValues[0] + } + return nil } // doListForQualValues is called when there is an equals qual and the qual value is a list of values @@ -426,7 +453,7 @@ func (t *Table) doListForQualValues(ctx context.Context, queryData *QueryData, k for _, qv := range qualValueList.Values { log.Printf("[TRACE] executeListCall passing updated query data, qv: %v", qv) // make a shallow copy of the query data and modify the value of the key column qual to be the value list item - queryDataCopy := queryData.ShallowCopy() + queryDataCopy := queryData.shallowCopy() // update qual maps to replace list value with list element queryDataCopy.EqualsQuals[keyColumn] = qv queryDataCopy.Quals[keyColumn] = &KeyColumnQuals{ @@ -449,33 +476,43 @@ func (t *Table) doList(ctx context.Context, queryData *QueryData, listCall Hydra log.Printf("[TRACE] doList (%s)", queryData.connectionCallId) + // create rowData, purely so we can call callHydrateWithRetries rd := newRowData(queryData, nil) - // if a matrix is defined, run listForEach + // if a matrix is defined, run listForEachMatrixItem if queryData.Matrix != nil { - log.Printf("[TRACE] doList: matrix len %d - calling listForEach", len(queryData.Matrix)) - t.listForEach(ctx, queryData, listCall) - } else { - log.Printf("[TRACE] doList: no matrix item") + log.Printf("[TRACE] doList: matrix len %d - calling listForEachMatrixItem", len(queryData.Matrix)) + t.listForEachMatrixItem(ctx, queryData, listCall) + return + } - // we cannot retry errors in the list hydrate function after streaming has started - listRetryConfig := t.List.RetryConfig.GetListRetryConfig() + // OK we know there is no matrix, initialise the rate limiters for this query data + queryData.initialiseRateLimiters() + // now wait for any configured 'list' rate limiters + fetchDelay := queryData.fetchLimiters.wait(ctx) + // set the metadata + queryData.setListLimiterMetadata(fetchDelay) - if _, err := rd.callHydrateWithRetries(ctx, queryData, listCall, t.List.IgnoreConfig, listRetryConfig); err != nil { - log.Printf("[WARN] doList callHydrateWithRetries (%s) returned err %s", queryData.connectionCallId, err.Error()) - 
queryData.streamError(err) - } + log.Printf("[TRACE] doList: no matrix item") + + // we cannot retry errors in the list hydrate function after streaming has started + listRetryConfig := t.List.RetryConfig.GetListRetryConfig() + + if _, err := rd.callHydrateWithRetries(ctx, queryData, listCall, t.List.IgnoreConfig, listRetryConfig); err != nil { + log.Printf("[WARN] doList callHydrateWithRetries (%s) returned err %s", queryData.connectionCallId, err.Error()) + queryData.streamError(err) } + } // ListForEach executes the provided list call for each of a set of matrixItem // enables multi-partition fetching -func (t *Table) listForEach(ctx context.Context, queryData *QueryData, listCall HydrateFunc) { - ctx, span := telemetry.StartSpan(ctx, t.Plugin.Name, "Table.listForEach (%s)", t.Name) +func (t *Table) listForEachMatrixItem(ctx context.Context, queryData *QueryData, listCall HydrateFunc) { + ctx, span := telemetry.StartSpan(ctx, t.Plugin.Name, "Table.listForEachMatrixItem (%s)", t.Name) // TODO add matrix item to span defer span.End() - log.Printf("[TRACE] listForEach: %v\n", queryData.Matrix) + log.Printf("[TRACE] listForEachMatrixItem: %v\n", queryData.Matrix) var wg sync.WaitGroup // NOTE - we use the filtered matrix - which means we may not actually run any hydrate calls // if the quals have filtered out all matrix items (e.g. select where region = 'invalid') @@ -492,11 +529,20 @@ func (t *Table) listForEach(ctx context.Context, queryData *QueryData, listCall } wg.Done() }() + // create rowData, purely so we can call callHydrateWithRetries rd := newRowData(queryData, nil) + rd.matrixItem = matrixItem // clone the query data and add the matrix properties to quals - matrixQueryData := queryData.ShallowCopy() - matrixQueryData.updateQualsWithMatrixItem(matrixItem) + matrixQueryData := queryData.shallowCopy() + matrixQueryData.setMatrixItem(matrixItem) + + // now we have set the matrix item, initialise the rate limiters for this query data + matrixQueryData.initialiseRateLimiters() + // now wait for any configured 'list' rate limiters + fetchDelay := matrixQueryData.fetchLimiters.wait(ctx) + // set the metadata + matrixQueryData.setListLimiterMetadata(fetchDelay) // we cannot retry errors in the list hydrate function after streaming has started listRetryConfig := t.List.RetryConfig.GetListRetryConfig() diff --git a/plugin/table_test.go b/plugin/table_test.go index e4cf4785..8c119d48 100644 --- a/plugin/table_test.go +++ b/plugin/table_test.go @@ -3,6 +3,7 @@ package plugin import ( "context" "fmt" + "log" "sort" "strings" "testing" @@ -371,7 +372,10 @@ var testCasesRequiredHydrateCalls = map[string]requiredHydrateCallsTest{ func TestRequiredHydrateCalls(t *testing.T) { plugin := &Plugin{} - plugin.initialise(hclog.NewNullLogger()) + logger := hclog.NewNullLogger() + log.SetOutput(logger.StandardWriter(&hclog.StandardLoggerOptions{InferLevels: true})) + + plugin.initialise(logger) for name, test := range testCasesRequiredHydrateCalls { test.table.initialise(plugin) @@ -795,8 +799,11 @@ var testCasesGetHydrateConfig = map[string]getHydrateConfigTest{ } func TestGetHydrateConfig(t *testing.T) { + logger := hclog.NewNullLogger() + log.SetOutput(logger.StandardWriter(&hclog.StandardLoggerOptions{InferLevels: true})) + for name, test := range testCasesGetHydrateConfig { - test.table.Plugin.initialise(hclog.NewNullLogger()) + test.table.Plugin.initialise(logger) test.table.initialise(test.table.Plugin) result := test.table.hydrateConfigMap[test.funcName] diff --git a/query_cache/query_cache.go 
b/query_cache/query_cache.go index b52ec090..cba8539c 100644 --- a/query_cache/query_cache.go +++ b/query_cache/query_cache.go @@ -13,7 +13,6 @@ import ( "github.com/eko/gocache/v3/cache" "github.com/eko/gocache/v3/store" "github.com/gertd/go-pluralize" - "github.com/sethvargo/go-retry" "github.com/turbot/steampipe-plugin-sdk/v5/grpc" sdkproto "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" "github.com/turbot/steampipe-plugin-sdk/v5/telemetry" @@ -260,11 +259,6 @@ func (c *QueryCache) IterateSet(ctx context.Context, row *sdkproto.Row, callId s // reset index and update page count log.Printf("[TRACE] IterateSet writing 1 page of %d rows. Page count %d (%s)", rowBufferSize, req.pageCount, req.CallId) req.err = c.writePageToCache(ctx, req, false) - } else { - // TACTICAL - // wait for at least one of our subscribers to have streamed all available rows - // this avoids the cache pulling data from the APIs too quickly, - c.waitForSubscribers(ctx, req) } return nil @@ -397,13 +391,6 @@ func (c *QueryCache) updateIndex(ctx context.Context, callId string, req *setReq // write a page of rows to the cache func (c *QueryCache) writePageToCache(ctx context.Context, req *setRequest, finalPage bool) error { - - // wait for at least one of our subscribers to have streamed all available rows - // this avoids the cache pulling data from the APIs too quickly, - // and also avoids at least one of the subscribers from - // having to read back data from the cache instead of just using the page buffer - c.waitForSubscribers(ctx, req) - // now lock the request req.requestLock.Lock() @@ -565,35 +552,6 @@ func (c *QueryCache) cacheSetIndexBucket(ctx context.Context, indexBucketKey str return doSet(ctx, indexBucketKey, indexBucket.AsProto(), req.ttl(), c.cache, tags) } -// wait for at least one of our subscribers to have streamed all available rows -// this avoids tjhe cache pulling data from the APIs to quickly, -func (c *QueryCache) waitForSubscribers(ctx context.Context, req *setRequest) error { - log.Printf("[TRACE] waitForSubscribers (%s)", req.CallId) - defer log.Printf("[TRACE] waitForSubscribers done(%s)", req.CallId) - baseRetryInterval := 1 * time.Millisecond - maxRetryInterval := 50 * time.Millisecond - backoff := retry.WithCappedDuration(maxRetryInterval, retry.NewExponential(baseRetryInterval)) - - // we know this cannot return an error - return retry.Do(ctx, backoff, func(ctx context.Context) error { - // if context is cancelled just return - if ctx.Err() != nil || req.StreamContext.Err() != nil { - log.Printf("[INFO] allAvailableRowsStreamed context cancelled - returning (%s)", req.CallId) - return ctx.Err() - } - - for s := range req.subscribers { - if s.allAvailableRowsStreamed(req.rowCount) { - return nil - } - } - log.Printf("[TRACE] waitForSubscribers not all available rows streamed (%s)", req.CallId) - - return retry.RetryableError(fmt.Errorf("not all available rows streamed")) - }) - -} - func doGet[T CacheData](ctx context.Context, key string, cache *cache.Cache[[]byte], target T) error { // get the bytes from the cache getRes, err := cache.Get(ctx, key) diff --git a/query_cache/set_request_subscriber.go b/query_cache/set_request_subscriber.go index 5ac5dc24..980c2da5 100644 --- a/query_cache/set_request_subscriber.go +++ b/query_cache/set_request_subscriber.go @@ -138,7 +138,7 @@ func (s *setRequestSubscriber) getRowsToStream(ctx context.Context) ([]*sdkproto var err = s.publisher.err requestState := s.publisher.state - if requestState == requestInProgress { + if requestState != 
requestError { rowsToStream, err = s.publisher.getRowsSince(ctx, s.rowsStreamed) if err != nil { log.Printf("[INFO] getRowsToStream getRowsSince returned error: %s (%s)", err.Error(), s.callId) @@ -147,17 +147,17 @@ func (s *setRequestSubscriber) getRowsToStream(ctx context.Context) ([]*sdkproto } s.publisher.requestLock.RUnlock() - // now we have unlocked, check for error or completion - if requestState == requestComplete { - // we are done! - log.Printf("[INFO] getRowsToStream - publisher %s complete - returning (%s)", s.publisher.CallId, s.callId) - return nil, nil - } if requestState == requestError { return nil, s.publisher.err } if len(rowsToStream) == 0 { + if requestState == requestComplete { + // we are done! + log.Printf("[INFO] getRowsToStream - publisher %s complete - returning (%s)", s.publisher.CallId, s.callId) + return nil, nil + } + // if no rows are available, retry // (NOTE: we have already checked for completiomn // (this is called from within a retry.Do) diff --git a/rate_limiter/config_values.go b/rate_limiter/config_values.go new file mode 100644 index 00000000..206b3954 --- /dev/null +++ b/rate_limiter/config_values.go @@ -0,0 +1,24 @@ +package rate_limiter + +import ( + "os" + "strconv" +) + +const ( + // todo should these be more unique to avoid clash + RateLimiterScopeConnection = "connection" + RateLimiterScopeTable = "table" + + defaultMaxConcurrentRows = 500 + envMaxConcurrentRows = "STEAMPIPE_MAX_CONCURRENT_ROWS" +) + +func GetMaxConcurrentRows() int { + if envStr, ok := os.LookupEnv(envMaxConcurrentRows); ok { + if b, err := strconv.Atoi(envStr); err == nil { + return b + } + } + return defaultMaxConcurrentRows +} diff --git a/rate_limiter/definition.go b/rate_limiter/definition.go new file mode 100644 index 00000000..1d5c5276 --- /dev/null +++ b/rate_limiter/definition.go @@ -0,0 +1,111 @@ +package rate_limiter + +import ( + "fmt" + "github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto" + "golang.org/x/time/rate" + "log" + "regexp" + "strings" +) + +type Definition struct { + // the limiter name + Name string + // the actual limiter config + FillRate rate.Limit + BucketSize int64 + // the max concurrency supported + MaxConcurrency int64 + // the scope properties which identify this limiter instance + // one limiter instance will be created for each combination of these properties which is encountered + Scope []string + + // filter used to target the limiter + Where string + parsedFilter *scopeFilter +} + +// DefinitionFromProto converts the proto format RateLimiterDefinition into a Defintion +func DefinitionFromProto(p *proto.RateLimiterDefinition) (*Definition, error) { + var res = &Definition{ + Name: p.Name, + FillRate: rate.Limit(p.FillRate), + BucketSize: p.BucketSize, + MaxConcurrency: p.MaxConcurrency, + Scope: p.Scope, + Where: p.Where, + } + if err := res.Initialise(); err != nil { + return nil, err + } + return res, nil +} + +func (d *Definition) ToProto() *proto.RateLimiterDefinition { + return &proto.RateLimiterDefinition{ + Name: d.Name, + FillRate: float32(d.FillRate), + BucketSize: d.BucketSize, + MaxConcurrency: d.MaxConcurrency, + Scope: d.Scope, + Where: d.Where, + } +} + +func (d *Definition) Initialise() error { + log.Printf("[INFO] initialise rate limiter Definition") + if d.Where != "" { + scopeFilter, err := newScopeFilter(d.Where) + if err != nil { + log.Printf("[WARN] failed to parse scope filter: %s", err.Error()) + return err + } + log.Printf("[INFO] parsed scope filter %s", d.Where) + d.parsedFilter = scopeFilter + } + return nil 
+} + +func (d *Definition) String() string { + limiterString := "" + concurrencyString := "" + if d.FillRate >= 0 { + limiterString = fmt.Sprintf("Limit(/s): %v, Burst: %d", d.FillRate, d.BucketSize) + } + if d.MaxConcurrency >= 0 { + concurrencyString = fmt.Sprintf("MaxConcurrency: %d", d.MaxConcurrency) + } + return fmt.Sprintf("%s Scopes: %s, Where: %s", strings.Join([]string{limiterString, concurrencyString}, " "), d.Scope, d.Where) +} + +func (d *Definition) Validate() []string { + var validationErrors []string + if d.Name == "" { + validationErrors = append(validationErrors, "rate limiter definition must specify a name") + } + if !validHCLLabel(d.Name) { + validationErrors = append(validationErrors, fmt.Sprintf("invalid rate limiter name '%s' - names can contain letters, digits, underscores (_), and hyphens (-), and cannot start with a digit", d.Name)) + } + if (d.FillRate == 0 || d.BucketSize == 0) && d.MaxConcurrency == 0 { + validationErrors = append(validationErrors, "rate limiter definition must definer either a rate limit or max concurrency") + } + + return validationErrors +} + +func validHCLLabel(name string) bool { + // Identifiers can contain letters, digits, underscores (_), and hyphens (-). The first character of an identifier must not be a digit, to avoid ambiguity with literal numbers. + return regexp.MustCompile(`^[a-zA-Z0-9_-]+$`).MatchString(name) && + // must not start with number (no negative lookaheads in go :( ) + !regexp.MustCompile(`^[0-9]+$`).MatchString(name[:1]) +} + +// SatisfiesFilters returns whether the given values satisfy ANY of our filters +func (d *Definition) SatisfiesFilters(scopeValues map[string]string) bool { + if d.parsedFilter == nil { + return true + } + + return d.parsedFilter.satisfied(scopeValues) +} diff --git a/rate_limiter/definition_test.go b/rate_limiter/definition_test.go new file mode 100644 index 00000000..955e9ec9 --- /dev/null +++ b/rate_limiter/definition_test.go @@ -0,0 +1,27 @@ +package rate_limiter + +import "testing" + +func TestValidHCLLabel(t *testing.T) { + testCases := []struct { + input string + expected bool + }{ + {"valid", true}, + {"valid1", true}, + {"valid-2", true}, + {"valid_3", true}, + {"valid--4", true}, + {"valid__5", true}, + {"invalid#1", false}, + {"2-invalid2", false}, + {"invalid 3", false}, + } + + for _, testCase := range testCases { + res := validHCLLabel(testCase.input) + if res != testCase.expected { + t.Errorf("failed for '%s', expected %v, got %v", testCase.input, testCase.expected, res) + } + } +} diff --git a/rate_limiter/hydrate_limiter.go b/rate_limiter/hydrate_limiter.go new file mode 100644 index 00000000..fc2ba161 --- /dev/null +++ b/rate_limiter/hydrate_limiter.go @@ -0,0 +1,73 @@ +package rate_limiter + +import ( + "fmt" + "golang.org/x/sync/semaphore" + "golang.org/x/time/rate" + "log" + "strings" +) + +type HydrateLimiter struct { + Name string + scopeValues map[string]string + // underlying rate limiter + limiter *rate.Limiter + // semaphore to control concurrency + sem *semaphore.Weighted + maxConcurrency int64 +} + +func newLimiter(l *Definition, scopeValues map[string]string) *HydrateLimiter { + log.Printf("[INFO] newLimiter, defintion: %v, scopeValues %v", l, scopeValues) + + res := &HydrateLimiter{ + Name: l.Name, + scopeValues: scopeValues, + maxConcurrency: l.MaxConcurrency, + } + if l.FillRate != 0 { + res.limiter = rate.NewLimiter(l.FillRate, int(l.BucketSize)) + } + if l.MaxConcurrency != 0 { + res.sem = semaphore.NewWeighted(l.MaxConcurrency) + } + return res +} +func (d 
*HydrateLimiter) String() string { + limiterString := "" + concurrencyString := "" + if d.limiter != nil { + limiterString = fmt.Sprintf("Limit(/s): %v, Burst: %d", d.limiter.Limit(), d.limiter.Burst()) + } + if d.maxConcurrency >= 0 { + concurrencyString = fmt.Sprintf("MaxConcurrency: %d", d.maxConcurrency) + } + return fmt.Sprintf("%s ScopeValues: %s", strings.Join([]string{limiterString, concurrencyString}, " "), d.scopeValues) +} + +func (l *HydrateLimiter) tryToAcquireSemaphore() bool { + if l.sem == nil { + return true + } + return l.sem.TryAcquire(1) +} + +func (l *HydrateLimiter) releaseSemaphore() { + if l.sem == nil { + return + } + l.sem.Release(1) + +} + +func (l *HydrateLimiter) reserve() *rate.Reservation { + if l.limiter != nil { + return l.limiter.Reserve() + } + return nil +} + +func (l *HydrateLimiter) hasLimiter() bool { + return l.limiter != nil +} diff --git a/rate_limiter/limiter_map.go b/rate_limiter/limiter_map.go new file mode 100644 index 00000000..495864a9 --- /dev/null +++ b/rate_limiter/limiter_map.go @@ -0,0 +1,74 @@ +package rate_limiter + +import ( + "crypto/md5" + "encoding/hex" + "sync" +) + +// LimiterMap is a struct encapsulating a map of rate limiters +// map key is built from the limiter tag values, +// e.g. +// tags: {"connection": "aws1", "region": "us-east-1"} +// key: hash("{\"connection\": \"aws1\", \"region\": \"us-east-1\"}) +type LimiterMap struct { + limiters map[string]*HydrateLimiter + mut sync.RWMutex +} + +func NewLimiterMap() *LimiterMap { + return &LimiterMap{ + limiters: make(map[string]*HydrateLimiter), + } +} + +// GetOrCreate checks the map for a limiter with the specified key values - if none exists it creates it +func (m *LimiterMap) GetOrCreate(def *Definition, scopeValues map[string]string) (*HydrateLimiter, error) { + // build the key from the name and scope values + key, err := buildLimiterKey(def.Name, scopeValues) + if err != nil { + return nil, err + } + + m.mut.RLock() + limiter, ok := m.limiters[key] + m.mut.RUnlock() + + if ok { + return limiter, nil + } + + // get a write lock + m.mut.Lock() + // ensure release lock + defer m.mut.Unlock() + + // try to read again + limiter, ok = m.limiters[key] + if ok { + // someone beat us to creation + return limiter, nil + } + + // ok we need to create one + limiter = newLimiter(def, scopeValues) + + // put it in the map + m.limiters[key] = limiter + return limiter, nil +} + +func (m *LimiterMap) Clear() { + m.mut.Lock() + m.limiters = make(map[string]*HydrateLimiter) + m.mut.Unlock() +} + +func buildLimiterKey(name string, values map[string]string) (string, error) { + // build the key for this rate limiter + // map key is the hash of the name and string representation of the value map + hash := md5.Sum([]byte(name + ScopeValuesString(values))) + key := hex.EncodeToString(hash[:]) + + return key, nil +} diff --git a/rate_limiter/rate_limiter.go b/rate_limiter/rate_limiter.go new file mode 100644 index 00000000..e51c9470 --- /dev/null +++ b/rate_limiter/rate_limiter.go @@ -0,0 +1,126 @@ +package rate_limiter + +import ( + "context" + "fmt" + "github.com/turbot/go-kit/helpers" + "golang.org/x/time/rate" + "log" + "strings" + "time" +) + +type MultiLimiter struct { + Limiters []*HydrateLimiter + ScopeValues map[string]string +} + +func NewMultiLimiter(limiters []*HydrateLimiter, scopeValues map[string]string) *MultiLimiter { + res := &MultiLimiter{ + Limiters: limiters, + ScopeValues: scopeValues, + } + + return res +} + +func (m *MultiLimiter) Wait(ctx context.Context) time.Duration { 
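+	// Wait reserves a slot on every underlying limiter, sleeps for the longest required delay
+	// (cancelling the reservations if the context is cancelled first) and returns the delay applied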
+ // short circuit if we have no limiters + if len(m.Limiters) == 0 { + return 0 + } + + var maxDelay time.Duration = 0 + var reservations []*rate.Reservation + + // todo cancel reservations for all but longest delay + // todo think about burst rate + + // find the max delay from all the limiters + for _, l := range m.Limiters { + if l.hasLimiter() { + r := l.reserve() + reservations = append(reservations, r) + if d := r.Delay(); d > maxDelay { + maxDelay = d + } + } + } + + if maxDelay == 0 { + return 0 + } + + log.Printf("[TRACE] rate limiter waiting %dms", maxDelay.Milliseconds()) + // wait for the max delay time + t := time.NewTimer(maxDelay) + defer t.Stop() + select { + case <-t.C: + // We can proceed. + case <-ctx.Done(): + // Context was canceled before we could proceed. Cancel the + // reservations, which may permit other events to proceed sooner. + for _, r := range reservations { + r.Cancel() + } + } + return maxDelay +} + +func (m *MultiLimiter) String() string { + var strs []string + + for _, l := range m.Limiters { + strs = append(strs, l.String()) + } + return strings.Join(strs, "\n") +} + +func (m *MultiLimiter) LimiterNames() []string { + var names = make([]string, len(m.Limiters)) + for i, l := range m.Limiters { + names[i] = l.Name + } + return names +} + +func (m *MultiLimiter) TryToAcquireSemaphore() bool { + + // keep track of limiters whose semaphore we have acquired + var acquired []*HydrateLimiter + for _, l := range m.Limiters { + + if l.tryToAcquireSemaphore() { + acquired = append(acquired, l) + + } else { + + // we failed to acquire the semaphore - + // we must release all acquired semaphores + for _, a := range acquired { + a.releaseSemaphore() + } + return false + } + } + + return true +} + +func (m *MultiLimiter) ReleaseSemaphore() { + for _, l := range m.Limiters { + l.releaseSemaphore() + } +} + +// FormatStringMap orders the map keys and returns a string containing all map keys and values +func FormatStringMap(stringMap map[string]string) string { + var strs []string + + for _, k := range helpers.SortedMapKeys(stringMap) { + strs = append(strs, fmt.Sprintf("%s=%s", k, stringMap[k])) + } + + return strings.Join(strs, ",") +} diff --git a/rate_limiter/rate_limiter_test.go b/rate_limiter/rate_limiter_test.go new file mode 100644 index 00000000..2ea323fe --- /dev/null +++ b/rate_limiter/rate_limiter_test.go @@ -0,0 +1,32 @@ +package rate_limiter + +import ( + "testing" +) + +func TestRateLimiter(t *testing.T) { + //fmt.Printf("x_time_rate") + //limiter := &MultiLimiter{} + //limiter.Add(10, 100, nil) + //limiter.Add(1, 5, KeyMap{"hydrate": "fxn1"}) + //limiter.Add(2, 5, KeyMap{"hydrate": "fxn2"}) + // + //save := time.Now() + // + //var wg sync.WaitGroup + //makeApiCalls := func(hydrate string) { + // for i := 0; i < 50; i++ { + // limiter.Wait(context.Background(), KeyMap{"hydrate": hydrate}) + // fmt.Printf("%s, %d, %v\n", hydrate, i, time.Since(save)) + // } + // wg.Done() + //} + //wg.Add(1) + //go makeApiCalls("fxn1") + //wg.Add(1) + //go makeApiCalls("fxn2") + //wg.Add(1) + //go makeApiCalls("fxn3") + // + //wg.Wait() +} diff --git a/rate_limiter/scope_filter.go b/rate_limiter/scope_filter.go new file mode 100644 index 00000000..c9eb8e65 --- /dev/null +++ b/rate_limiter/scope_filter.go @@ -0,0 +1,185 @@ +package rate_limiter + +import ( + "fmt" + "github.com/danwakefield/fnmatch" + "github.com/turbot/steampipe-plugin-sdk/v5/filter" + "strings" +) + +type scopeFilter struct { + filter filter.ComparisonNode + raw string +} + +func newScopeFilter(raw 
string) (*scopeFilter, error) { + parsed, err := filter.Parse("", []byte(raw)) + if err != nil { + return nil, err + } + + res := &scopeFilter{ + filter: parsed.(filter.ComparisonNode), + raw: raw, + } + + // do a test run of the filter to ensure all operators are supported + if _, err := scopeFilterSatisfied(res.filter, map[string]string{}); err != nil { + return nil, err + } + + return res, nil + +} + +func (f *scopeFilter) satisfied(values map[string]string) bool { + res, _ := scopeFilterSatisfied(f.filter, values) + return res +} + +func scopeFilterSatisfied(c filter.ComparisonNode, values map[string]string) (bool, error) { + switch c.Type { + case "identifier": + // not sure when wthis would be used + return false, invalidScopeOperatorError(c.Operator.Value) + case "is": + // is is not (currently) supported + return false, invalidScopeOperatorError(c.Operator.Value) + case "like": // (also ilike?) + codeNodes, ok := c.Values.([]filter.CodeNode) + if !ok { + return false, fmt.Errorf("failed to parse filter") + } + if len(codeNodes) != 2 { + return false, fmt.Errorf("failed to parse filter") + } + + // dereference the value from the map + lval := values[codeNodes[0].Value] + pattern := codeNodes[1].Value + + switch c.Operator.Value { + case "like": + res := evaluateLike(lval, pattern, 0) + return res, nil + case "not like": + res := !evaluateLike(lval, pattern, 0) + return res, nil + case "ilike": + res := evaluateLike(lval, pattern, fnmatch.FNM_IGNORECASE) + return res, nil + case "not ilike": + res := !evaluateLike(lval, pattern, fnmatch.FNM_IGNORECASE) + return res, nil + default: + return false, invalidScopeOperatorError(c.Operator.Value) + } + case "compare": + codeNodes, ok := c.Values.([]filter.CodeNode) + if !ok { + return false, fmt.Errorf("failed to parse filter") + } + if len(codeNodes) != 2 { + return false, fmt.Errorf("failed to parse filter") + } + + // dereference the value from the map + lval := values[codeNodes[0].Value] + rval := codeNodes[1].Value + + switch c.Operator.Value { + case "=": + return lval == rval, nil + case "!=", "<>": + return lval != rval, nil + // as we (currently) only suport string scopes, < and > are not supported + case "<=", ">=", "<", ">": + return false, invalidScopeOperatorError(c.Operator.Value) + } + case "in": + codeNodes, ok := c.Values.([]filter.CodeNode) + if !ok { + return false, fmt.Errorf("failed to parse filter") + } + if len(codeNodes) < 2 { + return false, fmt.Errorf("failed to parse filter") + } + + key := codeNodes[0].Value + // build look up of possible values + rvals := make(map[string]struct{}, len(codeNodes)-1) + for _, c := range codeNodes[1:] { + rvals[c.Value] = struct{}{} + } + + lval := values[key] + // does this value exist in rvals? 
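+		// e.g. for the filter "service in ('s3', 'ec2')" with scope values {"service": "s3"},
+		// lval is "s3" and rvals is {"s3", "ec2"}, so the lookup below succeeds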
+ _, rvalsContainValue := rvals[lval] + + // operator determines expected result + switch c.Operator.Value { + case "in": + return rvalsContainValue, nil + case "not in": + return !rvalsContainValue, nil + } + case "not": + // TODO have not identified queries which give a top level 'not' + return false, fmt.Errorf("unsupported location for 'not' operator") + case "or": + nodes, ok := c.Values.([]any) + if !ok { + return false, fmt.Errorf("failed to parse filter") + } + for _, n := range nodes { + c, ok := n.(filter.ComparisonNode) + if !ok { + return false, fmt.Errorf("failed to parse filter") + } + // if any child nodes are satisfied, return true + childSatisfied, err := scopeFilterSatisfied(c, values) + if err != nil { + return false, err + } + if childSatisfied { + return true, nil + } + } + // nothing is satisfied - return false + return false, nil + case "and": + nodes, ok := c.Values.([]any) + if !ok { + return false, fmt.Errorf("failed to parse filter") + } + for _, n := range nodes { + c, ok := n.(filter.ComparisonNode) + if !ok { + return false, fmt.Errorf("failed to parse filter") + } + // if any child nodes are unsatidsfied, return false + childSatisfied, err := scopeFilterSatisfied(c, values) + if err != nil { + return false, err + } + if !childSatisfied { + return false, nil + } + } + // everything is satisfied - return true + return true, nil + } + + return false, fmt.Errorf("failed to parse filter") +} + +func evaluateLike(val, pattern string, flag int) bool { + pattern = strings.ReplaceAll(pattern, "_", "?") + pattern = strings.ReplaceAll(pattern, "%", "*") + return fnmatch.Match(pattern, val, flag) + +} + +func invalidScopeOperatorError(operator string) error { + return fmt.Errorf("invalid scope filter operator '%s'", operator) +} diff --git a/rate_limiter/scope_filter_test.go b/rate_limiter/scope_filter_test.go new file mode 100644 index 00000000..4600029e --- /dev/null +++ b/rate_limiter/scope_filter_test.go @@ -0,0 +1,320 @@ +package rate_limiter + +import ( + "testing" +) + +func TestScopeFilterSatisfied(t *testing.T) { + testCases := []struct { + filter string + values map[string]string + expected bool + err string + }{ + //comparisons + { + filter: "connection = 'foo'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + { + filter: "connection = 'foo'", + values: map[string]string{"connection": "bar"}, + expected: false, + }, + { + filter: "connection != 'foo'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection != 'foo'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + { + filter: "connection != 'foo'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + { + filter: "connection <> 'foo'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + { + filter: "connection <> 'foo'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + // in + { + filter: "connection in ('foo','bar')", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + { + filter: "connection in ('foo','bar')", + values: map[string]string{"connection": "other"}, + expected: false, + }, + { + filter: "connection not in ('foo','bar')", + values: map[string]string{"connection": "other"}, + expected: true, + }, + //like + { + filter: "connection like 'fo_'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + { + filter: "connection like 'fo_'", + values: map[string]string{"connection": "bar"}, + 
expected: false, + }, + { + filter: "connection like '_o_'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + { + filter: "connection like '_o_'", + values: map[string]string{"connection": "bar"}, + expected: false, + }, + { + filter: "connection like 'f%'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + { + filter: "connection like 'f%'", + values: map[string]string{"connection": "bar"}, + expected: false, + }, + { + filter: "connection like '%ob%'", + values: map[string]string{"connection": "foobar"}, + expected: true, + }, + { + filter: "connection like '%ob%'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection like '_oo%'", + values: map[string]string{"connection": "foobar"}, + expected: true, + }, + { + filter: "connection like '_oo%'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + { + filter: "connection like '_oo%'", + values: map[string]string{"connection": "bar"}, + expected: false, + }, + { + filter: "connection like 'fo_'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + { + filter: "connection like 'fo_'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + { + filter: "connection like 'FO_'", + values: map[string]string{"connection": "FOO"}, + expected: true, + }, + { + filter: "connection like 'FO_'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + + //ilike + { + filter: "connection ilike 'FO_'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + // not like + { + filter: "connection not like 'fo_'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection not like 'fo_'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + { + filter: "connection not like '_o_'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection not like '_o_'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + { + filter: "connection not like 'f%'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection not like 'f%'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + { + filter: "connection not like '%ob%'", + values: map[string]string{"connection": "foobar"}, + expected: false, + }, + { + filter: "connection not like '%ob%'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + { + filter: "connection not like '_oo%'", + values: map[string]string{"connection": "foobar"}, + expected: false, + }, + { + filter: "connection not like '_oo%'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection not like '_oo%'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + { + filter: "connection not like 'fo_'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection not like 'fo_'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection not like 'FO_'", + values: map[string]string{"connection": "FOO"}, + expected: false, + }, + { + filter: "connection not like 'FO_'", + values: map[string]string{"connection": "foo"}, + expected: true, + }, + // not ilike + { + filter: "connection not ilike 'FO_'", + values: map[string]string{"connection": "foo"}, + expected: false, + }, + { + filter: "connection not 
ilike 'FO_'", + values: map[string]string{"connection": "bar"}, + expected: true, + }, + //// complex queries + //{ + // filter: "connection not in ('foo','bar') or connection='hello'", + // values: map[string]string{"connection": "bar"}, + // expected: false, + //}, + //{ + // filter: "connection in ('foo','bar') and connection='foo'", + // values: map[string]string{"connection": "foo"}, + // expected: true, + //}, + //{ + // filter: "connection in ('foo','bar') and connection='other'", + // values: map[string]string{"connection": "foo"}, + // expected: false, + //}, + //{ + // filter: "connection in ('a','b') or connection='foo'", + // values: map[string]string{"connection": "foo"}, + // expected: true, + //}, + //{ + // filter: "connection in ('a','b') or connection='c'", + // values: map[string]string{"connection": "foo"}, + // expected: false, + //}, + + //// not supported + //{ + // // 'is not' not supported + // filter: "connection is null", + // values: map[string]string{"connection": "foo"}, + // err: invalidScopeOperatorError("is").Error(), + //}, + //{ + // // 'is' not supported + // filter: "connection is not null", + // values: map[string]string{"connection": "foo"}, + // err: invalidScopeOperatorError("is not").Error(), + //}, + //{ + // // '<' is not supported + // filter: "connection < 'bar'", + // values: map[string]string{"connection": "foo"}, + // err: invalidScopeOperatorError("<").Error(), + //}, + //{ + // // '<=' is not supported + // filter: "connection <= 'bar'", + // values: map[string]string{"connection": "foo"}, + // err: invalidScopeOperatorError("<=").Error(), + //}, + //{ + // // '>' is not supported + // filter: "connection > 'bar'", + // values: map[string]string{"connection": "foo"}, + // err: invalidScopeOperatorError(">").Error(), + //}, + //{ + // // '>=' is not supported + // filter: "connection >= 'bar'", + // values: map[string]string{"connection": "foo"}, + // err: invalidScopeOperatorError(">=").Error(), + //}, + } + for _, testCase := range testCases { + scopeFilter, err := newScopeFilter(testCase.filter) + if testCase.err != "" { + if err == nil || err.Error() != testCase.err { + t.Errorf("parseWhere(%v) err: %v, want %s", testCase.filter, err, testCase.err) + } + continue + } + if err != nil { + t.Fatal(err) + } + + satisfiesFilter := scopeFilter.satisfied(testCase.values) + + if satisfiesFilter != testCase.expected { + t.Errorf("scopeFilterSatisfied(%v, %v) want %v, got %v", testCase.filter, testCase.values, testCase.expected, satisfiesFilter) + } + + } +} diff --git a/rate_limiter/scope_values.go b/rate_limiter/scope_values.go new file mode 100644 index 00000000..21ff10c1 --- /dev/null +++ b/rate_limiter/scope_values.go @@ -0,0 +1,33 @@ +package rate_limiter + +import ( + "fmt" + "github.com/turbot/go-kit/helpers" + "strings" +) + +func ScopeValuesString(sv map[string]string) string { + keys := helpers.SortedMapKeys(sv) + var strs = make([]string, len(keys)) + for i, k := range keys { + strs[i] = fmt.Sprintf("%s=%s", k, sv[k]) + } + return strings.Join(strs, ",") +} + +// MergeScopeValues combines a set of scope values in order of precedence +// NOT: it adds the given values to the resulting map WITHOUT OVERWRITING existing values +// i.e. 
follow the precedence order +func MergeScopeValues(values []map[string]string) map[string]string { + res := map[string]string{} + + for _, valueMap := range values { + for k, v := range valueMap { + // only set tag if not already set - earlier tag values have precedence + if _, gotValue := res[k]; !gotValue { + res[k] = v + } + } + } + return res +} diff --git a/version/version.go b/version/version.go index 83500358..c9057092 100644 --- a/version/version.go +++ b/version/version.go @@ -12,12 +12,12 @@ import ( var ProtocolVersion int64 = 20220201 // Version is the main version number that is being run at the moment. -var version = "5.5.1" +var version = "5.6.0" // A pre-release marker for the version. If this is "" (empty string) // then it means that it is a final release. Otherwise, this is a pre-release // such as "dev" (in development), "beta", "rc1", etc. -var prerelease = "" +var prerelease = "rc.24" // semVer is an instance of version.Version. This has the secondary // benefit of verifying during tests and init time that our version is a
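Taken together, the new `rate_limiter` types above can be exercised end to end. A sketch (not part of this change set; the limiter name, scopes and scope values are illustrative) showing a `Definition` with a `Where` filter being validated, initialised and matched against a set of scope values:

```go
package main

import (
	"fmt"
	"log"

	"github.com/turbot/steampipe-plugin-sdk/v5/rate_limiter"
)

func main() {
	def := &rate_limiter.Definition{
		Name:       "connection-region-service",
		FillRate:   50,
		BucketSize: 10,
		Scope:      []string{"connection", "region", "service"},
		Where:      "service = 's3'",
	}

	if errs := def.Validate(); len(errs) > 0 {
		log.Fatal(errs)
	}
	// parse the Where clause into a scope filter
	if err := def.Initialise(); err != nil {
		log.Fatal(err)
	}

	scopeValues := map[string]string{
		"connection": "aws_prod",
		"region":     "us-east-1",
		"service":    "s3",
	}
	// prints true - these scope values satisfy "service = 's3'"
	fmt.Println(def.SatisfiesFilters(scopeValues))
}
```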