From a15ddaa34ce14d6d35af8fbd9e5010152cd561fe Mon Sep 17 00:00:00 2001 From: Blake Rouse Date: Wed, 9 Sep 2020 13:30:21 -0400 Subject: [PATCH] Cherry-pick #20994 to 7.x: [Elastic Agent] Add support for EQL based conditions (#21039) * [Elastic Agent] Add support for EQL based conditions (#20994) * Refactor Boolexp to Eql. * Connect new Eql to specs and input emitter. * Fix compare with null. * Fix notice and go.mod. (cherry picked from commit af91b5e76c4b3dc1864eed6850ccb86b91d727ee) * Fix go.mod from cherry-pick resolve. * Add changelog. --- NOTICE.txt | 64 +- go.mod | 3 +- go.sum | 4 +- x-pack/elastic-agent/CHANGELOG.next.asciidoc | 1 + .../pkg/agent/application/emitter.go | 51 +- .../pkg/agent/application/emitter_test.go | 268 +- .../application/filters/constraints_filter.go | 244 -- .../filters/constraints_filter_test.go | 46 - .../agent/application/inspect_output_cmd.go | 4 +- .../pkg/agent/application/local_mode.go | 2 +- .../pkg/agent/application/managed_mode.go | 2 +- .../agent/application/managed_mode_test.go | 3 +- .../pkg/agent/program/methods.go | 168 - .../pkg/agent/program/program.go | 50 +- .../pkg/agent/program/program_test.go | 16 +- .../pkg/agent/program/supported.go | 2 +- .../testdata/constraints_config-filebeat.yml | 27 - .../program/testdata/constraints_config.yml | 36 - .../elastic-agent/pkg/agent/transpiler/ast.go | 96 +- .../pkg/agent/transpiler/ast_test.go | 533 ++- .../pkg/agent/transpiler/vars.go | 69 +- .../pkg/agent/transpiler/vars_test.go | 40 +- x-pack/elastic-agent/pkg/boolexp/Boolexp.g4 | 55 - .../elastic-agent/pkg/boolexp/boolexp_test.go | 273 -- x-pack/elastic-agent/pkg/boolexp/methods.go | 58 - .../pkg/boolexp/parser/Boolexp.interp | 55 - .../pkg/boolexp/parser/Boolexp.tokens | 29 - .../pkg/boolexp/parser/BoolexpLexer.interp | 79 - .../pkg/boolexp/parser/BoolexpLexer.tokens | 29 - .../boolexp/parser/boolexp_base_listener.go | 140 - .../boolexp/parser/boolexp_base_visitor.go | 89 - .../pkg/boolexp/parser/boolexp_lexer.go | 191 - .../pkg/boolexp/parser/boolexp_parser.go | 1952 ---------- .../pkg/boolexp/parser/boolexp_visitor.go | 71 - .../pkg/composable/controller.go | 50 +- .../pkg/composable/controller_test.go | 23 +- x-pack/elastic-agent/pkg/eql/Eql.g4 | 97 + .../pkg/{boolexp => eql}/compare.go | 154 +- .../pkg/{boolexp/boolexp.go => eql/eql.go} | 8 +- x-pack/elastic-agent/pkg/eql/eql_test.go | 377 ++ .../pkg/{boolexp => eql}/expression.go | 19 +- x-pack/elastic-agent/pkg/eql/math.go | 212 ++ x-pack/elastic-agent/pkg/eql/methods.go | 38 + x-pack/elastic-agent/pkg/eql/methods_array.go | 31 + x-pack/elastic-agent/pkg/eql/methods_dict.go | 32 + .../elastic-agent/pkg/eql/methods_length.go | 25 + x-pack/elastic-agent/pkg/eql/methods_math.go | 47 + x-pack/elastic-agent/pkg/eql/methods_str.go | 186 + .../elastic-agent/pkg/eql/parser/Eql.interp | 87 + .../elastic-agent/pkg/eql/parser/Eql.tokens | 54 + .../pkg/eql/parser/EqlLexer.interp | 116 + .../pkg/eql/parser/EqlLexer.tokens | 54 + .../pkg/eql/parser/eql_base_listener.go | 200 + .../pkg/eql/parser/eql_base_visitor.go | 129 + .../elastic-agent/pkg/eql/parser/eql_lexer.go | 228 ++ .../parser/eql_listener.go} | 80 +- .../pkg/eql/parser/eql_parser.go | 3284 +++++++++++++++++ .../pkg/eql/parser/eql_visitor.go | 101 + .../pkg/{boolexp => eql}/visitor.go | 214 +- x-pack/elastic-agent/spec/endpoint.yml | 2 +- x-pack/elastic-agent/spec/filebeat.yml | 2 +- x-pack/elastic-agent/spec/heartbeat.yml | 2 +- x-pack/elastic-agent/spec/metricbeat.yml | 2 +- 63 files changed, 6556 insertions(+), 4048 deletions(-) delete mode 
100644 x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter.go delete mode 100644 x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter_test.go delete mode 100644 x-pack/elastic-agent/pkg/agent/program/methods.go delete mode 100644 x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config-filebeat.yml delete mode 100644 x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config.yml delete mode 100644 x-pack/elastic-agent/pkg/boolexp/Boolexp.g4 delete mode 100644 x-pack/elastic-agent/pkg/boolexp/boolexp_test.go delete mode 100644 x-pack/elastic-agent/pkg/boolexp/methods.go delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/Boolexp.interp delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/Boolexp.tokens delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/BoolexpLexer.interp delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/BoolexpLexer.tokens delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/boolexp_base_listener.go delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/boolexp_base_visitor.go delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/boolexp_lexer.go delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/boolexp_parser.go delete mode 100644 x-pack/elastic-agent/pkg/boolexp/parser/boolexp_visitor.go create mode 100644 x-pack/elastic-agent/pkg/eql/Eql.g4 rename x-pack/elastic-agent/pkg/{boolexp => eql}/compare.go (71%) rename x-pack/elastic-agent/pkg/{boolexp/boolexp.go => eql/eql.go} (70%) create mode 100644 x-pack/elastic-agent/pkg/eql/eql_test.go rename x-pack/elastic-agent/pkg/{boolexp => eql}/expression.go (79%) create mode 100644 x-pack/elastic-agent/pkg/eql/math.go create mode 100644 x-pack/elastic-agent/pkg/eql/methods.go create mode 100644 x-pack/elastic-agent/pkg/eql/methods_array.go create mode 100644 x-pack/elastic-agent/pkg/eql/methods_dict.go create mode 100644 x-pack/elastic-agent/pkg/eql/methods_length.go create mode 100644 x-pack/elastic-agent/pkg/eql/methods_math.go create mode 100644 x-pack/elastic-agent/pkg/eql/methods_str.go create mode 100644 x-pack/elastic-agent/pkg/eql/parser/Eql.interp create mode 100644 x-pack/elastic-agent/pkg/eql/parser/Eql.tokens create mode 100644 x-pack/elastic-agent/pkg/eql/parser/EqlLexer.interp create mode 100644 x-pack/elastic-agent/pkg/eql/parser/EqlLexer.tokens create mode 100644 x-pack/elastic-agent/pkg/eql/parser/eql_base_listener.go create mode 100644 x-pack/elastic-agent/pkg/eql/parser/eql_base_visitor.go create mode 100644 x-pack/elastic-agent/pkg/eql/parser/eql_lexer.go rename x-pack/elastic-agent/pkg/{boolexp/parser/boolexp_listener.go => eql/parser/eql_listener.go} (66%) create mode 100644 x-pack/elastic-agent/pkg/eql/parser/eql_parser.go create mode 100644 x-pack/elastic-agent/pkg/eql/parser/eql_visitor.go rename x-pack/elastic-agent/pkg/{boolexp => eql}/visitor.go (52%) diff --git a/NOTICE.txt b/NOTICE.txt index 0b3a9262245..8151eccec26 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -2182,36 +2182,6 @@ Contents of probable licence file $GOMODCACHE/github.com/!azure/go-autorest/auto limitations under the License. 
--------------------------------------------------------------------------------- -Dependency : github.com/Masterminds/semver -Version: v1.4.2 -Licence type (autodetected): MIT --------------------------------------------------------------------------------- - -Contents of probable licence file $GOMODCACHE/github.com/!masterminds/semver@v1.4.2/LICENSE.txt: - -The Masterminds -Copyright (C) 2014-2015, Matt Butcher and Matt Farina - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - -------------------------------------------------------------------------------- Dependency : github.com/Microsoft/go-winio Version: v0.4.15-0.20190919025122-fc70bd9a86b5 @@ -2555,11 +2525,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- Dependency : github.com/antlr/antlr4 -Version: v0.0.0-20200326173327-a4c66dc863bb +Version: v0.0.0-20200820155224-be881fa6b91d Licence type (autodetected): MIT -------------------------------------------------------------------------------- -Contents of probable licence file $GOMODCACHE/github.com/antlr/antlr4@v0.0.0-20200326173327-a4c66dc863bb/LICENSE.txt: +Contents of probable licence file $GOMODCACHE/github.com/antlr/antlr4@v0.0.0-20200820155224-be881fa6b91d/LICENSE.txt: [The "BSD 3-clause license"] Copyright (c) 2012-2017 The ANTLR Project. All rights reserved. @@ -19266,6 +19236,36 @@ Contents of probable licence file $GOMODCACHE/github.com/!burnt!sushi/xgb@v0.0.0 // such litigation is filed. 
+-------------------------------------------------------------------------------- +Dependency : github.com/Masterminds/semver +Version: v1.4.2 +Licence type (autodetected): MIT +-------------------------------------------------------------------------------- + +Contents of probable licence file $GOMODCACHE/github.com/!masterminds/semver@v1.4.2/LICENSE.txt: + +The Masterminds +Copyright (C) 2014-2015, Matt Butcher and Matt Farina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + -------------------------------------------------------------------------------- Dependency : github.com/Microsoft/hcsshim Version: v0.8.7 diff --git a/go.mod b/go.mod index 38245e5e271..5a7b2749c0d 100644 --- a/go.mod +++ b/go.mod @@ -19,14 +19,13 @@ require ( github.com/Azure/go-autorest/autorest/adal v0.8.1 github.com/Azure/go-autorest/autorest/azure/auth v0.4.2 github.com/Azure/go-autorest/autorest/date v0.2.0 - github.com/Masterminds/semver v1.4.2 github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5 github.com/Shopify/sarama v0.0.0-00010101000000-000000000000 github.com/StackExchange/wmi v0.0.0-20170221213301-9f32b5905fd6 github.com/aerospike/aerospike-client-go v1.27.1-0.20170612174108-0f3b54da6bdc github.com/akavel/rsrc v0.8.0 // indirect github.com/andrewkroh/sys v0.0.0-20151128191922-287798fe3e43 - github.com/antlr/antlr4 v0.0.0-20200326173327-a4c66dc863bb + github.com/antlr/antlr4 v0.0.0-20200820155224-be881fa6b91d github.com/apoydence/eachers v0.0.0-20181020210610-23942921fe77 // indirect github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 github.com/aws/aws-lambda-go v1.6.0 diff --git a/go.sum b/go.sum index 6a60f8d7129..3b0cc4b412d 100644 --- a/go.sum +++ b/go.sum @@ -108,8 +108,8 @@ github.com/andrewkroh/goja v0.0.0-20190128172624-dd2ac4456e20/go.mod h1:cI59GRkC github.com/andrewkroh/sys v0.0.0-20151128191922-287798fe3e43 h1:WFwa9pqou0Nb4DdfBOyaBTH0GqLE74Qwdf61E7ITHwQ= github.com/andrewkroh/sys v0.0.0-20151128191922-287798fe3e43/go.mod h1:tJPYQG4mnMeUtQvQKNkbsFrnmZOg59Qnf8CcctFv5v4= github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= -github.com/antlr/antlr4 v0.0.0-20200326173327-a4c66dc863bb h1:jGlQhNsk+RHK5IFSvL5wUi+ed8HZPBR2ovtGyahWOcU= -github.com/antlr/antlr4 v0.0.0-20200326173327-a4c66dc863bb/go.mod 
h1:T7PbCXFs94rrTttyxjbyT5+/1V8T2TYDejxUfHJjw1Y= +github.com/antlr/antlr4 v0.0.0-20200820155224-be881fa6b91d h1:OE3kzLBpy7pOJEzE55j9sdgrSilUPzzj++FWvp1cmIs= +github.com/antlr/antlr4 v0.0.0-20200820155224-be881fa6b91d/go.mod h1:T7PbCXFs94rrTttyxjbyT5+/1V8T2TYDejxUfHJjw1Y= github.com/apoydence/eachers v0.0.0-20181020210610-23942921fe77 h1:afT88tB6u9JCKQZVAAaa9ICz/uGn5Uw9ekn6P22mYKM= github.com/apoydence/eachers v0.0.0-20181020210610-23942921fe77/go.mod h1:bXvGk6IkT1Agy7qzJ+DjIw/SJ1AaB3AvAuMDVV+Vkoo= github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= diff --git a/x-pack/elastic-agent/CHANGELOG.next.asciidoc b/x-pack/elastic-agent/CHANGELOG.next.asciidoc index efe7ac71d78..938e1ea25ae 100644 --- a/x-pack/elastic-agent/CHANGELOG.next.asciidoc +++ b/x-pack/elastic-agent/CHANGELOG.next.asciidoc @@ -18,3 +18,4 @@ - Add new `synthetics/*` inputs to run Heartbeat {pull}20387[20387] - Users of the Docker image can now pass `FLEET_ENROLL_INSECURE=1` to include the `--insecure` flag with the `elastic-agent enroll` command {issue}20312[20312] {pull}20713[20713] - Add support for dynamic inputs with providers and `{{variable|"default"}}` substitution. {pull}20839[20839] +- Add support for EQL based condition on inputs {pull}20994[20994] diff --git a/x-pack/elastic-agent/pkg/agent/application/emitter.go b/x-pack/elastic-agent/pkg/agent/application/emitter.go index 52391b5eff5..d8a19492e2b 100644 --- a/x-pack/elastic-agent/pkg/agent/application/emitter.go +++ b/x-pack/elastic-agent/pkg/agent/application/emitter.go @@ -45,7 +45,7 @@ type emitterController struct { lock sync.RWMutex config *config.Config ast *transpiler.AST - vars []transpiler.Vars + vars []*transpiler.Vars } func (e *emitterController) Update(c *config.Config) error { @@ -68,38 +68,6 @@ func (e *emitterController) Update(c *config.Config) error { } } - // sanitary check that nothing in the config is wrong when it comes to variable syntax - ast := rawAst.Clone() - inputs, ok := transpiler.Lookup(ast, "inputs") - if ok { - renderedInputs, err := renderInputs(inputs, []transpiler.Vars{ - { - Mapping: map[string]interface{}{}, - }, - }) - if err != nil { - return err - } - err = transpiler.Insert(ast, renderedInputs, "inputs") - if err != nil { - return err - } - } - - programsToRun, err := program.Programs(ast) - if err != nil { - return err - } - - for _, decorator := range e.modifiers.Decorators { - for outputType, ptr := range programsToRun { - programsToRun[outputType], err = decorator(outputType, ast, ptr) - if err != nil { - return err - } - } - } - e.lock.Lock() e.config = c e.ast = rawAst @@ -108,7 +76,7 @@ func (e *emitterController) Update(c *config.Config) error { return e.update() } -func (e *emitterController) Set(vars []transpiler.Vars) { +func (e *emitterController) Set(vars []*transpiler.Vars) { e.lock.Lock() ast := e.ast e.vars = vars @@ -170,19 +138,16 @@ func (e *emitterController) update() error { func emitter(ctx context.Context, log *logger.Logger, controller composable.Controller, router programsDispatcher, modifiers *configModifiers, reloadables ...reloadable) (emitterFunc, error) { log.Debugf("Supported programs: %s", strings.Join(program.KnownProgramNames(), ", ")) + init, _ := transpiler.NewVars(map[string]interface{}{}) ctrl := &emitterController{ logger: log, controller: controller, router: router, modifiers: modifiers, reloadables: reloadables, - vars: []transpiler.Vars{ - { - Mapping: map[string]interface{}{}, - }, - }, + vars: 
[]*transpiler.Vars{init}, } - err := controller.Run(ctx, func(vars []transpiler.Vars) { + err := controller.Run(ctx, func(vars []*transpiler.Vars) { ctrl.Set(vars) }) if err != nil { @@ -202,7 +167,7 @@ func readfiles(files []string, emitter emitterFunc) error { return emitter(c) } -func renderInputs(inputs transpiler.Node, varsArray []transpiler.Vars) (transpiler.Node, error) { +func renderInputs(inputs transpiler.Node, varsArray []*transpiler.Vars) (transpiler.Node, error) { l, ok := inputs.Value().(*transpiler.List) if !ok { return nil, fmt.Errorf("inputs must be an array") @@ -224,6 +189,10 @@ func renderInputs(inputs transpiler.Node, varsArray []transpiler.Vars) (transpil // another error that needs to be reported return nil, err } + if n == nil { + // condition removed it + continue + } dict = n.(*transpiler.Dict) dict = promoteProcessors(dict) hash := string(dict.Hash()) diff --git a/x-pack/elastic-agent/pkg/agent/application/emitter_test.go b/x-pack/elastic-agent/pkg/agent/application/emitter_test.go index 0c5ba837328..32770eaa5df 100644 --- a/x-pack/elastic-agent/pkg/agent/application/emitter_test.go +++ b/x-pack/elastic-agent/pkg/agent/application/emitter_test.go @@ -18,16 +18,14 @@ func TestRenderInputs(t *testing.T) { testcases := map[string]struct { input transpiler.Node expected transpiler.Node - varsArray []transpiler.Vars + varsArray []*transpiler.Vars err bool }{ "inputs not list": { input: transpiler.NewKey("inputs", transpiler.NewStrVal("not list")), err: true, - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{}, - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{}), }, }, "bad variable error": { @@ -37,14 +35,12 @@ func TestRenderInputs(t *testing.T) { }), })), err: true, - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", }, - }, + }), }, }, "basic single var": { @@ -58,14 +54,12 @@ func TestRenderInputs(t *testing.T) { transpiler.NewKey("key", transpiler.NewStrVal("value1")), }), }), - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", }, - }, + }), }, }, "duplicate result is removed": { @@ -82,15 +76,13 @@ func TestRenderInputs(t *testing.T) { transpiler.NewKey("key", transpiler.NewStrVal("value1")), }), }), - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - "diff": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", + "diff": "value1", }, - }, + }), }, }, "missing var removes input": { @@ -110,15 +102,13 @@ func TestRenderInputs(t *testing.T) { transpiler.NewKey("key", transpiler.NewStrVal("value1")), }), }), - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - "diff": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", + "diff": "value1", }, - }, + }), }, }, "duplicate var result but unique input not removed": { @@ -142,15 +132,13 @@ func TestRenderInputs(t *testing.T) { transpiler.NewKey("unique", 
transpiler.NewStrVal("1")), }), }), - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - "diff": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", + "diff": "value1", }, - }, + }), }, }, "duplicates across vars array handled": { @@ -176,47 +164,37 @@ func TestRenderInputs(t *testing.T) { transpiler.NewKey("key", transpiler.NewStrVal("value4")), }), }), - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - "diff": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", + "diff": "value1", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - "diff": "value2", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", + "diff": "value2", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - "diff": "value3", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", + "diff": "value3", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - "diff": "value2", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", + "diff": "value2", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - "diff": "value4", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", + "diff": "value4", }, - }, + }), }, }, "nested in streams": { @@ -274,49 +252,37 @@ func TestRenderInputs(t *testing.T) { })), }), }), - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value2", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value2", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value2", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value2", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value3", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value3", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value4", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value4", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "missing": "other", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "missing": "other", }, - }, + }), }, }, "inputs with processors": { @@ -384,21 +350,17 @@ func TestRenderInputs(t *testing.T) { })), }), }), - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ 
+ "name": "value1", }, - }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value2", - }, + }), + mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value2", }, - }, + }), }, }, "vars with processors": { @@ -482,15 +444,14 @@ func TestRenderInputs(t *testing.T) { })), }), }), - varsArray: []transpiler.Vars{ - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value1", - }, + varsArray: []*transpiler.Vars{ + mustMakeVarsP(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value1", }, - ProcessorsKey: "var1", - Processors: []map[string]interface{}{ + }, + "var1", + []map[string]interface{}{ { "add_fields": map[string]interface{}{ "fields": map[string]interface{}{ @@ -499,16 +460,14 @@ func TestRenderInputs(t *testing.T) { "to": "dynamic", }, }, + }), + mustMakeVarsP(map[string]interface{}{ + "var1": map[string]interface{}{ + "name": "value2", }, }, - { - Mapping: map[string]interface{}{ - "var1": map[string]interface{}{ - "name": "value2", - }, - }, - ProcessorsKey: "var1", - Processors: []map[string]interface{}{ + "var1", + []map[string]interface{}{ { "add_fields": map[string]interface{}{ "fields": map[string]interface{}{ @@ -517,8 +476,7 @@ func TestRenderInputs(t *testing.T) { "to": "dynamic", }, }, - }, - }, + }), }, }, } @@ -535,3 +493,19 @@ func TestRenderInputs(t *testing.T) { }) } } + +func mustMakeVars(mapping map[string]interface{}) *transpiler.Vars { + v, err := transpiler.NewVars(mapping) + if err != nil { + panic(err) + } + return v +} + +func mustMakeVarsP(mapping map[string]interface{}, processorKey string, processors transpiler.Processors) *transpiler.Vars { + v, err := transpiler.NewVarsWithProcessors(mapping, processorKey, processors) + if err != nil { + panic(err) + } + return v +} diff --git a/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter.go b/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter.go deleted file mode 100644 index d9abd6c7aa3..00000000000 --- a/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter.go +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -package filters - -import ( - "fmt" - - "github.com/Masterminds/semver" - - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/application/info" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/errors" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/transpiler" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/boolexp" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/core/logger" -) - -const ( - inputsKey = "inputs" - constraintsKey = "constraints" - validateVersionFuncName = "validate_version" -) - -var ( - boolexpVarStore *constraintVarStore - boolexpMethodsRegs *boolexp.MethodsReg -) - -// ConstraintFilter filters ast based on included constraints. -// constraints are still Experimental and should not be used in production. 
-func ConstraintFilter(log *logger.Logger, ast *transpiler.AST) error { - // get datasources - inputsNode, found := transpiler.Lookup(ast, inputsKey) - if !found { - return nil - } - - inputsListNode, ok := inputsNode.Value().(*transpiler.List) - if !ok { - return nil - } - - inputsList, ok := inputsListNode.Value().([]transpiler.Node) - if !ok { - return nil - } - - // for each datasource - i := 0 - originalLen := len(inputsList) - for i < len(inputsList) { - constraintMatch, err := evaluateConstraints(log, inputsList[i]) - if err != nil { - return err - } - - if constraintMatch { - i++ - continue - } - inputsList = append(inputsList[:i], inputsList[i+1:]...) - } - - if len(inputsList) == originalLen { - return nil - } - - // Replace datasources with limited set - if err := transpiler.RemoveKey(inputsKey).Apply(ast); err != nil { - return err - } - - newList := transpiler.NewList(inputsList) - return transpiler.Insert(ast, newList, inputsKey) -} - -func evaluateConstraints(log *logger.Logger, datasourceNode transpiler.Node) (bool, error) { - constraintsNode, found := datasourceNode.Find(constraintsKey) - if !found { - return true, nil - } - - constraintsListNode, ok := constraintsNode.Value().(*transpiler.List) - if !ok { - return false, errors.New("constraints not a list", errors.TypeConfig) - } - - constraintsList, ok := constraintsListNode.Value().([]transpiler.Node) - if !ok { - return false, errors.New("constraints not a list", errors.TypeConfig) - } - - for _, c := range constraintsList { - strval, ok := c.(*transpiler.StrVal) - if !ok { - return false, errors.New("constraints is not a string") - } - - constraint := strval.String() - if isOK, err := evaluateConstraint(constraint); !isOK || err != nil { - if err == nil { - // log only constraint not matching - log.Infof("constraint '%s' not matching for datasource '%s'", constraint, datasourceIdentifier(datasourceNode)) - } - - return false, err - } - } - - return true, nil -} - -func datasourceIdentifier(datasourceNode transpiler.Node) string { - namespace := "default" - output := "default" - - if nsNode, found := datasourceNode.Find("namespace"); found { - nsKey, ok := nsNode.(*transpiler.Key) - if ok { - if valNode, ok := nsKey.Value().(transpiler.Node); ok { - namespace = valNode.String() - } - } - } - - if outNode, found := datasourceNode.Find("use_output"); found { - nsKey, ok := outNode.(*transpiler.Key) - if ok { - if valNode, ok := nsKey.Value().(transpiler.Node); ok { - output = valNode.String() - } - } - } - - ID := "unknown" - if idNode, found := datasourceNode.Find("id"); found { - nsKey, ok := idNode.(*transpiler.Key) - if ok { - if valNode, ok := nsKey.Value().(transpiler.Node); ok { - ID = valNode.String() - } - } - } - - return fmt.Sprintf("namespace:%s, output:%s, id:%s", namespace, output, ID) -} - -func evaluateConstraint(constraint string) (bool, error) { - store, regs, err := boolexpMachinery() - if err != nil { - return false, err - } - - return boolexp.Eval(constraint, regs, store) -} - -func boolexpMachinery() (*constraintVarStore, *boolexp.MethodsReg, error) { - if boolexpMethodsRegs != nil && boolexpVarStore != nil { - return boolexpVarStore, boolexpMethodsRegs, nil - } - - regs := boolexp.NewMethodsReg() - if err := regs.Register(validateVersionFuncName, regValidateVersion); err != nil { - return nil, nil, err - } - - store, err := newVarStore() - if err != nil { - return nil, nil, err - } - - if err := initVarStore(store); err != nil { - return nil, nil, err - } - - boolexpMethodsRegs = regs - 
boolexpVarStore = store - - return boolexpVarStore, boolexpMethodsRegs, nil -} - -func regValidateVersion(args []interface{}) (interface{}, error) { - if len(args) != 2 { - return false, errors.New("validate_version: invalid number of arguments, expecting 2") - } - - version, isString := args[0].(string) - if !isString { - return false, errors.New("version should be a string") - } - - constraint, isString := args[1].(string) - if !isString { - return false, errors.New("version constraint should be a string") - } - - c, err := semver.NewConstraint(constraint) - if err != nil { - return false, errors.New(fmt.Sprintf("constraint '%s' is invalid", constraint)) - } - - v, err := semver.NewVersion(version) - if err != nil { - return false, errors.New(fmt.Sprintf("version '%s' is invalid", version)) - } - - isOK, _ := c.Validate(v) - return isOK, nil -} - -type constraintVarStore struct { - vars map[string]interface{} -} - -func (s *constraintVarStore) Lookup(v string) (interface{}, bool) { - val, ok := s.vars[v] - return val, ok -} - -func newVarStore() (*constraintVarStore, error) { - return &constraintVarStore{ - vars: make(map[string]interface{}), - }, nil -} - -func initVarStore(store *constraintVarStore) error { - agentInfo, err := info.NewAgentInfo() - if err != nil { - return err - } - - meta, err := agentInfo.ECSMetadataFlatMap() - if err != nil { - return errors.New(err, "failed to gather host metadata") - } - - // keep existing, overwrite gathered - for k, v := range meta { - store.vars[k] = v - } - - return nil -} diff --git a/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter_test.go b/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter_test.go deleted file mode 100644 index d7e3190dd92..00000000000 --- a/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter_test.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. 
- -package filters - -import ( - "os" - "runtime" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/release" -) - -func TestEvaluation(t *testing.T) { - type testCase struct { - name string - condition string - result bool - } - - testCases := []testCase{ - {"simple version", "validate_version(%{[agent.version]}, '" + release.Version() + "')", true}, - {"~ version release", "validate_version(%{[agent.version]}, '~" + release.Version() + "')", true}, - {"^ version release", "validate_version(%{[agent.version]}, '^" + release.Version() + "')", true}, - {"range to release", "validate_version(%{[agent.version]}, '1.0.0 - " + release.Version() + "')", true}, - {"range lower", "validate_version(%{[agent.version]}, '1.0.0 - 5.0.0')", false}, - {"range include", "validate_version(%{[agent.version]}, '1.0.0 - 100.0.0')", true}, - {"family should equal", "%{[os.family]} == '" + runtime.GOOS + "'", true}, - {"family should not equal", "%{[os.family]} != '" + runtime.GOOS + "'", false}, - } - - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - r, err := evaluateConstraint(tc.condition) - assert.NoError(t, err) - assert.Equal(t, tc.result, r) - - // cleanup - os.Remove("fleet.yml") - os.Remove("fleet.yml.old") - }) - } -} diff --git a/x-pack/elastic-agent/pkg/agent/application/inspect_output_cmd.go b/x-pack/elastic-agent/pkg/agent/application/inspect_output_cmd.go index ca9983cca22..c25966c6219 100644 --- a/x-pack/elastic-agent/pkg/agent/application/inspect_output_cmd.go +++ b/x-pack/elastic-agent/pkg/agent/application/inspect_output_cmd.go @@ -11,7 +11,6 @@ import ( "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/transpiler" "github.com/elastic/beats/v7/libbeat/logp" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/application/filters" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/configuration" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/program" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/composable" @@ -191,7 +190,6 @@ func getProgramsFromConfig(log *logger.Logger, cfg *config.Config) (map[string][ router, &configModifiers{ Decorators: []decoratorFunc{injectMonitoring}, - Filters: []filterFunc{filters.ConstraintFilter}, }, monitor, ) @@ -232,7 +230,7 @@ func newWaitForCompose(wrapped composable.Controller) *waitForCompose { } func (w *waitForCompose) Run(ctx context.Context, cb composable.VarsCallback) error { - err := w.controller.Run(ctx, func(vars []transpiler.Vars) { + err := w.controller.Run(ctx, func(vars []*transpiler.Vars) { cb(vars) w.done <- true }) diff --git a/x-pack/elastic-agent/pkg/agent/application/local_mode.go b/x-pack/elastic-agent/pkg/agent/application/local_mode.go index 4b0753af9a8..e88f2734c75 100644 --- a/x-pack/elastic-agent/pkg/agent/application/local_mode.go +++ b/x-pack/elastic-agent/pkg/agent/application/local_mode.go @@ -116,7 +116,7 @@ func newLocal( router, &configModifiers{ Decorators: []decoratorFunc{injectMonitoring}, - Filters: []filterFunc{filters.StreamChecker, filters.ConstraintFilter}, + Filters: []filterFunc{filters.StreamChecker}, }, monitor, ) diff --git a/x-pack/elastic-agent/pkg/agent/application/managed_mode.go b/x-pack/elastic-agent/pkg/agent/application/managed_mode.go index 8b9320b53c0..7970c7000e9 100644 --- a/x-pack/elastic-agent/pkg/agent/application/managed_mode.go +++ b/x-pack/elastic-agent/pkg/agent/application/managed_mode.go @@ -164,7 
+164,7 @@ func newManaged( router, &configModifiers{ Decorators: []decoratorFunc{injectMonitoring}, - Filters: []filterFunc{filters.StreamChecker, injectFleet(config), filters.ConstraintFilter}, + Filters: []filterFunc{filters.StreamChecker, injectFleet(config)}, }, monitor, ) diff --git a/x-pack/elastic-agent/pkg/agent/application/managed_mode_test.go b/x-pack/elastic-agent/pkg/agent/application/managed_mode_test.go index efc8608c233..a636790b056 100644 --- a/x-pack/elastic-agent/pkg/agent/application/managed_mode_test.go +++ b/x-pack/elastic-agent/pkg/agent/application/managed_mode_test.go @@ -12,7 +12,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/application/filters" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/configrequest" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/composable" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/core/logger" @@ -34,7 +33,7 @@ func TestManagedModeRouting(t *testing.T) { log, _ := logger.New("") router, _ := newRouter(log, streamFn) composableCtrl, _ := composable.New(nil) - emit, err := emitter(ctx, log, composableCtrl, router, &configModifiers{Decorators: []decoratorFunc{injectMonitoring}, Filters: []filterFunc{filters.ConstraintFilter}}) + emit, err := emitter(ctx, log, composableCtrl, router, &configModifiers{Decorators: []decoratorFunc{injectMonitoring}}) require.NoError(t, err) actionDispatcher, err := newActionDispatcher(ctx, log, &handlerDefault{log: log}) diff --git a/x-pack/elastic-agent/pkg/agent/program/methods.go b/x-pack/elastic-agent/pkg/agent/program/methods.go deleted file mode 100644 index e89d8a5f19f..00000000000 --- a/x-pack/elastic-agent/pkg/agent/program/methods.go +++ /dev/null @@ -1,168 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -package program - -import ( - "fmt" - - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/transpiler" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/boolexp" -) - -type env struct { - ast *transpiler.AST - vars boolexp.VarStore -} - -type envFunc = func(*env, []interface{}) (interface{}, error) - -func methodsEnv(ast *transpiler.AST) *boolexp.MethodsReg { - env := &env{ - ast: ast, - vars: &varStoreAST{ast: ast}, - } - - var methods = boolexp.NewMethodsReg() - methods.MustRegister("HasItems", withEnv(env, hasItems)) - methods.MustRegister("HasNamespace", withEnv(env, hasNamespace)) - methods.MustRegister("HasAny", withEnv(env, hasAny)) - return methods -} - -// hasItems the methods take a selector which must be a list, and look for the presence item in the -// list which are "enabled". The logic to determine if an item is enabled is the following: -// - When the "enabled" key is present and set to "true", The item is enabled. -// - When the "enabled" key is missing, the item is enabled. -// - When the "enabled" key is present and set to "false", The item is NOT enabled. 
-func hasItems(_ *env, args []interface{}) (interface{}, error) { - if len(args) != 1 { - return false, fmt.Errorf("expecting 1 argument received %d", len(args)) - } - - if args[0] == boolexp.Null { - return false, nil - } - - v, ok := args[0].(transpiler.Node).Value().(*transpiler.List) - if !ok { - return false, fmt.Errorf("expecting List and received %T", args[0]) - } - - for _, item := range v.Value().([]transpiler.Node) { - d, ok := item.(*transpiler.Dict) - if !ok { - return false, fmt.Errorf("expecting Dict and received %T", args[0]) - } - - if isEnabled(d) { - return true, nil - } - } - - return false, nil -} - -// hasItems the methods take a selector which must be map and look if the map is enabled. -// The logic to determine if a map is enabled is the following: -// - When the "enabled" key is present and set to "true", The item is enabled. -// - When the "enabled" key is missing, the item is enabled. -// - When the "enabled" key is present and set to "false", The item is NOT enabled. -func hasNamespace(env *env, args []interface{}) (interface{}, error) { - if len(args) < 2 { - return false, fmt.Errorf("expecting at least 2 arguments received %d", len(args)) - } - - namespace, ok := args[0].(string) - if !ok { - return false, fmt.Errorf("invalid namespace %+v", args[0]) - } - - possibleSubKey := make([]string, 0, len(args)) - - for _, v := range args[1:] { - sk, ok := v.(string) - if !ok { - return false, fmt.Errorf("invalid sub key %+v for namespace", v) - } - possibleSubKey = append(possibleSubKey, sk) - } - - var enabledCount int - for _, key := range possibleSubKey { - f := namespace + "." + key - s, ok := transpiler.Lookup(env.ast, transpiler.Selector(f)) - if !ok { - continue - } - - if isEnabled(s) { - enabledCount++ - } - - if enabledCount > 1 { - return false, fmt.Errorf("only one namespace must be enabled in %s", namespace) - } - } - - if enabledCount == 0 { - return false, nil - } - - return true, nil -} - -// hasAny the methods take a list of possible keys where at least one of those keys must exist. -func hasAny(env *env, args []interface{}) (interface{}, error) { - if len(args) < 1 { - return false, fmt.Errorf("expecting at least 1 argument received %d", len(args)) - } - - possibleKeys := make([]string, 0, len(args)) - - for _, v := range args { - sk, ok := v.(string) - if !ok { - return false, fmt.Errorf("invalid key %+v", v) - } - possibleKeys = append(possibleKeys, sk) - } - - for _, key := range possibleKeys { - _, ok := transpiler.Lookup(env.ast, transpiler.Selector(key)) - if ok { - return true, nil - } - } - return false, nil -} - -func withEnv(env *env, method envFunc) boolexp.CallFunc { - return func(args []interface{}) (interface{}, error) { - return method(env, args) - } -} - -func isEnabled(n transpiler.Node) bool { - enabled, ok := n.Find("enabled") - if !ok { - return true - } - - // Get the actual value of the node. 
- value, ok := enabled.Value().(transpiler.Node).Value().(bool) - if !ok { - return false - } - - return value -} - -type varStoreAST struct { - ast *transpiler.AST -} - -func (v *varStoreAST) Lookup(needle string) (interface{}, bool) { - return transpiler.Lookup(v.ast, transpiler.Selector(needle)) -} diff --git a/x-pack/elastic-agent/pkg/agent/program/program.go b/x-pack/elastic-agent/pkg/agent/program/program.go index 17f796aa75f..25b56081e68 100644 --- a/x-pack/elastic-agent/pkg/agent/program/program.go +++ b/x-pack/elastic-agent/pkg/agent/program/program.go @@ -10,7 +10,7 @@ import ( "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/errors" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/transpiler" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/boolexp" + "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/eql" ) // Program represents a program that must be started or must run. @@ -78,12 +78,12 @@ func detectPrograms(singleConfig *transpiler.AST) ([]Program, error) { return nil, ErrMissingWhen } - expression, err := boolexp.New(spec.When, methodsEnv(specificAST)) + expression, err := eql.New(spec.When) if err != nil { return nil, err } - ok, err := expression.Eval(&varStoreAST{ast: specificAST}) + ok, err := expression.Eval(specificAST) if err != nil { return nil, err } @@ -131,7 +131,7 @@ func groupByOutputs(single *transpiler.AST) (map[string]*transpiler.AST, error) // Recreates multiple configuration grouped by the name of the outputs. // Each configuration will be started into his own operator with the same name as the output. - grouped := make(map[string]map[string]interface{}) + grouped := make(map[string]*outputType) m, ok := normMap[outputsKey] if !ok { @@ -164,13 +164,21 @@ func groupByOutputs(single *transpiler.AST) (map[string]*transpiler.AST, error) delete(outputsOptions, typeKey) + enabled, err := isEnabled(outputsOptions) + if err != nil { + return nil, err + } + // Propagate global configuration to each individual configuration. 
clone := cloneMap(normMap) delete(clone, outputsKey) clone[outputKey] = map[string]interface{}{n: v} clone[inputsKey] = make([]map[string]interface{}, 0) - grouped[k] = clone + grouped[k] = &outputType{ + enabled: enabled, + config: clone, + } } s, ok := normMap[inputsKey] @@ -199,21 +207,24 @@ func groupByOutputs(single *transpiler.AST) (map[string]*transpiler.AST, error) return nil, fmt.Errorf("unknown configuration output with name %s", targetName) } - streams := config[inputsKey].([]map[string]interface{}) + streams := config.config[inputsKey].([]map[string]interface{}) streams = append(streams, stream) - config[inputsKey] = streams + config.config[inputsKey] = streams grouped[targetName] = config } transpiled := make(map[string]*transpiler.AST) for name, group := range grouped { - if len(group[inputsKey].([]map[string]interface{})) == 0 { + if !group.enabled { + continue + } + if len(group.config[inputsKey].([]map[string]interface{})) == 0 { continue } - ast, err := transpiler.NewAST(group) + ast, err := transpiler.NewAST(group.config) if err != nil { return nil, errors.New(err, "fail to generate configuration for output name %s", name) } @@ -224,6 +235,22 @@ func groupByOutputs(single *transpiler.AST) (map[string]*transpiler.AST, error) return transpiled, nil } +func isEnabled(m map[string]interface{}) (bool, error) { + const ( + enabledKey = "enabled" + ) + + enabled, ok := m[enabledKey] + if !ok { + return true, nil + } + switch e := enabled.(type) { + case bool: + return e, nil + } + return false, fmt.Errorf("invalid type received for enabled %T and expecting a boolean", enabled) +} + func findOutputName(m map[string]interface{}) string { const ( defaultOutputName = "default" @@ -251,3 +278,8 @@ func cloneMap(m map[string]interface{}) map[string]interface{} { return newMap } + +type outputType struct { + enabled bool + config map[string]interface{} +} diff --git a/x-pack/elastic-agent/pkg/agent/program/program_test.go b/x-pack/elastic-agent/pkg/agent/program/program_test.go index 8c46e15deec..c15510b6655 100644 --- a/x-pack/elastic-agent/pkg/agent/program/program_test.go +++ b/x-pack/elastic-agent/pkg/agent/program/program_test.go @@ -17,10 +17,8 @@ import ( "github.com/stretchr/testify/require" yaml "gopkg.in/yaml.v2" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/application/filters" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/internal/yamltest" "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/agent/transpiler" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/core/logger" ) func TestGroupBy(t *testing.T) { @@ -382,16 +380,13 @@ func TestConfiguration(t *testing.T) { testcases := map[string]struct { programs []string expected int + empty bool err bool }{ "single_config": { programs: []string{"filebeat", "heartbeat", "metricbeat", "endpoint"}, expected: 4, }, - "constraints_config": { - programs: []string{"filebeat"}, - expected: 1, - }, // "audit_config": { // programs: []string{"auditbeat"}, // expected: 1, @@ -416,7 +411,7 @@ func TestConfiguration(t *testing.T) { expected: 1, }, "enabled_output_false": { - expected: 0, + empty: true, }, "endpoint_basic": { programs: []string{"endpoint"}, @@ -430,7 +425,6 @@ func TestConfiguration(t *testing.T) { }, } - l, _ := logger.New("") for name, test := range testcases { t.Run(name, func(t *testing.T) { singleConfig, err := ioutil.ReadFile(filepath.Join("testdata", name+".yml")) @@ -443,14 +437,16 @@ func TestConfiguration(t *testing.T) { ast, err := 
transpiler.NewAST(m) require.NoError(t, err) - filters.ConstraintFilter(l, ast) - programs, err := Programs(ast) if test.err { require.Error(t, err) return } require.NoError(t, err) + if test.empty { + require.Equal(t, 0, len(programs)) + return + } require.Equal(t, 1, len(programs)) diff --git a/x-pack/elastic-agent/pkg/agent/program/supported.go b/x-pack/elastic-agent/pkg/agent/program/supported.go index ecd1232200a..d7b59f59363 100644 --- a/x-pack/elastic-agent/pkg/agent/program/supported.go +++ b/x-pack/elastic-agent/pkg/agent/program/supported.go @@ -21,7 +21,7 @@ func init() { // spec/filebeat.yml // spec/heartbeat.yml // spec/metricbeat.yml - unpacked := packer.MustUnpack("eJy8WVtzozgWft+f0a+7tQsizg5bNQ82GQTYIW2SSEJvSLINtsBsjC+wtf99S4C5OEn39M5UP3S5jYV0dC7f+b6T/3w55Cv+j1Um8n2SFX8vU/nlX19Yahf0Zb8J8GTHoZmzbLl5BWgriJcLZzcPgb57SmaSpcGZAXkUll5R7Os8ldpqmcc8C3Ka2lvxsN/Qfo+CQgSszJc8o3kIXu/dh9B4etjMQxDLEBTrCE8qAe0De9jPF88zuYJoSwDNGXy9t5LpxrVm55AE+6dkmgz35b1tSbsu5qmonjb7jWtNN4vnaSJSVEaYTtz2mYCyoNjUlY2P1XTOoVkJW+3nayG+HJ42+8KF6I5if01TeaAv+7l6z3VmsYCbe9fyPr7/s9usg3ZJjcfW7mnhWl63tzuwa/Gs6xyKMsSBvHleUuKfBPG2lDwmg30+OXe0/rhK5fmju/rb6dnKZiVFps5SeeRGEDN4vrcSbUNJLEPdTCN8kVffcWhr0cN+46boSJ3ZKcITbUF8GRqojEjQ+TMkXsar1kdXn+PJuzu/t8XTGURV42+ar2yzEo4nQ6zdu05hWu1z5gSSSxOE+KJTcvXrrKL4IkMjOPHtfhPhyVmQoGp/e6Nkd+86wYTD1zZ2NGYOkr2d2jA/57UPUnkQEJXEuFnr+JJBtBXQLJ+SWc6ymS6cxzbWhVy91Lkeh+lF0ml719Q+CIwGeTjTeIZkfafrfnXOBafO3wAdKPY1ZnjVUzJjVO1HlscQ+1tK/IoA+xwhU93t4EJ6oBhpi7TIw9Q+hkgb52j3u32OlnVNFSGZ3tTSLGUQSdHazDN06P07LVzHkwybgDZnXp/X/yKAJk/JLA6BL7nhr0Myywko5GrZ3bekWD+JFK3rte0dhz6LgExCPIlHcd6+9/koZo1Puu/juE8LF5q6cGb69U61HYTmHMgT2+znAsSSbfebZ2hXSzyZzC3R+MDiuWuFqftbHHNNahTr1dxC/8RAHrmDNK7r5vx5mnrJzA2J/xRiXXJD3f9174H2TIsfXEtIBu1KQLnlAMU89fdeuZt/+VsDt+tErtgqege3CmawJ0OyvEJsXYphimIxzRtIS2bMTXTbTc4bN/OlcNB5kcoDe55IltoJg2j3FavU9WW95nZtFkhGZoeQBHKRomOIvQPFS5Om9oGD12RhTZPFa/PJsH0MsZAMo6OwJgUDgfxKNgWH9jYq9dZl7sG13CJ4Vp9eoUJJASqogonB/sLxdPo8WntgQGQRnmSL9CJFig5fcSDDDGWu1OYh8bQI0zg0lvcuVD4JqkXdClBCsa19FzaSOi3+rUqJAHmkEN1d00848qz8zaCZ8XNdFjlLcwUja24EJcV2QYxZyZq0PnXpCM0jAf6JpfQQYV9rYEC1s2AdYqpR0kJ/Azn3LrycqPFYwwrD9vkWUm/gqhT4MoKmEJjnFTJjBi9rAc01g7ISDz3EutZMY9V+c7WZn4fl9c7WIwPmeVi+lMRbSmZanVOZr/EUxYw81rGP8LL+7CCtjrN35qlZw5CCJxWnG1s1ppuHiPjauNRVKam4DHyaPf6/9+h9nqKUGV4Lp6ot1nXUxoqWDGj3LmxL93xtP7/0z4zuzvO2/Wlc0Ra7uQMBDQR8FrdbeyMSSPby/h6jM8+fwvC4pThdfvetI7WPHFxi0dGh6ciuOq+XQ9/pMXdmPZx2zy8n2tKq+v9Df9d5QSXLlqeW9tR1MjzPtWaqXo/CMisBg1xBKTeCXYTvbs5BoMYBI9hyZR/0z5/so1Nneu86aMenY1vU2QsQnEJQqHtsKDS3EUDlzT4HBviJp2gXEX/NweUkwOVEVU7Vzx7f3780qxXx1Xv3ruNP1DtXP/yetiWILwn4oM185z0KbS1EHVZ19cNTVDCDyrp9voxqvKExMIgFtDt8WqSTmGFUKSymP9Bub84/1t+Jr9q/ykvVbzRKvPUtdelpifuuploaoK3ITLY5fUPTVEu2DQ78AzPQTgBbC8Gmxw6S6zxFTS7AWA/7nN8yaOoU0pNQNC3bDfHmKKCds7TLhcKFbZ7072cMmBmD6CywVwns9pTBCTRumz2V7mO4F05wjjJfUYXrs7cQ07ew97PKm24vRmxt8P6RkiBl0DR6u36p/C2KBQ7yca4EOe9zrnoc/L+uq/78a652Zw5qqRjWzeJ5pmj9Xf+byFla5/YfjW/HLz6NsROUAr/WWFr3g9+FxYOe/kMUsOdKXRwMpKn9e78XckVqKi2tTOwpvrt/T+368xfpn0HzzhvP8CSFsvLKs7Iti4CdRuC3bGFNs5pzGEHMwSabL/e/XilhvIreig844TNESk43/kn9gqr+M3rWyEzXPnS8jQOkCTI9RvhSfI/jXdcKiAoO695y7Hr+g56G+FLd8LYbjqefKHw1V5Z+DrH/tsCN5Bhxz1SPWWpnFOuqrwz3r+XQeK3qTSpn+ZHV/eNsUogSgXlCbkYLNS92Hk8jf2TDvlnnyR259vmX/WZl1Jj7gaT01wJILbLNkmIhV86077FXzB/111mp4kMyf8KyYE9JkIfG42mRHDrs/rymviEzv1OLfT/o4tq9d1vTHVYo7r0cn9uvGUjo7HHEK2pePOQk1sfyqsNUgCZc7ZO+jiTWs+JF5HHvkYJduRMh2kBqDXrZ8ya71nFdSwAdQuJrEfYriu0yBJu6jtq+ni1qG0SDz891TSpdUQloV5HFc2vza1dj6ap4S/gHRfaCkcZTuW2F1pZhJTp0KRwvD0EryJr5xgaXXSFUlAQ6tyY5g9r3iua6VgHPmUFbo98TbDdFw7C5oy/63YIoIDoUrQb+lmDr9yc1MI/XQjOjShiVk0NN+B70HcWeTktPKFARUKZhQ7jrwuKlWVASlB
H220KbnbgRjGZ2TXI0BGQ0MxvNkfQTdeoZw5FaNQlT4uC4wno3I1KCQPmbkuW9AhwGgrqoF+nyxA1ZKbBaZLJg1kQRvqtAmfezi48Lfyj0IjzZUbK5kvCavDwls+sdq6YZyWOU1jOaljDpa+54pxCgigOzKyIGJmtFSGh6yRvBKo8coFLYZkyzoCMknfBs860VA6XKHYa7GWfKU7N4LwiCU//Mv9rT2qnH/OFmPvmByPlEWGyZMZsQYB+Y/YmAa87uzxyAxPu7T07MmA4JrFxBX3JnWTepThyVdV3krbDscrUZVoyEYkKWN7YawYmAS86N5XgedRVggxiNxOQP3aOLYUIxrUHtJ4vEd2SNGCIXMF7zFGWUxN1A4QMC3jSn5O5tAVocMx5335yZ/tE5621D+mDW2jaiP0EAo8+b9bcEseOpGl/NH8zl12ZI89dFcsjf+6htqOqMh/3GG86QG7F2VKRwLLBakjta2w83FH4LfJE9OdXjCKB1SLwyvJ2rtjnS4QToSO0gV642+7Jv0L9HSA7e+xHhejPb/rlit/5eDWe+P0sw3wj9+RWDrmtp5p1UrG5qoo35kFd8E+tHvbUTIiNs6Prm6O8UH/7doK+Zcc/+fD7+Ronqq2ap6vMz8ja6TzMQrmv0D5G5hsA1YrisCdy3ydx///K/AAAA///cvrB1") + unpacked := packer.MustUnpack("eJy8WUtzozoW3s/P6O2dmuERZy5TdRc2KQTYIW2SSEI7JNmALTAT8AOm5r9PCTAPJ+m+PT3Viy63sZCOzuM733fy7y9FvmF/32Q8PyRZ+bcqFV/++YWmVkleDpGPZnsGjJxm6+hVgzuO3Zzb+2WgqfunZCFo6p+pJo7cVGuCPJWlQtms85hlfk5Sa8cfDhEZ9igJgJqZeYJlJA+013vnIdCfHqJloMUi0MptiGY1B1ZBHw7L1fNCbADcYY3kFLzem8k8cszFOcD+4SmZJ+N92WBb0q2LWcrrp+gQOeY8Wj3PE57CKkRk5nTPOBAlQYYqbXys50sGjJpbcj9PCdCleIoOpQPgHUHelqSiIC+HpXzPsRcxB9G9Y7of3//ZadcBqyL6Y2f3vHRMt9/bGdm1elZVBngVIF/cPK8I9k4cuzuCH5PRPp+cO1l/3KTi/NFdvd38bGaLikBDpak4Mt2PKTjfm4kSERyLQDXSEF3E1XcMWEr4cIicFB6JvTiFaKassCcCHVYh9nt/BtjNWN356OpzNHt35/e2uCoFsG79TfKNZdTcdkWAlHvHLg2ze05tXzBhaAG6qARf/bqoCbqIQPdPbHeIQjQ7c+zX3W9vBO/vHdufMfDaxY7E1IZisFMZ5+ey8UEqCg5ghfWbtbYnKIA7DozqKVnkNFuo3H7sYl2KzUuT63GQXgSZd3dNrYIjOMrDhcIyKJo7Xfdrcs4/9f7WYEGQp1DdrZ+SBSVyP7w+BsjbEezVWLPOITTk3QoHkIIgqKzSMg9S6xhAZZqj/e/WOVw3NVUGeH5TS4uUAih4ZzPLYDH4d146tisoMjTSnnl93vwLNTh7ShZxoHmC6d42wIsca6XYrPv7VgSpJ57CbbO2u+PYZ6EmkgDN4kmcd+99PolZ65P++zTu89IBhsrthXq9U2MHJjnTxIlGhyXXYkF3h4gCeCS6f1ia/j/aPX1j+Tz/zXmYRwGa7R1wETTlSmhG+40mjsyGCtOV3Hm4ix7NRUzTdRQCq37W4EzuQXWoyDXb53PkarAIsKeEyKsJsqpAi7Ll+vDHl7+2kLtNxIZuwneQK6EGuSLA6yvMNuUYpDDm87yFtWRBnUS1nOQcOZknuA3Pq1QU9HkmaGolFMD9VyTT1xPNmtu1mS8oXhQB9sUqhccAuQVBa4OkVsG012RlzpPVa/tJkXUMEBcUwSM3ZyXVfPEVRyUD1i6s1DZ1TKdwTKf0n+WnW8pwEg2WRELFaH9uuyp5nqwtqMazEM2yVXoRPIXFV+SLIIOZI5RlgF0lRCQO9PW9A6RP/HrVtAOYEGQp34WOpEmNf8lywpo4EgDvrinIbXGW/qbAyNi5KY2cprmEki3T/Yogq8T6oqJtap/6lATGEWveiaakCJGntFAgW5q/DRBRCO7gv4WdewdcTkR/bKCFIut8C6s3kFVxdJnAU6AZ5w00YgouWw6MLQWi5g8DzDrmQqH1IbrazM7jEntn65FqxnlcwgTHO4IXSpNTmaewFMYUPzaxD9G6+exhrYmze2ap0UCRhCgZpxtbFaoaRYg9ZVruQiFNXEY+zR7/13sMPk9hSnW3g1TZGps66mJFKqop9w7oSvd8bUG/D8/0/s7LrgUqTFIXq70D1qTdav1Z3G7tDbEv6Mv7e0zOPH8KxdO2Yvf5PbSP1Doy7RLznhLNJ3Y1eb0e+06Nmb0YILV/fjmRjlo1/x/7u8kLImi2PnXUp6mT8XmOuZD1euSmUXPg5xJOme7vQ3R3cw7UGhzQ/R2T9gHv/Mk+KrHn944N92w+tUWevdL8U6CV8h4RAcYu1GB1s09BNXZiKdyH2Nsy7XLi2uVEZE41zx7f378y6g325Hv3ju3N5DtXP/yZ1sWxJ7D2Qav5znsEWEoAe6zq64elsKQ6EU0LfZnUeEtlgB9zYPX4tEpnMUWwllhMfqDl3px/bL5jT1IAmZey3ygEu9tb+jJQE+ddTXVUQNnghehy+oaqybZs6UzzCqrDPdcsJdCiATtwrrIUtrkAYjUYcn5HgaESQE5cUrVsP8abIwdWTtM+F0oHdHkyvJ9RzcgogGeO3JojZ6ANtq8wyxjo9BDDA7f9c5h5ki5cn70FiLwFg59l3vR7UWwpo/ePBPspBYY+2PV77e1gzJGfT3PFz9mQc/Xj6P9NXQ3nX3O1P3NUS+W4blbPC0nt74bfeE7TJrd/Nr49v/g0xrZfcfTaYGnTD/4UFo96+g/RwIEr9XHoKNjg91JscEOnhZnxA0F391NaN5y9Sn+e4q3MedbhTbZq4sHbvHlmhWNyyXekxK1Dk+Vm9EdPB+NN+FZ+wAefAZRyuvVN6pVE9p7Js1ZmOlbRczamQYXj+TFEl/J7/O66lgNYMtD0lWPf7x/UNECX+oaz3fA79UTAq7Ex1XOAvLcVaiXHhHemakxTKyNIlT1lvH8jh6ZrZV+S+cqOtOkdZ4MAmHDEEnwzWmg4sf14mvgjG/fMJkfu8LXHvxyijd7g7QeS0ttyTSihZVQEcbGx50N/veL9pLcuKhkfnHkzmvkHgv080B9Pq6TocfvzevqGzPxOHQ69oI9r/95tPfc4IXn3enrusGYkobPHCadoOPGYj5gfy6seTzU4Y3Kf9PVDiXXlTNvnffQ1mZ8dYB2JuTgE2FsRvD+4dnni2JdrDFlHBF1ipku/eiLA7i40mxpqMbNihbTV1cqYpGXsVrIeJdf2ZC87uNV+ea2tdFO+JeyD4npBUGGp2HXiakeRFBqq4LabB1onwtq5RoSqvgBqgn2VmbOcAuV7xXJdK0nkmQJLId8TaTfFQpGxJy/q3QpLnV2Unfb9lkgb9scNGE/XA
iMjUgxVs6IheQ/qniBXJZXLJZhwINKgJdlNQbHKKAn2qxB5XYEtTkz3J7O6Nila0jGZlU3mR+qJ2M1s4UjMhnhJQXDcILWfDUkRIP1N8PpeAg3V/KaYV+n6xHRRS5BaZaKk5kySvKsoWQ4zi48LfizuQjTbExxdiXdDWJ6SxfWOdduAxDFMm9lMR5LULbPdU6DBmmlGXzxUm20lCSHpJW9FqjgyDVbcMmKS+T0J6cVml2+dAKhk7lDUzzZTlhrlexHgn4Zn3tWezk41Zg83c8kPhM0nYmJH9cUMa1ZBrU9EW3v2cOYIHN7ffXai+nxMWsUGeILZ66Y59YKoauoi78Rkn6vtgGIiDhO8vrFV909Yu+RMX0/nUFfRNYrRRED+0D36GCYEkQbMfrEwfEfQsM5zDuItS2FGcNwPET4g3W1TSu7eVlqHY/rj/puz0p+dr942og9mrF0D+j+IXvh5k/6WCLZdWeOb5YOx/toOZn5bJUX+3kddI5VnPBwidzw7bgXaMUCqmIqqjthO1g4DDYnfHF3EQEjVONTgNsBuFdzOU7sc6XFC64nsKFeuNntiaMx/RjyO3vsRsXoz0/61Arf5Xo9nvb9KJN+I++UVg65rSeaeZKxuaqKL+ZhXfBPrJ721FyATbOj75uTvEx/+vWComWnP/nwu/kaw7KtGJevzI9I2uUs7AG7q82dIXEPcdFcQIOqWuH2DxP3nL/8NAAD//4zwrRg=") SupportedMap = make(map[string]Spec) for f, v := range unpacked { diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config-filebeat.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config-filebeat.yml deleted file mode 100644 index 15013da3377..00000000000 --- a/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config-filebeat.yml +++ /dev/null @@ -1,27 +0,0 @@ -filebeat: - inputs: - - type: log - paths: - - /var/log/hello1.log - - /var/log/hello2.log - index: logs-generic-default - processors: - - add_fields: - target: "data_stream" - fields: - type: logs - dataset: generic - namespace: default - - add_fields: - target: "event" - fields: - dataset: generic -output: - elasticsearch: - hosts: - - 127.0.0.1:9200 - - 127.0.0.1:9300 - username: elastic - password: changeme - api_key: TiNAGG4BaaMdaH1tRfuU:KnR6yE41RrSowb0kQ0HWoA - ca_sha256: 7HIpactkIAq2Y49orFOOQKurWxmmSFZhBCoQYcRhJ3Y= diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config.yml deleted file mode 100644 index f4045032c07..00000000000 --- a/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: Production Website DB Servers - -outputs: - default: - type: elasticsearch - hosts: [127.0.0.1:9200, 127.0.0.1:9300] - username: elastic - password: changeme - api_key: TiNAGG4BaaMdaH1tRfuU:KnR6yE41RrSowb0kQ0HWoA - ca_sha256: 7HIpactkIAq2Y49orFOOQKurWxmmSFZhBCoQYcRhJ3Y= - - monitoring: - type: elasticsearch - api_key: VuaCfGcBCdbkQm-e5aOx:ui2lp2axTNmsyakw9tvNnw - hosts: ["monitoring:9200"] - ca_sha256: "7lHLiyp4J8m9kw38SJ7SURJP4bXRZv/BNxyyXkCcE/M=" - -inputs: - - type: logfile - streams: - - paths: - - /var/log/hello1.log - - /var/log/hello2.log - - type: apache/metrics - constraints: - - "validate_version(%{[agent.version]}, '1.0.0 - 7.0.0')" - data_stream.namespace: testing - streams: - - enabled: true - metricset: info - -agent.monitoring: - use_output: monitoring - -agent: - reload: 123 diff --git a/x-pack/elastic-agent/pkg/agent/transpiler/ast.go b/x-pack/elastic-agent/pkg/agent/transpiler/ast.go index 14ebba556eb..cfb02d1660a 100644 --- a/x-pack/elastic-agent/pkg/agent/transpiler/ast.go +++ b/x-pack/elastic-agent/pkg/agent/transpiler/ast.go @@ -15,10 +15,18 @@ import ( "strconv" "strings" + "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/eql" + "github.com/elastic/go-ucfg" ) -const selectorSep = "." +const ( + selectorSep = "." + // conditionKey is the name of the reserved key that will be computed using EQL to a boolean result. + // + // This makes the key "condition" inside of a dictionary a reserved name. 
+ conditionKey = "condition" +) // Selector defines a path to access an element in the Tree, currently selectors only works when the // target is a Dictionary, accessing list values are not currently supported by any methods using @@ -51,7 +59,7 @@ type Node interface { Hash() []byte // Apply apply the current vars, returning the new value for the node. - Apply(Vars) (Node, error) + Apply(*Vars) (Node, error) // Processors returns any attached processors, because of variable substitution. Processors() Processors @@ -133,14 +141,27 @@ func (d *Dict) Hash() []byte { } // Apply applies the vars to all the nodes in the dictionary. -func (d *Dict) Apply(vars Vars) (Node, error) { - nodes := make([]Node, len(d.value)) - for i, v := range d.value { - n, err := v.Apply(vars) +func (d *Dict) Apply(vars *Vars) (Node, error) { + nodes := make([]Node, 0, len(d.value)) + for _, v := range d.value { + k := v.(*Key) + n, err := k.Apply(vars) if err != nil { return nil, err } - nodes[i] = n + if n == nil { + continue + } + if k.name == conditionKey { + b := n.Value().(*BoolVal) + if !b.value { + // condition failed; whole dictionary should be removed + return nil, nil + } + // condition successful, but don't include condition in result + continue + } + nodes = append(nodes, n) } return &Dict{nodes, nil}, nil } @@ -230,14 +251,30 @@ func (k *Key) Hash() []byte { } // Apply applies the vars to the value. -func (k *Key) Apply(vars Vars) (Node, error) { +func (k *Key) Apply(vars *Vars) (Node, error) { if k.value == nil { return k, nil } + if k.name == conditionKey { + switch v := k.value.(type) { + case *BoolVal: + return k, nil + case *StrVal: + cond, err := eql.Eval(v.value, vars) + if err != nil { + return nil, fmt.Errorf(`condition "%s" evaluation failed: %s`, v.value, err) + } + return &Key{k.name, NewBoolVal(cond)}, nil + } + return nil, fmt.Errorf("condition key's value must be a string; recieved %T", k.value) + } v, err := k.value.Apply(vars) if err != nil { return nil, err } + if v == nil { + return nil, nil + } return &Key{k.name, v}, nil } @@ -319,14 +356,17 @@ func (l *List) Clone() Node { } // Apply applies the vars to all nodes in the list. -func (l *List) Apply(vars Vars) (Node, error) { - nodes := make([]Node, len(l.value)) - for i, v := range l.value { +func (l *List) Apply(vars *Vars) (Node, error) { + nodes := make([]Node, 0, len(l.value)) + for _, v := range l.value { n, err := v.Apply(vars) if err != nil { return nil, err } - nodes[i] = n + if n == nil { + continue + } + nodes = append(nodes, n) } return NewList(nodes), nil } @@ -386,7 +426,7 @@ func (s *StrVal) Hash() []byte { } // Apply applies the vars to the string value. -func (s *StrVal) Apply(vars Vars) (Node, error) { +func (s *StrVal) Apply(vars *Vars) (Node, error) { return vars.Replace(s.value) } @@ -432,7 +472,7 @@ func (s *IntVal) Clone() Node { } // Apply does nothing. -func (s *IntVal) Apply(_ Vars) (Node, error) { +func (s *IntVal) Apply(_ *Vars) (Node, error) { return s, nil } @@ -488,7 +528,7 @@ func (s *UIntVal) Hash() []byte { } // Apply does nothing. -func (s *UIntVal) Apply(_ Vars) (Node, error) { +func (s *UIntVal) Apply(_ *Vars) (Node, error) { return s, nil } @@ -540,7 +580,7 @@ func (s *FloatVal) Hash() []byte { } // Apply does nothing. -func (s *FloatVal) Apply(_ Vars) (Node, error) { +func (s *FloatVal) Apply(_ *Vars) (Node, error) { return s, nil } @@ -597,7 +637,7 @@ func (s *BoolVal) Hash() []byte { } // Apply does nothing. 
-func (s *BoolVal) Apply(_ Vars) (Node, error) { +func (s *BoolVal) Apply(_ *Vars) (Node, error) { return s, nil } @@ -771,7 +811,7 @@ func (a *AST) MarshalJSON() ([]byte, error) { } // Apply applies the variables to the replacement in the AST. -func (a *AST) Apply(vars Vars) error { +func (a *AST) Apply(vars *Vars) error { n, err := a.root.Apply(vars) if err != nil { return err @@ -780,6 +820,26 @@ func (a *AST) Apply(vars Vars) error { return nil } +// Lookup looks for a value from the AST. +// +// Return type is in the native form and not in the Node types from the AST. +func (a *AST) Lookup(name string) (interface{}, bool) { + node, ok := Lookup(a, name) + if !ok { + return nil, false + } + _, isKey := node.(*Key) + if isKey { + // matched on a key, return the value + node = node.Value().(Node) + } + + m := &MapVisitor{} + a.dispatch(node, m) + + return m.Content, true +} + func splitPath(s Selector) []string { if s == "" { return nil diff --git a/x-pack/elastic-agent/pkg/agent/transpiler/ast_test.go b/x-pack/elastic-agent/pkg/agent/transpiler/ast_test.go index 742850bdeb4..4c56b993e64 100644 --- a/x-pack/elastic-agent/pkg/agent/transpiler/ast_test.go +++ b/x-pack/elastic-agent/pkg/agent/transpiler/ast_test.go @@ -696,98 +696,92 @@ func TestAST_Apply(t *testing.T) { testcases := map[string]struct { input map[string]interface{} expected *AST - vars Vars + vars *Vars matchErr bool }{ - //"2 vars missing with default": { - // input: map[string]interface{}{ - // "inputs": map[string]interface{}{ - // "type": "log/docker", - // "paths": []string{"/var/log/${var1.key1}", "/var/log/${var1.missing|'other'}"}, - // }, - // }, - // expected: &AST{ - // root: &Dict{ - // value: []Node{ - // &Key{ - // name: "inputs", - // value: NewDict( - // []Node{ - // &Key{ - // name: "paths", - // value: &List{ - // value: []Node{ - // &StrVal{value: "/var/log/value1"}, - // &StrVal{value: "/var/log/other"}, - // }, - // }, - // }, - // &Key{name: "type", value: &StrVal{value: "log/docker"}}, - // }), - // }, - // }, - // }, - // }, - // vars: Vars{ - // Mapping: map[string]interface{}{ - // "var1": map[string]interface{}{ - // "key1": "value1", - // }, - // }, - // }, - //}, - //"2 vars missing no default": { - // input: map[string]interface{}{ - // "inputs": map[string]interface{}{ - // "type": "log/docker", - // "paths": []string{"/var/log/${var1.key1}", "/var/log/${var1.missing}"}, - // }, - // }, - // vars: Vars{ - // Mapping: map[string]interface{}{ - // "var1": map[string]interface{}{ - // "key1": "value1", - // }, - // }, - // }, - // matchErr: true, - //}, - //"vars not string": { - // input: map[string]interface{}{ - // "inputs": map[string]interface{}{ - // "type": "log/docker", - // "paths": []string{"/var/log/${var1.key1}"}, - // }, - // }, - // expected: &AST{ - // root: &Dict{ - // value: []Node{ - // &Key{ - // name: "inputs", - // value: NewDict( - // []Node{ - // &Key{ - // name: "paths", - // value: &List{ - // value: []Node{ - // &StrVal{value: "/var/log/1"}, - // }, - // }, - // }, - // &Key{name: "type", value: &StrVal{value: "log/docker"}}, - // }), - // }, - // }, - // }, - // }, - // vars: Vars{ - // Mapping: map[string]interface{}{ - // "var1": map[string]interface{}{ - // "key1": 1, - // }, - // }, - // }, - //}, + "2 vars missing with default": { + input: map[string]interface{}{ + "inputs": map[string]interface{}{ + "type": "log/docker", + "paths": []string{"/var/log/${var1.key1}", "/var/log/${var1.missing|'other'}"}, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + 
&Key{ + name: "inputs", + value: NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/value1"}, + &StrVal{value: "/var/log/other"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "log/docker"}}, + }), + }, + }, + }, + }, + vars: mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "key1": "value1", + }, + }), + }, + "2 vars missing no default": { + input: map[string]interface{}{ + "inputs": map[string]interface{}{ + "type": "log/docker", + "paths": []string{"/var/log/${var1.key1}", "/var/log/${var1.missing}"}, + }, + }, + vars: mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "key1": "value1", + }, + }), + matchErr: true, + }, + "vars not string": { + input: map[string]interface{}{ + "inputs": map[string]interface{}{ + "type": "log/docker", + "paths": []string{"/var/log/${var1.key1}"}, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + &Key{ + name: "inputs", + value: NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/1"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "log/docker"}}, + }), + }, + }, + }, + }, + vars: mustMakeVars(map[string]interface{}{ + "var1": map[string]interface{}{ + "key1": 1, + }, + }), + }, "vars replace with object": { input: map[string]interface{}{ "inputs": map[string]interface{}{ @@ -850,16 +844,355 @@ func TestAST_Apply(t *testing.T) { }, }, }, - vars: Vars{ - Mapping: map[string]interface{}{ - "host": map[string]interface{}{ - "labels": []string{ - "label1", - "label2", + vars: mustMakeVars(map[string]interface{}{ + "host": map[string]interface{}{ + "labels": []string{ + "label1", + "label2", + }, + }, + }), + }, + "condition false str removes dict from list": { + input: map[string]interface{}{ + "inputs": []map[string]interface{}{ + { + "type": "logfile", + "paths": []string{"/var/log/syslog"}, + }, + { + "type": "logfile", + "paths": []string{"/var/log/other"}, + "condition": "false", + }, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + &Key{ + name: "inputs", + value: NewList([]Node{ + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/syslog"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + }), + }, + }, + }, + }, + }, + "condition false removes dict from list": { + input: map[string]interface{}{ + "inputs": []map[string]interface{}{ + { + "type": "logfile", + "paths": []string{"/var/log/syslog"}, + }, + { + "type": "logfile", + "paths": []string{"/var/log/other"}, + "condition": false, + }, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + &Key{ + name: "inputs", + value: NewList([]Node{ + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/syslog"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + }), + }, + }, + }, + }, + }, + "condition true string keeps dict in list w/o condition key": { + input: map[string]interface{}{ + "inputs": []map[string]interface{}{ + { + "type": "logfile", + "paths": []string{"/var/log/syslog"}, + }, + { + "type": "logfile", + "paths": []string{"/var/log/other"}, + "condition": "true", + }, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + &Key{ + name: "inputs", + value: NewList([]Node{ + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/syslog"}, + }, + }, + }, + &Key{name: "type", value: 
&StrVal{value: "logfile"}}, + }), + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/other"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + }), + }, + }, + }, + }, + }, + "condition true keeps dict in list w/o condition key": { + input: map[string]interface{}{ + "inputs": []map[string]interface{}{ + { + "type": "logfile", + "paths": []string{"/var/log/syslog"}, + }, + { + "type": "logfile", + "paths": []string{"/var/log/other"}, + "condition": true, + }, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + &Key{ + name: "inputs", + value: NewList([]Node{ + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/syslog"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/other"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + }), + }, + }, + }, + }, + }, + "condition eval keeps dict in list w/o condition key": { + input: map[string]interface{}{ + "inputs": []map[string]interface{}{ + { + "type": "logfile", + "paths": []string{"/var/log/syslog"}, + }, + { + "type": "logfile", + "paths": []string{"/var/log/other"}, + "condition": "arrayContains(${host.labels}, 'label2')", + }, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + &Key{ + name: "inputs", + value: NewList([]Node{ + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/syslog"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/other"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + }), + }, + }, + }, + }, + vars: mustMakeVars(map[string]interface{}{ + "host": map[string]interface{}{ + "labels": []string{ + "label1", + "label2", + }, + }, + }), + }, + "condition eval removes dict from list": { + input: map[string]interface{}{ + "inputs": []map[string]interface{}{ + { + "type": "logfile", + "paths": []string{"/var/log/syslog"}, + }, + { + "type": "logfile", + "paths": []string{"/var/log/other"}, + "condition": "arrayContains(${host.labels}, 'missing')", + }, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + &Key{ + name: "inputs", + value: NewList([]Node{ + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/syslog"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + }), }, }, }, }, + vars: mustMakeVars(map[string]interface{}{ + "host": map[string]interface{}{ + "labels": []string{ + "label1", + "label2", + }, + }, + }), + }, + "condition eval removes dict from dict": { + input: map[string]interface{}{ + "inputs": []map[string]interface{}{ + { + "type": "logfile", + "paths": []string{"/var/log/syslog"}, + }, + { + "type": "logfile", + "paths": []string{"/var/log/other"}, + "nested": map[string]interface{}{ + "removed": "whole dict removed", + "condition": "arrayContains(${host.labels}, 'missing')", + }, + }, + }, + }, + expected: &AST{ + root: &Dict{ + value: []Node{ + &Key{ + name: "inputs", + value: NewList([]Node{ + NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/syslog"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + 
NewDict( + []Node{ + &Key{ + name: "paths", + value: &List{ + value: []Node{ + &StrVal{value: "/var/log/other"}, + }, + }, + }, + &Key{name: "type", value: &StrVal{value: "logfile"}}, + }), + }), + }, + }, + }, + }, + vars: mustMakeVars(map[string]interface{}{ + "host": map[string]interface{}{ + "labels": []string{ + "label1", + "label2", + }, + }, + }), }, } @@ -1281,3 +1614,11 @@ func TestHash(t *testing.T) { }) } } + +func mustMakeVars(mapping map[string]interface{}) *Vars { + v, err := NewVars(mapping) + if err != nil { + panic(err) + } + return v +} diff --git a/x-pack/elastic-agent/pkg/agent/transpiler/vars.go b/x-pack/elastic-agent/pkg/agent/transpiler/vars.go index f5b7b9922d3..698847edd16 100644 --- a/x-pack/elastic-agent/pkg/agent/transpiler/vars.go +++ b/x-pack/elastic-agent/pkg/agent/transpiler/vars.go @@ -9,8 +9,6 @@ import ( "regexp" "strings" "unicode" - - "github.com/elastic/go-ucfg" ) var varsRegex = regexp.MustCompile(`\${([\p{L}\d\s\\\-_|.'"]*)}`) @@ -20,19 +18,28 @@ var ErrNoMatch = fmt.Errorf("no matching vars") // Vars is a context of variables that also contain a list of processors that go with the mapping. type Vars struct { - Mapping map[string]interface{} + tree *AST + processorsKey string + processors Processors +} - ProcessorsKey string - Processors Processors +// NewVars returns a new instance of vars. +func NewVars(mapping map[string]interface{}) (*Vars, error) { + return NewVarsWithProcessors(mapping, "", nil) } -// Replace returns a new value based on variable replacement. -func (v *Vars) Replace(value string) (Node, error) { - var processors []map[string]interface{} - c, err := ucfg.NewFrom(v.Mapping, ucfg.PathSep(".")) +// NewVarsWithProcessors returns a new instance of vars with attachment of processors. +func NewVarsWithProcessors(mapping map[string]interface{}, processorKey string, processors Processors) (*Vars, error) { + tree, err := NewAST(mapping) if err != nil { return nil, err } + return &Vars{tree, processorKey, processors}, nil +} + +// Replace returns a new value based on variable replacement. 
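+//
+// A rough sketch of the replacement behavior implemented below (the mapping and
+// values here are hypothetical; the default-value, whole-object, and no-match
+// cases are exercised by the tests in this patch):
+//
+//	vars built from: {"host": {"name": "web-01", "labels": ["a", "b"]}}
+//	"prefix-${host.name}"         -> StrVal "prefix-web-01"
+//	"${host.labels}"              -> the labels list node (complete replacement of the object)
+//	"${host.missing|'fallback'}"  -> StrVal "fallback" (default value)
+//	"${host.missing}"             -> error: no matching vars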
+func (v *Vars) Replace(value string) (Node, error) { + var processors Processors matchIdxs := varsRegex.FindAllSubmatchIndex([]byte(value), -1) if !validBrackets(value, matchIdxs) { return nil, fmt.Errorf("starting ${ is missing ending }") @@ -53,27 +60,19 @@ func (v *Vars) Replace(value string) (Node, error) { result += value[lastIndex:r[0]] + val.Value() set = true case *varString: - if r[i] == 0 && r[i+1] == len(value) { - // possible for complete replacement of object, because the variable - // is not inside of a string - child, err := c.Child(val.Value(), -1, ucfg.PathSep(".")) - if err == nil { - ast, err := NewASTFromConfig(child) - if err == nil { - if v.ProcessorsKey != "" && varPrefixMatched(val.Value(), v.ProcessorsKey) { - processors = v.Processors - } - return attachProcessors(ast.root, processors), nil - } + node, ok := Lookup(v.tree, val.Value()) + if ok { + node := nodeToValue(node) + if v.processorsKey != "" && varPrefixMatched(val.Value(), v.processorsKey) { + processors = v.processors } - } - replace, err := c.String(val.Value(), -1, ucfg.PathSep(".")) - if err == nil { - result += value[lastIndex:r[0]] + replace - set = true - if v.ProcessorsKey != "" && varPrefixMatched(val.Value(), v.ProcessorsKey) { - processors = v.Processors + if r[i] == 0 && r[i+1] == len(value) { + // possible for complete replacement of object, because the variable + // is not inside of a string + return attachProcessors(node, processors), nil } + result += value[lastIndex:r[0]] + node.String() + set = true } } if set { @@ -89,6 +88,20 @@ func (v *Vars) Replace(value string) (Node, error) { return NewStrValWithProcessors(result+value[lastIndex:], processors), nil } +// Lookup returns the value from the vars. +func (v *Vars) Lookup(name string) (interface{}, bool) { + return v.tree.Lookup(name) +} + +// nodeToValue ensures that the node is an actual value. +func nodeToValue(node Node) Node { + switch n := node.(type) { + case *Key: + return n.value.(Node) + } + return node +} + // validBrackets returns true when all starting {$ have a matching ending }. 
func validBrackets(s string, matchIdxs [][]int) bool { result := "" diff --git a/x-pack/elastic-agent/pkg/agent/transpiler/vars_test.go b/x-pack/elastic-agent/pkg/agent/transpiler/vars_test.go index 31249316099..0b6566a7a94 100644 --- a/x-pack/elastic-agent/pkg/agent/transpiler/vars_test.go +++ b/x-pack/elastic-agent/pkg/agent/transpiler/vars_test.go @@ -12,25 +12,23 @@ import ( ) func TestVars_Replace(t *testing.T) { - vars := &Vars{ - Mapping: map[string]interface{}{ - "un-der_score": map[string]interface{}{ - "key1": "data1", - "key2": "data2", - "list": []string{ - "array1", - "array2", - }, - "dict": map[string]interface{}{ - "key1": "value1", - "key2": "value2", - }, + vars := mustMakeVars(map[string]interface{}{ + "un-der_score": map[string]interface{}{ + "key1": "data1", + "key2": "data2", + "list": []string{ + "array1", + "array2", }, - "other": map[string]interface{}{ - "data": "info", + "dict": map[string]interface{}{ + "key1": "value1", + "key2": "value2", }, }, - } + "other": map[string]interface{}{ + "data": "info", + }, + }) tests := []struct { Input string Result Node @@ -199,8 +197,8 @@ func TestVars_ReplaceWithProcessors(t *testing.T) { }, }, } - vars := &Vars{ - Mapping: map[string]interface{}{ + vars, err := NewVarsWithProcessors( + map[string]interface{}{ "testing": map[string]interface{}{ "key1": "data1", }, @@ -216,9 +214,9 @@ func TestVars_ReplaceWithProcessors(t *testing.T) { }, }, }, - ProcessorsKey: "dynamic", - Processors: processers, - } + "dynamic", + processers) + require.NoError(t, err) res, err := vars.Replace("${testing.key1}") require.NoError(t, err) diff --git a/x-pack/elastic-agent/pkg/boolexp/Boolexp.g4 b/x-pack/elastic-agent/pkg/boolexp/Boolexp.g4 deleted file mode 100644 index fd587c1c8b0..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/Boolexp.g4 +++ /dev/null @@ -1,55 +0,0 @@ -// boolexp.g4 -grammar Boolexp; - -// Tokens -EQ: '=='; -NEQ: '!='; -GT: '>'; -LT: '<'; -GTE: '>='; -LTE: '<='; -AND: 'and' | 'AND' | '&&'; -OR: 'or' | 'OR' | '||'; -TRUE: 'true' | 'TRUE'; -FALSE: 'false' | 'FALSE'; -FLOAT: [0-9]+ '.' [0-9]+; -NUMBER: [0-9]+; -WHITESPACE: [ \r\n\t]+ -> skip; -NOT: 'NOT' | '!' | 'not'; -VARIABLE: BEGIN_VARIABLE [a-zA-Z0-9_.]+('.'[a-zZ0-9_]+)* END_VARIABLE; -METHODNAME: [a-zA-Z_] [a-zA-Z0-9_]*; -TEXT : '\'' ~[\r\n']* '\''; -LPAR: '('; -RPAR: ')'; -fragment BEGIN_VARIABLE: '%{['; -fragment END_VARIABLE: ']}'; - -expList: exp EOF; - -exp -: LPAR exp RPAR # ExpInParen -| NOT exp # ExpNot -| left=exp EQ right=exp # ExpArithmeticEQ -| left=exp NEQ right=exp # ExpArithmeticNEQ -| left=exp LTE right=exp # ExpArithmeticLTE -| left=exp GTE right=exp # ExpArithmeticGTE -| left=exp LT right=exp # ExpArithmeticLT -| left=exp GT right=exp # ExpArithmeticGT -| left=exp AND right=exp # ExpLogicalAnd -| left=exp OR right=exp # ExpLogicalOR -| boolean # ExpBoolean -| VARIABLE # ExpVariable -| METHODNAME LPAR arguments? RPAR # ExpFunction -| TEXT # ExpText -| FLOAT # ExpFloat -| NUMBER # ExpNumber -; - -boolean -: TRUE | FALSE -; - -arguments -: exp( ',' exp)* -; - diff --git a/x-pack/elastic-agent/pkg/boolexp/boolexp_test.go b/x-pack/elastic-agent/pkg/boolexp/boolexp_test.go deleted file mode 100644 index 44faa8e2bb0..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/boolexp_test.go +++ /dev/null @@ -1,273 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. 
- -package boolexp - -import ( - "fmt" - "os" - "strings" - "testing" - - "github.com/antlr/antlr4/runtime/Go/antlr" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/boolexp/parser" -) - -var showDebug = lookupEnvOrDefault("DEBUG", "0") - -type testVarStore struct { - vars map[string]interface{} -} - -func (s *testVarStore) Lookup(v string) (interface{}, bool) { - val, ok := s.vars[v] - return val, ok -} - -func TestBoolexp(t *testing.T) { - testcases := []struct { - expression string - result bool - err bool - }{ - // Variables - {expression: "%{[hello.var]} == 'hello'", result: true}, - {expression: "%{[hello.var]} != 'hello'", result: false}, - {expression: "contains(%{[hello.var]}, 'hell')", result: true}, - - {expression: "true", result: true}, - {expression: "false", result: false}, - {expression: "!false", result: true}, - {expression: "!true", result: false}, - {expression: "!(1 == 1)", result: false}, - {expression: "NOT false", result: true}, - {expression: "NOT true", result: false}, - {expression: "not false", result: true}, - {expression: "not true", result: false}, - {expression: "NOT (1 == 1)", result: false}, - - {expression: "1 == 1", result: true}, - {expression: "1 == 2", result: false}, - {expression: "1 != 2", result: true}, - {expression: "1 != 1", result: false}, - {expression: "'hello' == 'hello'", result: true}, - {expression: "'hello' == 'hola'", result: false}, - - // and - {expression: "(1 == 1) AND (2 == 2)", result: true}, - {expression: "(1 == 4) AND (2 == 2)", result: false}, - {expression: "(1 == 1) AND (2 == 3)", result: false}, - {expression: "(1 == 5) AND (2 == 3)", result: false}, - - {expression: "1 == 1 AND 2 == 2", result: true}, - {expression: "1 == 4 AND 2 == 2", result: false}, - {expression: "1 == 1 AND 2 == 3", result: false}, - {expression: "1 == 5 AND 2 == 3", result: false}, - - {expression: "(1 == 1) and (2 == 2)", result: true}, - {expression: "(1 == 4) and (2 == 2)", result: false}, - {expression: "(1 == 1) and (2 == 3)", result: false}, - {expression: "(1 == 5) and (2 == 3)", result: false}, - - {expression: "(1 == 1) && (2 == 2)", result: true}, - {expression: "(1 == 4) && (2 == 2)", result: false}, - {expression: "(1 == 1) && (2 == 3)", result: false}, - {expression: "(1 == 5) && (2 == 3)", result: false}, - - // or - {expression: "(1 == 1) OR (2 == 2)", result: true}, - {expression: "(1 == 1) OR (3 == 2)", result: true}, - {expression: "(1 == 2) OR (2 == 2)", result: true}, - {expression: "(1 == 2) OR (2 == 2)", result: true}, - {expression: "(1 == 2) OR (1 == 2)", result: false}, - - {expression: "(1 == 1) or (2 == 2)", result: true}, - {expression: "(1 == 1) or (3 == 2)", result: true}, - {expression: "(1 == 2) or (2 == 2)", result: true}, - {expression: "(1 == 2) or (2 == 2)", result: true}, - {expression: "(1 == 2) or (1 == 2)", result: false}, - - {expression: "(1 == 1) || (2 == 2)", result: true}, - {expression: "(1 == 1) || (3 == 2)", result: true}, - {expression: "(1 == 2) || (2 == 2)", result: true}, - {expression: "(1 == 2) || (2 == 2)", result: true}, - {expression: "(1 == 2) || (1 == 2)", result: false}, - - // mixed - {expression: "((1 == 1) AND (2 == 2)) OR (2 != 3)", result: true}, - {expression: "(1 == 1 OR 2 == 2) AND 2 != 3", result: true}, - {expression: "((1 == 1) AND (2 == 2)) OR (2 != 3)", result: true}, - {expression: "1 == 1 OR 2 == 2 AND 2 != 3", result: true}, - - // functions - {expression: 
"len('hello') == 5", result: true}, - {expression: "len('hello') != 1", result: true}, - {expression: "len('hello') == 1", result: false}, - {expression: "(len('hello') == 5) AND (len('Hi') == 2)", result: true}, - {expression: "len('hello') == size('hello')", result: true}, - {expression: "len('hello') == size('hi')", result: false}, - {expression: "contains('hello', 'eial')", result: false}, - {expression: "contains('hello', 'hel')", result: true}, - {expression: "!contains('hello', 'hel')", result: false}, - {expression: "contains('hello', 'hel') == true", result: true}, - {expression: "contains('hello', 'hel') == false", result: false}, - {expression: "countArgs('A', 'B', 'C', 'D', 'E', 'F') == 6", result: true}, - {expression: "countArgs('A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J') == 10", result: true}, - - // integers - {expression: "1 < 5", result: true}, - {expression: "10 < 5", result: false}, - {expression: "1 > 5", result: false}, - {expression: "10 > 5", result: true}, - {expression: "1 <= 5", result: true}, - {expression: "5 <= 5", result: true}, - {expression: "10 <= 5", result: false}, - {expression: "10 >= 5", result: true}, - {expression: "5 >= 5", result: true}, - {expression: "4 >= 5", result: false}, - - // Floats - {expression: "1 == 1.0", result: true}, - {expression: "1.0 == 1.0", result: true}, - {expression: "1.0 == 1", result: true}, - {expression: "1 != 2.0", result: true}, - {expression: "1.0 != 2.0", result: true}, - {expression: "1.0 != 2", result: true}, - {expression: "1 < 5.0", result: true}, - {expression: "10 < 5.0", result: false}, - {expression: "1 > 5.0", result: false}, - {expression: "10 > 5.0", result: true}, - {expression: "1 <= 5.0", result: true}, - {expression: "10 <= 5.0", result: false}, - {expression: "1 >= 5.0", result: false}, - {expression: "10 >= 5.0", result: true}, - {expression: "10 >= 10.0", result: true}, - {expression: "10 <= 10.0", result: true}, - - // Bad expression and malformed expression - {expression: "contains('hello')", err: true}, - {expression: "contains()", err: true}, - {expression: "contains()", err: true}, - {expression: "donotexist()", err: true}, - } - - store := &testVarStore{ - vars: map[string]interface{}{ - "hello.var": "hello", - }, - } - - fn := func(args []interface{}) (interface{}, error) { - if len(args) != 1 { - return nil, fmt.Errorf("expecting 1 argument received %d", len(args)) - } - val, ok := args[0].(string) - if !ok { - return nil, fmt.Errorf("expecting a string received %T", args[0]) - } - return len(val), nil - } - - methods := NewMethodsReg() - methods.Register("len", fn) - // test function aliasing - methods.Register("size", fn) - // test multiples arguments function. 
- methods.Register("contains", func(args []interface{}) (interface{}, error) { - if len(args) != 2 { - return nil, fmt.Errorf("expecting 2 arguments received %d", len(args)) - } - - haystack, ok := args[0].(string) - if !ok { - return nil, fmt.Errorf("args 1 must be a string and received %T", args[0]) - } - - needle, ok := args[1].(string) - if !ok { - return nil, fmt.Errorf("args 2 must be a string and received %T", args[0]) - } - - return strings.Contains(haystack, needle), nil - }, - ) - - methods.Register("countArgs", func(args []interface{}) (interface{}, error) { - return len(args), nil - }) - - for _, test := range testcases { - test := test - var title string - if test.err { - title = fmt.Sprintf("%s failed parsing", test.expression) - } else { - title = fmt.Sprintf("%s => return %v", test.expression, test.result) - } - t.Run(title, func(t *testing.T) { - if showDebug == "1" { - debug(test.expression) - } - - r, err := Eval(test.expression, methods, store) - - if test.err { - require.Error(t, err) - return - } - - assert.Equal(t, test.result, r) - }) - } -} - -func debug(expression string) { - raw := antlr.NewInputStream(expression) - - lexer := parser.NewBoolexpLexer(raw) - for { - t := lexer.NextToken() - if t.GetTokenType() == antlr.TokenEOF { - break - } - fmt.Printf("%s (%q)\n", - lexer.SymbolicNames[t.GetTokenType()], t.GetText()) - } -} - -var result bool - -func BenchmarkEval(b *testing.B) { - fn := func(args []interface{}) (interface{}, error) { - if len(args) != 1 { - return nil, fmt.Errorf("expecting 1 argument received %d", len(args)) - } - val, ok := args[0].(string) - if !ok { - return nil, fmt.Errorf("expecting a string received %T", args[0]) - } - return len(val), nil - } - - methods := NewMethodsReg() - methods.Register("len", fn) - - expression, _ := New("(len('hello') == 5) AND (len('Hi') == 2)", methods) - - var r bool - for n := 0; n < b.N; n++ { - r, _ = expression.Eval(nil) - } - result = r -} - -func lookupEnvOrDefault(name, d string) string { - if v, ok := os.LookupEnv(name); ok { - return v - } - return d -} diff --git a/x-pack/elastic-agent/pkg/boolexp/methods.go b/x-pack/elastic-agent/pkg/boolexp/methods.go deleted file mode 100644 index eada71e1bde..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/methods.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -package boolexp - -import "fmt" - -// CallFunc is a function called while the expression evaluation is done, the function is responsable -// of doing the type conversion and allow checking the arity of the function. -type CallFunc func(args []interface{}) (interface{}, error) - -// Method encapsulate a method. -type Method struct { - Name string - Func CallFunc -} - -// MethodsReg is the registry of the methods, when the evaluation is done and a function is found we -// will lookup the function in the registry. If the method is found the methods will be executed, -// otherwise the evaluation will fail. -// -// NOTE: Define methods must have a unique name and capitalization is important. -type MethodsReg struct { - methods map[string]Method -} - -// Register registers a new methods, the method will return an error if the method with the same -// name already exists in the registry. 
-func (m *MethodsReg) Register(name string, f CallFunc) error { - _, ok := m.methods[name] - if ok { - return fmt.Errorf("method %s already exists", name) - } - m.methods[name] = Method{Name: name, Func: f} - return nil -} - -// MustRegister registers a new methods and will panic on any error. -func (m *MethodsReg) MustRegister(name string, f CallFunc) { - err := m.Register(name, f) - if err != nil { - panic(err) - } -} - -// Lookup search a methods by name and return it, will return false if the method is not found. -// -// NOTE: When looking methods name capitalization is important. -func (m *MethodsReg) Lookup(name string) (Method, bool) { - v, ok := m.methods[name] - return v, ok -} - -// NewMethodsReg returns a new methods registry. -func NewMethodsReg() *MethodsReg { - return &MethodsReg{methods: make(map[string]Method)} -} diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/Boolexp.interp b/x-pack/elastic-agent/pkg/boolexp/parser/Boolexp.interp deleted file mode 100644 index b3a5b7da1f5..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/parser/Boolexp.interp +++ /dev/null @@ -1,55 +0,0 @@ -token literal names: -null -',' -'==' -'!=' -'>' -'<' -'>=' -'<=' -null -null -null -null -null -null -null -null -null -null -null -'(' -')' - -token symbolic names: -null -null -EQ -NEQ -GT -LT -GTE -LTE -AND -OR -TRUE -FALSE -FLOAT -NUMBER -WHITESPACE -NOT -VARIABLE -METHODNAME -TEXT -LPAR -RPAR - -rule names: -expList -exp -boolean -arguments - - -atn: -[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 22, 73, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 26, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 32, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 7, 3, 58, 10, 3, 12, 3, 14, 3, 61, 11, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 7, 5, 68, 10, 5, 12, 5, 14, 5, 71, 11, 5, 3, 5, 2, 3, 4, 6, 2, 4, 6, 8, 2, 3, 3, 2, 12, 13, 2, 85, 2, 10, 3, 2, 2, 2, 4, 31, 3, 2, 2, 2, 6, 62, 3, 2, 2, 2, 8, 64, 3, 2, 2, 2, 10, 11, 5, 4, 3, 2, 11, 12, 7, 2, 2, 3, 12, 3, 3, 2, 2, 2, 13, 14, 8, 3, 1, 2, 14, 15, 7, 21, 2, 2, 15, 16, 5, 4, 3, 2, 16, 17, 7, 22, 2, 2, 17, 32, 3, 2, 2, 2, 18, 19, 7, 17, 2, 2, 19, 32, 5, 4, 3, 17, 20, 32, 5, 6, 4, 2, 21, 32, 7, 18, 2, 2, 22, 23, 7, 19, 2, 2, 23, 25, 7, 21, 2, 2, 24, 26, 5, 8, 5, 2, 25, 24, 3, 2, 2, 2, 25, 26, 3, 2, 2, 2, 26, 27, 3, 2, 2, 2, 27, 32, 7, 22, 2, 2, 28, 32, 7, 20, 2, 2, 29, 32, 7, 14, 2, 2, 30, 32, 7, 15, 2, 2, 31, 13, 3, 2, 2, 2, 31, 18, 3, 2, 2, 2, 31, 20, 3, 2, 2, 2, 31, 21, 3, 2, 2, 2, 31, 22, 3, 2, 2, 2, 31, 28, 3, 2, 2, 2, 31, 29, 3, 2, 2, 2, 31, 30, 3, 2, 2, 2, 32, 59, 3, 2, 2, 2, 33, 34, 12, 16, 2, 2, 34, 35, 7, 4, 2, 2, 35, 58, 5, 4, 3, 17, 36, 37, 12, 15, 2, 2, 37, 38, 7, 5, 2, 2, 38, 58, 5, 4, 3, 16, 39, 40, 12, 14, 2, 2, 40, 41, 7, 9, 2, 2, 41, 58, 5, 4, 3, 15, 42, 43, 12, 13, 2, 2, 43, 44, 7, 8, 2, 2, 44, 58, 5, 4, 3, 14, 45, 46, 12, 12, 2, 2, 46, 47, 7, 7, 2, 2, 47, 58, 5, 4, 3, 13, 48, 49, 12, 11, 2, 2, 49, 50, 7, 6, 2, 2, 50, 58, 5, 4, 3, 12, 51, 52, 12, 10, 2, 2, 52, 53, 7, 10, 2, 2, 53, 58, 5, 4, 3, 11, 54, 55, 12, 9, 2, 2, 55, 56, 7, 11, 2, 2, 56, 58, 5, 4, 3, 10, 57, 33, 3, 2, 2, 2, 57, 36, 3, 2, 2, 2, 57, 39, 3, 2, 2, 2, 57, 42, 3, 2, 2, 2, 57, 45, 3, 2, 2, 2, 57, 48, 3, 2, 2, 2, 57, 51, 3, 2, 2, 2, 57, 54, 3, 2, 2, 2, 58, 61, 3, 2, 2, 2, 59, 57, 3, 2, 2, 2, 59, 60, 3, 2, 2, 2, 60, 5, 3, 2, 2, 2, 61, 59, 3, 2, 2, 2, 62, 63, 9, 2, 2, 2, 63, 7, 
3, 2, 2, 2, 64, 69, 5, 4, 3, 2, 65, 66, 7, 3, 2, 2, 66, 68, 5, 4, 3, 2, 67, 65, 3, 2, 2, 2, 68, 71, 3, 2, 2, 2, 69, 67, 3, 2, 2, 2, 69, 70, 3, 2, 2, 2, 70, 9, 3, 2, 2, 2, 71, 69, 3, 2, 2, 2, 7, 25, 31, 57, 59, 69] \ No newline at end of file diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/Boolexp.tokens b/x-pack/elastic-agent/pkg/boolexp/parser/Boolexp.tokens deleted file mode 100644 index 6892b3a80c1..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/parser/Boolexp.tokens +++ /dev/null @@ -1,29 +0,0 @@ -T__0=1 -EQ=2 -NEQ=3 -GT=4 -LT=5 -GTE=6 -LTE=7 -AND=8 -OR=9 -TRUE=10 -FALSE=11 -FLOAT=12 -NUMBER=13 -WHITESPACE=14 -NOT=15 -VARIABLE=16 -METHODNAME=17 -TEXT=18 -LPAR=19 -RPAR=20 -','=1 -'=='=2 -'!='=3 -'>'=4 -'<'=5 -'>='=6 -'<='=7 -'('=19 -')'=20 diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/BoolexpLexer.interp b/x-pack/elastic-agent/pkg/boolexp/parser/BoolexpLexer.interp deleted file mode 100644 index 11612b59eb9..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/parser/BoolexpLexer.interp +++ /dev/null @@ -1,79 +0,0 @@ -token literal names: -null -',' -'==' -'!=' -'>' -'<' -'>=' -'<=' -null -null -null -null -null -null -null -null -null -null -null -'(' -')' - -token symbolic names: -null -null -EQ -NEQ -GT -LT -GTE -LTE -AND -OR -TRUE -FALSE -FLOAT -NUMBER -WHITESPACE -NOT -VARIABLE -METHODNAME -TEXT -LPAR -RPAR - -rule names: -T__0 -EQ -NEQ -GT -LT -GTE -LTE -AND -OR -TRUE -FALSE -FLOAT -NUMBER -WHITESPACE -NOT -VARIABLE -METHODNAME -TEXT -LPAR -RPAR -BEGIN_VARIABLE -END_VARIABLE - -channel names: -DEFAULT_TOKEN_CHANNEL -HIDDEN - -mode names: -DEFAULT_MODE - -atn: -[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 22, 183, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 5, 9, 74, 10, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 5, 10, 82, 10, 10, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 5, 11, 92, 10, 11, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 5, 12, 104, 10, 12, 3, 13, 6, 13, 107, 10, 13, 13, 13, 14, 13, 108, 3, 13, 3, 13, 6, 13, 113, 10, 13, 13, 13, 14, 13, 114, 3, 14, 6, 14, 118, 10, 14, 13, 14, 14, 14, 119, 3, 15, 6, 15, 123, 10, 15, 13, 15, 14, 15, 124, 3, 15, 3, 15, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 5, 16, 136, 10, 16, 3, 17, 3, 17, 6, 17, 140, 10, 17, 13, 17, 14, 17, 141, 3, 17, 3, 17, 6, 17, 146, 10, 17, 13, 17, 14, 17, 147, 7, 17, 150, 10, 17, 12, 17, 14, 17, 153, 11, 17, 3, 17, 3, 17, 3, 18, 3, 18, 7, 18, 159, 10, 18, 12, 18, 14, 18, 162, 11, 18, 3, 19, 3, 19, 7, 19, 166, 10, 19, 12, 19, 14, 19, 169, 11, 19, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 2, 2, 24, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, 20, 39, 21, 41, 22, 43, 2, 45, 2, 3, 2, 9, 3, 2, 50, 59, 5, 2, 11, 12, 15, 15, 34, 34, 7, 2, 48, 48, 50, 59, 67, 92, 97, 97, 99, 124, 6, 2, 50, 59, 92, 92, 97, 97, 99, 124, 5, 2, 67, 92, 97, 97, 99, 124, 6, 2, 50, 59, 67, 92, 97, 97, 99, 124, 5, 2, 12, 12, 15, 15, 41, 41, 2, 197, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 
2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, 3, 47, 3, 2, 2, 2, 5, 49, 3, 2, 2, 2, 7, 52, 3, 2, 2, 2, 9, 55, 3, 2, 2, 2, 11, 57, 3, 2, 2, 2, 13, 59, 3, 2, 2, 2, 15, 62, 3, 2, 2, 2, 17, 73, 3, 2, 2, 2, 19, 81, 3, 2, 2, 2, 21, 91, 3, 2, 2, 2, 23, 103, 3, 2, 2, 2, 25, 106, 3, 2, 2, 2, 27, 117, 3, 2, 2, 2, 29, 122, 3, 2, 2, 2, 31, 135, 3, 2, 2, 2, 33, 137, 3, 2, 2, 2, 35, 156, 3, 2, 2, 2, 37, 163, 3, 2, 2, 2, 39, 172, 3, 2, 2, 2, 41, 174, 3, 2, 2, 2, 43, 176, 3, 2, 2, 2, 45, 180, 3, 2, 2, 2, 47, 48, 7, 46, 2, 2, 48, 4, 3, 2, 2, 2, 49, 50, 7, 63, 2, 2, 50, 51, 7, 63, 2, 2, 51, 6, 3, 2, 2, 2, 52, 53, 7, 35, 2, 2, 53, 54, 7, 63, 2, 2, 54, 8, 3, 2, 2, 2, 55, 56, 7, 64, 2, 2, 56, 10, 3, 2, 2, 2, 57, 58, 7, 62, 2, 2, 58, 12, 3, 2, 2, 2, 59, 60, 7, 64, 2, 2, 60, 61, 7, 63, 2, 2, 61, 14, 3, 2, 2, 2, 62, 63, 7, 62, 2, 2, 63, 64, 7, 63, 2, 2, 64, 16, 3, 2, 2, 2, 65, 66, 7, 99, 2, 2, 66, 67, 7, 112, 2, 2, 67, 74, 7, 102, 2, 2, 68, 69, 7, 67, 2, 2, 69, 70, 7, 80, 2, 2, 70, 74, 7, 70, 2, 2, 71, 72, 7, 40, 2, 2, 72, 74, 7, 40, 2, 2, 73, 65, 3, 2, 2, 2, 73, 68, 3, 2, 2, 2, 73, 71, 3, 2, 2, 2, 74, 18, 3, 2, 2, 2, 75, 76, 7, 113, 2, 2, 76, 82, 7, 116, 2, 2, 77, 78, 7, 81, 2, 2, 78, 82, 7, 84, 2, 2, 79, 80, 7, 126, 2, 2, 80, 82, 7, 126, 2, 2, 81, 75, 3, 2, 2, 2, 81, 77, 3, 2, 2, 2, 81, 79, 3, 2, 2, 2, 82, 20, 3, 2, 2, 2, 83, 84, 7, 118, 2, 2, 84, 85, 7, 116, 2, 2, 85, 86, 7, 119, 2, 2, 86, 92, 7, 103, 2, 2, 87, 88, 7, 86, 2, 2, 88, 89, 7, 84, 2, 2, 89, 90, 7, 87, 2, 2, 90, 92, 7, 71, 2, 2, 91, 83, 3, 2, 2, 2, 91, 87, 3, 2, 2, 2, 92, 22, 3, 2, 2, 2, 93, 94, 7, 104, 2, 2, 94, 95, 7, 99, 2, 2, 95, 96, 7, 110, 2, 2, 96, 97, 7, 117, 2, 2, 97, 104, 7, 103, 2, 2, 98, 99, 7, 72, 2, 2, 99, 100, 7, 67, 2, 2, 100, 101, 7, 78, 2, 2, 101, 102, 7, 85, 2, 2, 102, 104, 7, 71, 2, 2, 103, 93, 3, 2, 2, 2, 103, 98, 3, 2, 2, 2, 104, 24, 3, 2, 2, 2, 105, 107, 9, 2, 2, 2, 106, 105, 3, 2, 2, 2, 107, 108, 3, 2, 2, 2, 108, 106, 3, 2, 2, 2, 108, 109, 3, 2, 2, 2, 109, 110, 3, 2, 2, 2, 110, 112, 7, 48, 2, 2, 111, 113, 9, 2, 2, 2, 112, 111, 3, 2, 2, 2, 113, 114, 3, 2, 2, 2, 114, 112, 3, 2, 2, 2, 114, 115, 3, 2, 2, 2, 115, 26, 3, 2, 2, 2, 116, 118, 9, 2, 2, 2, 117, 116, 3, 2, 2, 2, 118, 119, 3, 2, 2, 2, 119, 117, 3, 2, 2, 2, 119, 120, 3, 2, 2, 2, 120, 28, 3, 2, 2, 2, 121, 123, 9, 3, 2, 2, 122, 121, 3, 2, 2, 2, 123, 124, 3, 2, 2, 2, 124, 122, 3, 2, 2, 2, 124, 125, 3, 2, 2, 2, 125, 126, 3, 2, 2, 2, 126, 127, 8, 15, 2, 2, 127, 30, 3, 2, 2, 2, 128, 129, 7, 80, 2, 2, 129, 130, 7, 81, 2, 2, 130, 136, 7, 86, 2, 2, 131, 136, 7, 35, 2, 2, 132, 133, 7, 112, 2, 2, 133, 134, 7, 113, 2, 2, 134, 136, 7, 118, 2, 2, 135, 128, 3, 2, 2, 2, 135, 131, 3, 2, 2, 2, 135, 132, 3, 2, 2, 2, 136, 32, 3, 2, 2, 2, 137, 139, 5, 43, 22, 2, 138, 140, 9, 4, 2, 2, 139, 138, 3, 2, 2, 2, 140, 141, 3, 2, 2, 2, 141, 139, 3, 2, 2, 2, 141, 142, 3, 2, 2, 2, 142, 151, 3, 2, 2, 2, 143, 145, 7, 48, 2, 2, 144, 146, 9, 5, 2, 2, 145, 144, 3, 2, 2, 2, 146, 147, 3, 2, 2, 2, 147, 145, 3, 2, 2, 2, 147, 148, 3, 2, 2, 2, 148, 150, 3, 2, 2, 2, 149, 143, 3, 2, 2, 2, 150, 153, 3, 2, 2, 2, 151, 149, 3, 2, 2, 2, 151, 152, 3, 2, 2, 2, 152, 154, 3, 2, 2, 2, 153, 151, 3, 2, 2, 2, 154, 155, 5, 45, 23, 2, 155, 34, 3, 2, 2, 2, 156, 160, 9, 6, 2, 2, 157, 159, 9, 7, 2, 2, 158, 157, 3, 2, 2, 2, 159, 162, 3, 2, 2, 2, 160, 158, 3, 2, 2, 2, 160, 161, 3, 2, 2, 2, 
161, 36, 3, 2, 2, 2, 162, 160, 3, 2, 2, 2, 163, 167, 7, 41, 2, 2, 164, 166, 10, 8, 2, 2, 165, 164, 3, 2, 2, 2, 166, 169, 3, 2, 2, 2, 167, 165, 3, 2, 2, 2, 167, 168, 3, 2, 2, 2, 168, 170, 3, 2, 2, 2, 169, 167, 3, 2, 2, 2, 170, 171, 7, 41, 2, 2, 171, 38, 3, 2, 2, 2, 172, 173, 7, 42, 2, 2, 173, 40, 3, 2, 2, 2, 174, 175, 7, 43, 2, 2, 175, 42, 3, 2, 2, 2, 176, 177, 7, 39, 2, 2, 177, 178, 7, 125, 2, 2, 178, 179, 7, 93, 2, 2, 179, 44, 3, 2, 2, 2, 180, 181, 7, 95, 2, 2, 181, 182, 7, 127, 2, 2, 182, 46, 3, 2, 2, 2, 17, 2, 73, 81, 91, 103, 108, 114, 119, 124, 135, 141, 147, 151, 160, 167, 3, 8, 2, 2] \ No newline at end of file diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/BoolexpLexer.tokens b/x-pack/elastic-agent/pkg/boolexp/parser/BoolexpLexer.tokens deleted file mode 100644 index 6892b3a80c1..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/parser/BoolexpLexer.tokens +++ /dev/null @@ -1,29 +0,0 @@ -T__0=1 -EQ=2 -NEQ=3 -GT=4 -LT=5 -GTE=6 -LTE=7 -AND=8 -OR=9 -TRUE=10 -FALSE=11 -FLOAT=12 -NUMBER=13 -WHITESPACE=14 -NOT=15 -VARIABLE=16 -METHODNAME=17 -TEXT=18 -LPAR=19 -RPAR=20 -','=1 -'=='=2 -'!='=3 -'>'=4 -'<'=5 -'>='=6 -'<='=7 -'('=19 -')'=20 diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_base_listener.go b/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_base_listener.go deleted file mode 100644 index 3956df34c1f..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_base_listener.go +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -// Code generated from Boolexp.g4 by ANTLR 4.7.2. DO NOT EDIT. - -package parser // Boolexp - -import "github.com/antlr/antlr4/runtime/Go/antlr" - -// BaseBoolexpListener is a complete listener for a parse tree produced by BoolexpParser. -type BaseBoolexpListener struct{} - -var _ BoolexpListener = &BaseBoolexpListener{} - -// VisitTerminal is called when a terminal node is visited. -func (s *BaseBoolexpListener) VisitTerminal(node antlr.TerminalNode) {} - -// VisitErrorNode is called when an error node is visited. -func (s *BaseBoolexpListener) VisitErrorNode(node antlr.ErrorNode) {} - -// EnterEveryRule is called when any rule is entered. -func (s *BaseBoolexpListener) EnterEveryRule(ctx antlr.ParserRuleContext) {} - -// ExitEveryRule is called when any rule is exited. -func (s *BaseBoolexpListener) ExitEveryRule(ctx antlr.ParserRuleContext) {} - -// EnterExpList is called when production expList is entered. -func (s *BaseBoolexpListener) EnterExpList(ctx *ExpListContext) {} - -// ExitExpList is called when production expList is exited. -func (s *BaseBoolexpListener) ExitExpList(ctx *ExpListContext) {} - -// EnterExpArithmeticNEQ is called when production ExpArithmeticNEQ is entered. -func (s *BaseBoolexpListener) EnterExpArithmeticNEQ(ctx *ExpArithmeticNEQContext) {} - -// ExitExpArithmeticNEQ is called when production ExpArithmeticNEQ is exited. -func (s *BaseBoolexpListener) ExitExpArithmeticNEQ(ctx *ExpArithmeticNEQContext) {} - -// EnterExpArithmeticEQ is called when production ExpArithmeticEQ is entered. -func (s *BaseBoolexpListener) EnterExpArithmeticEQ(ctx *ExpArithmeticEQContext) {} - -// ExitExpArithmeticEQ is called when production ExpArithmeticEQ is exited. 
-func (s *BaseBoolexpListener) ExitExpArithmeticEQ(ctx *ExpArithmeticEQContext) {} - -// EnterExpArithmeticGTE is called when production ExpArithmeticGTE is entered. -func (s *BaseBoolexpListener) EnterExpArithmeticGTE(ctx *ExpArithmeticGTEContext) {} - -// ExitExpArithmeticGTE is called when production ExpArithmeticGTE is exited. -func (s *BaseBoolexpListener) ExitExpArithmeticGTE(ctx *ExpArithmeticGTEContext) {} - -// EnterExpArithmeticLTE is called when production ExpArithmeticLTE is entered. -func (s *BaseBoolexpListener) EnterExpArithmeticLTE(ctx *ExpArithmeticLTEContext) {} - -// ExitExpArithmeticLTE is called when production ExpArithmeticLTE is exited. -func (s *BaseBoolexpListener) ExitExpArithmeticLTE(ctx *ExpArithmeticLTEContext) {} - -// EnterExpArithmeticGT is called when production ExpArithmeticGT is entered. -func (s *BaseBoolexpListener) EnterExpArithmeticGT(ctx *ExpArithmeticGTContext) {} - -// ExitExpArithmeticGT is called when production ExpArithmeticGT is exited. -func (s *BaseBoolexpListener) ExitExpArithmeticGT(ctx *ExpArithmeticGTContext) {} - -// EnterExpText is called when production ExpText is entered. -func (s *BaseBoolexpListener) EnterExpText(ctx *ExpTextContext) {} - -// ExitExpText is called when production ExpText is exited. -func (s *BaseBoolexpListener) ExitExpText(ctx *ExpTextContext) {} - -// EnterExpNumber is called when production ExpNumber is entered. -func (s *BaseBoolexpListener) EnterExpNumber(ctx *ExpNumberContext) {} - -// ExitExpNumber is called when production ExpNumber is exited. -func (s *BaseBoolexpListener) ExitExpNumber(ctx *ExpNumberContext) {} - -// EnterExpLogicalAnd is called when production ExpLogicalAnd is entered. -func (s *BaseBoolexpListener) EnterExpLogicalAnd(ctx *ExpLogicalAndContext) {} - -// ExitExpLogicalAnd is called when production ExpLogicalAnd is exited. -func (s *BaseBoolexpListener) ExitExpLogicalAnd(ctx *ExpLogicalAndContext) {} - -// EnterExpLogicalOR is called when production ExpLogicalOR is entered. -func (s *BaseBoolexpListener) EnterExpLogicalOR(ctx *ExpLogicalORContext) {} - -// ExitExpLogicalOR is called when production ExpLogicalOR is exited. -func (s *BaseBoolexpListener) ExitExpLogicalOR(ctx *ExpLogicalORContext) {} - -// EnterExpFloat is called when production ExpFloat is entered. -func (s *BaseBoolexpListener) EnterExpFloat(ctx *ExpFloatContext) {} - -// ExitExpFloat is called when production ExpFloat is exited. -func (s *BaseBoolexpListener) ExitExpFloat(ctx *ExpFloatContext) {} - -// EnterExpVariable is called when production ExpVariable is entered. -func (s *BaseBoolexpListener) EnterExpVariable(ctx *ExpVariableContext) {} - -// ExitExpVariable is called when production ExpVariable is exited. -func (s *BaseBoolexpListener) ExitExpVariable(ctx *ExpVariableContext) {} - -// EnterExpNot is called when production ExpNot is entered. -func (s *BaseBoolexpListener) EnterExpNot(ctx *ExpNotContext) {} - -// ExitExpNot is called when production ExpNot is exited. -func (s *BaseBoolexpListener) ExitExpNot(ctx *ExpNotContext) {} - -// EnterExpInParen is called when production ExpInParen is entered. -func (s *BaseBoolexpListener) EnterExpInParen(ctx *ExpInParenContext) {} - -// ExitExpInParen is called when production ExpInParen is exited. -func (s *BaseBoolexpListener) ExitExpInParen(ctx *ExpInParenContext) {} - -// EnterExpBoolean is called when production ExpBoolean is entered. 
-func (s *BaseBoolexpListener) EnterExpBoolean(ctx *ExpBooleanContext) {} - -// ExitExpBoolean is called when production ExpBoolean is exited. -func (s *BaseBoolexpListener) ExitExpBoolean(ctx *ExpBooleanContext) {} - -// EnterExpFunction is called when production ExpFunction is entered. -func (s *BaseBoolexpListener) EnterExpFunction(ctx *ExpFunctionContext) {} - -// ExitExpFunction is called when production ExpFunction is exited. -func (s *BaseBoolexpListener) ExitExpFunction(ctx *ExpFunctionContext) {} - -// EnterExpArithmeticLT is called when production ExpArithmeticLT is entered. -func (s *BaseBoolexpListener) EnterExpArithmeticLT(ctx *ExpArithmeticLTContext) {} - -// ExitExpArithmeticLT is called when production ExpArithmeticLT is exited. -func (s *BaseBoolexpListener) ExitExpArithmeticLT(ctx *ExpArithmeticLTContext) {} - -// EnterBoolean is called when production boolean is entered. -func (s *BaseBoolexpListener) EnterBoolean(ctx *BooleanContext) {} - -// ExitBoolean is called when production boolean is exited. -func (s *BaseBoolexpListener) ExitBoolean(ctx *BooleanContext) {} - -// EnterArguments is called when production arguments is entered. -func (s *BaseBoolexpListener) EnterArguments(ctx *ArgumentsContext) {} - -// ExitArguments is called when production arguments is exited. -func (s *BaseBoolexpListener) ExitArguments(ctx *ArgumentsContext) {} diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_base_visitor.go b/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_base_visitor.go deleted file mode 100644 index 44adc6d7c10..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_base_visitor.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -// Code generated from Boolexp.g4 by ANTLR 4.7.2. DO NOT EDIT. 
- -package parser // Boolexp - -import "github.com/antlr/antlr4/runtime/Go/antlr" - -type BaseBoolexpVisitor struct { - *antlr.BaseParseTreeVisitor -} - -func (v *BaseBoolexpVisitor) VisitExpList(ctx *ExpListContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpArithmeticNEQ(ctx *ExpArithmeticNEQContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpArithmeticEQ(ctx *ExpArithmeticEQContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpArithmeticGTE(ctx *ExpArithmeticGTEContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpArithmeticLTE(ctx *ExpArithmeticLTEContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpArithmeticGT(ctx *ExpArithmeticGTContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpText(ctx *ExpTextContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpNumber(ctx *ExpNumberContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpLogicalAnd(ctx *ExpLogicalAndContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpLogicalOR(ctx *ExpLogicalORContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpFloat(ctx *ExpFloatContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpVariable(ctx *ExpVariableContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpNot(ctx *ExpNotContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpInParen(ctx *ExpInParenContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpBoolean(ctx *ExpBooleanContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpFunction(ctx *ExpFunctionContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitExpArithmeticLT(ctx *ExpArithmeticLTContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitBoolean(ctx *BooleanContext) interface{} { - return v.VisitChildren(ctx) -} - -func (v *BaseBoolexpVisitor) VisitArguments(ctx *ArgumentsContext) interface{} { - return v.VisitChildren(ctx) -} diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_lexer.go b/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_lexer.go deleted file mode 100644 index b89f1dbc517..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_lexer.go +++ /dev/null @@ -1,191 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -// Code generated from Boolexp.g4 by ANTLR 4.7.2. DO NOT EDIT. 
- -package parser - -import ( - "fmt" - "unicode" - - "github.com/antlr/antlr4/runtime/Go/antlr" -) - -// Suppress unused import error -var _ = fmt.Printf -var _ = unicode.IsLetter - -var serializedLexerAtn = []uint16{ - 3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 22, 183, - 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, - 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, - 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, - 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, - 9, 23, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, - 3, 6, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, - 3, 9, 3, 9, 3, 9, 5, 9, 74, 10, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, - 10, 5, 10, 82, 10, 10, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, - 3, 11, 5, 11, 92, 10, 11, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, - 12, 3, 12, 3, 12, 3, 12, 5, 12, 104, 10, 12, 3, 13, 6, 13, 107, 10, 13, - 13, 13, 14, 13, 108, 3, 13, 3, 13, 6, 13, 113, 10, 13, 13, 13, 14, 13, - 114, 3, 14, 6, 14, 118, 10, 14, 13, 14, 14, 14, 119, 3, 15, 6, 15, 123, - 10, 15, 13, 15, 14, 15, 124, 3, 15, 3, 15, 3, 16, 3, 16, 3, 16, 3, 16, - 3, 16, 3, 16, 3, 16, 5, 16, 136, 10, 16, 3, 17, 3, 17, 6, 17, 140, 10, - 17, 13, 17, 14, 17, 141, 3, 17, 3, 17, 6, 17, 146, 10, 17, 13, 17, 14, - 17, 147, 7, 17, 150, 10, 17, 12, 17, 14, 17, 153, 11, 17, 3, 17, 3, 17, - 3, 18, 3, 18, 7, 18, 159, 10, 18, 12, 18, 14, 18, 162, 11, 18, 3, 19, 3, - 19, 7, 19, 166, 10, 19, 12, 19, 14, 19, 169, 11, 19, 3, 19, 3, 19, 3, 20, - 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 2, - 2, 24, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, - 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, 20, 39, - 21, 41, 22, 43, 2, 45, 2, 3, 2, 9, 3, 2, 50, 59, 5, 2, 11, 12, 15, 15, - 34, 34, 7, 2, 48, 48, 50, 59, 67, 92, 97, 97, 99, 124, 6, 2, 50, 59, 92, - 92, 97, 97, 99, 124, 5, 2, 67, 92, 97, 97, 99, 124, 6, 2, 50, 59, 67, 92, - 97, 97, 99, 124, 5, 2, 12, 12, 15, 15, 41, 41, 2, 197, 2, 3, 3, 2, 2, 2, - 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, - 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, - 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, - 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, - 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, 3, 47, - 3, 2, 2, 2, 5, 49, 3, 2, 2, 2, 7, 52, 3, 2, 2, 2, 9, 55, 3, 2, 2, 2, 11, - 57, 3, 2, 2, 2, 13, 59, 3, 2, 2, 2, 15, 62, 3, 2, 2, 2, 17, 73, 3, 2, 2, - 2, 19, 81, 3, 2, 2, 2, 21, 91, 3, 2, 2, 2, 23, 103, 3, 2, 2, 2, 25, 106, - 3, 2, 2, 2, 27, 117, 3, 2, 2, 2, 29, 122, 3, 2, 2, 2, 31, 135, 3, 2, 2, - 2, 33, 137, 3, 2, 2, 2, 35, 156, 3, 2, 2, 2, 37, 163, 3, 2, 2, 2, 39, 172, - 3, 2, 2, 2, 41, 174, 3, 2, 2, 2, 43, 176, 3, 2, 2, 2, 45, 180, 3, 2, 2, - 2, 47, 48, 7, 46, 2, 2, 48, 4, 3, 2, 2, 2, 49, 50, 7, 63, 2, 2, 50, 51, - 7, 63, 2, 2, 51, 6, 3, 2, 2, 2, 52, 53, 7, 35, 2, 2, 53, 54, 7, 63, 2, - 2, 54, 8, 3, 2, 2, 2, 55, 56, 7, 64, 2, 2, 56, 10, 3, 2, 2, 2, 57, 58, - 7, 62, 2, 2, 58, 12, 3, 2, 2, 2, 59, 60, 7, 64, 2, 2, 60, 61, 7, 63, 2, - 2, 61, 14, 3, 2, 2, 2, 62, 63, 7, 62, 2, 2, 63, 64, 7, 63, 2, 2, 64, 16, - 3, 2, 2, 2, 65, 66, 7, 99, 2, 2, 66, 67, 7, 112, 2, 2, 67, 74, 7, 102, - 2, 2, 68, 69, 7, 67, 2, 2, 69, 70, 7, 80, 2, 2, 70, 74, 7, 70, 2, 2, 71, - 72, 7, 40, 2, 2, 72, 74, 7, 40, 2, 2, 73, 65, 3, 2, 
2, 2, 73, 68, 3, 2, - 2, 2, 73, 71, 3, 2, 2, 2, 74, 18, 3, 2, 2, 2, 75, 76, 7, 113, 2, 2, 76, - 82, 7, 116, 2, 2, 77, 78, 7, 81, 2, 2, 78, 82, 7, 84, 2, 2, 79, 80, 7, - 126, 2, 2, 80, 82, 7, 126, 2, 2, 81, 75, 3, 2, 2, 2, 81, 77, 3, 2, 2, 2, - 81, 79, 3, 2, 2, 2, 82, 20, 3, 2, 2, 2, 83, 84, 7, 118, 2, 2, 84, 85, 7, - 116, 2, 2, 85, 86, 7, 119, 2, 2, 86, 92, 7, 103, 2, 2, 87, 88, 7, 86, 2, - 2, 88, 89, 7, 84, 2, 2, 89, 90, 7, 87, 2, 2, 90, 92, 7, 71, 2, 2, 91, 83, - 3, 2, 2, 2, 91, 87, 3, 2, 2, 2, 92, 22, 3, 2, 2, 2, 93, 94, 7, 104, 2, - 2, 94, 95, 7, 99, 2, 2, 95, 96, 7, 110, 2, 2, 96, 97, 7, 117, 2, 2, 97, - 104, 7, 103, 2, 2, 98, 99, 7, 72, 2, 2, 99, 100, 7, 67, 2, 2, 100, 101, - 7, 78, 2, 2, 101, 102, 7, 85, 2, 2, 102, 104, 7, 71, 2, 2, 103, 93, 3, - 2, 2, 2, 103, 98, 3, 2, 2, 2, 104, 24, 3, 2, 2, 2, 105, 107, 9, 2, 2, 2, - 106, 105, 3, 2, 2, 2, 107, 108, 3, 2, 2, 2, 108, 106, 3, 2, 2, 2, 108, - 109, 3, 2, 2, 2, 109, 110, 3, 2, 2, 2, 110, 112, 7, 48, 2, 2, 111, 113, - 9, 2, 2, 2, 112, 111, 3, 2, 2, 2, 113, 114, 3, 2, 2, 2, 114, 112, 3, 2, - 2, 2, 114, 115, 3, 2, 2, 2, 115, 26, 3, 2, 2, 2, 116, 118, 9, 2, 2, 2, - 117, 116, 3, 2, 2, 2, 118, 119, 3, 2, 2, 2, 119, 117, 3, 2, 2, 2, 119, - 120, 3, 2, 2, 2, 120, 28, 3, 2, 2, 2, 121, 123, 9, 3, 2, 2, 122, 121, 3, - 2, 2, 2, 123, 124, 3, 2, 2, 2, 124, 122, 3, 2, 2, 2, 124, 125, 3, 2, 2, - 2, 125, 126, 3, 2, 2, 2, 126, 127, 8, 15, 2, 2, 127, 30, 3, 2, 2, 2, 128, - 129, 7, 80, 2, 2, 129, 130, 7, 81, 2, 2, 130, 136, 7, 86, 2, 2, 131, 136, - 7, 35, 2, 2, 132, 133, 7, 112, 2, 2, 133, 134, 7, 113, 2, 2, 134, 136, - 7, 118, 2, 2, 135, 128, 3, 2, 2, 2, 135, 131, 3, 2, 2, 2, 135, 132, 3, - 2, 2, 2, 136, 32, 3, 2, 2, 2, 137, 139, 5, 43, 22, 2, 138, 140, 9, 4, 2, - 2, 139, 138, 3, 2, 2, 2, 140, 141, 3, 2, 2, 2, 141, 139, 3, 2, 2, 2, 141, - 142, 3, 2, 2, 2, 142, 151, 3, 2, 2, 2, 143, 145, 7, 48, 2, 2, 144, 146, - 9, 5, 2, 2, 145, 144, 3, 2, 2, 2, 146, 147, 3, 2, 2, 2, 147, 145, 3, 2, - 2, 2, 147, 148, 3, 2, 2, 2, 148, 150, 3, 2, 2, 2, 149, 143, 3, 2, 2, 2, - 150, 153, 3, 2, 2, 2, 151, 149, 3, 2, 2, 2, 151, 152, 3, 2, 2, 2, 152, - 154, 3, 2, 2, 2, 153, 151, 3, 2, 2, 2, 154, 155, 5, 45, 23, 2, 155, 34, - 3, 2, 2, 2, 156, 160, 9, 6, 2, 2, 157, 159, 9, 7, 2, 2, 158, 157, 3, 2, - 2, 2, 159, 162, 3, 2, 2, 2, 160, 158, 3, 2, 2, 2, 160, 161, 3, 2, 2, 2, - 161, 36, 3, 2, 2, 2, 162, 160, 3, 2, 2, 2, 163, 167, 7, 41, 2, 2, 164, - 166, 10, 8, 2, 2, 165, 164, 3, 2, 2, 2, 166, 169, 3, 2, 2, 2, 167, 165, - 3, 2, 2, 2, 167, 168, 3, 2, 2, 2, 168, 170, 3, 2, 2, 2, 169, 167, 3, 2, - 2, 2, 170, 171, 7, 41, 2, 2, 171, 38, 3, 2, 2, 2, 172, 173, 7, 42, 2, 2, - 173, 40, 3, 2, 2, 2, 174, 175, 7, 43, 2, 2, 175, 42, 3, 2, 2, 2, 176, 177, - 7, 39, 2, 2, 177, 178, 7, 125, 2, 2, 178, 179, 7, 93, 2, 2, 179, 44, 3, - 2, 2, 2, 180, 181, 7, 95, 2, 2, 181, 182, 7, 127, 2, 2, 182, 46, 3, 2, - 2, 2, 17, 2, 73, 81, 91, 103, 108, 114, 119, 124, 135, 141, 147, 151, 160, - 167, 3, 8, 2, 2, -} - -var lexerDeserializer = antlr.NewATNDeserializer(nil) -var lexerAtn = lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn) - -var lexerChannelNames = []string{ - "DEFAULT_TOKEN_CHANNEL", "HIDDEN", -} - -var lexerModeNames = []string{ - "DEFAULT_MODE", -} - -var lexerLiteralNames = []string{ - "", "','", "'=='", "'!='", "'>'", "'<'", "'>='", "'<='", "", "", "", "", - "", "", "", "", "", "", "", "'('", "')'", -} - -var lexerSymbolicNames = []string{ - "", "", "EQ", "NEQ", "GT", "LT", "GTE", "LTE", "AND", "OR", "TRUE", "FALSE", - "FLOAT", "NUMBER", "WHITESPACE", "NOT", "VARIABLE", "METHODNAME", "TEXT", 
- "LPAR", "RPAR", -} - -var lexerRuleNames = []string{ - "T__0", "EQ", "NEQ", "GT", "LT", "GTE", "LTE", "AND", "OR", "TRUE", "FALSE", - "FLOAT", "NUMBER", "WHITESPACE", "NOT", "VARIABLE", "METHODNAME", "TEXT", - "LPAR", "RPAR", "BEGIN_VARIABLE", "END_VARIABLE", -} - -type BoolexpLexer struct { - *antlr.BaseLexer - channelNames []string - modeNames []string - // TODO: EOF string -} - -var lexerDecisionToDFA = make([]*antlr.DFA, len(lexerAtn.DecisionToState)) - -func init() { - for index, ds := range lexerAtn.DecisionToState { - lexerDecisionToDFA[index] = antlr.NewDFA(ds, index) - } -} - -func NewBoolexpLexer(input antlr.CharStream) *BoolexpLexer { - - l := new(BoolexpLexer) - - l.BaseLexer = antlr.NewBaseLexer(input) - l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache()) - - l.channelNames = lexerChannelNames - l.modeNames = lexerModeNames - l.RuleNames = lexerRuleNames - l.LiteralNames = lexerLiteralNames - l.SymbolicNames = lexerSymbolicNames - l.GrammarFileName = "Boolexp.g4" - // TODO: l.EOF = antlr.TokenEOF - - return l -} - -// BoolexpLexer tokens. -const ( - BoolexpLexerT__0 = 1 - BoolexpLexerEQ = 2 - BoolexpLexerNEQ = 3 - BoolexpLexerGT = 4 - BoolexpLexerLT = 5 - BoolexpLexerGTE = 6 - BoolexpLexerLTE = 7 - BoolexpLexerAND = 8 - BoolexpLexerOR = 9 - BoolexpLexerTRUE = 10 - BoolexpLexerFALSE = 11 - BoolexpLexerFLOAT = 12 - BoolexpLexerNUMBER = 13 - BoolexpLexerWHITESPACE = 14 - BoolexpLexerNOT = 15 - BoolexpLexerVARIABLE = 16 - BoolexpLexerMETHODNAME = 17 - BoolexpLexerTEXT = 18 - BoolexpLexerLPAR = 19 - BoolexpLexerRPAR = 20 -) diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_parser.go b/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_parser.go deleted file mode 100644 index 2805c35e9e5..00000000000 --- a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_parser.go +++ /dev/null @@ -1,1952 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -// Code generated from Boolexp.g4 by ANTLR 4.7.2. DO NOT EDIT. 
- -package parser // Boolexp - -import ( - "fmt" - "reflect" - "strconv" - - "github.com/antlr/antlr4/runtime/Go/antlr" -) - -// Suppress unused import errors -var _ = fmt.Printf -var _ = reflect.Copy -var _ = strconv.Itoa - -var parserATN = []uint16{ - 3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 22, 73, 4, - 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 3, 2, 3, 2, 3, 2, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 26, - 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 32, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 7, 3, 58, 10, 3, 12, 3, - 14, 3, 61, 11, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 7, 5, 68, 10, 5, 12, 5, - 14, 5, 71, 11, 5, 3, 5, 2, 3, 4, 6, 2, 4, 6, 8, 2, 3, 3, 2, 12, 13, 2, - 85, 2, 10, 3, 2, 2, 2, 4, 31, 3, 2, 2, 2, 6, 62, 3, 2, 2, 2, 8, 64, 3, - 2, 2, 2, 10, 11, 5, 4, 3, 2, 11, 12, 7, 2, 2, 3, 12, 3, 3, 2, 2, 2, 13, - 14, 8, 3, 1, 2, 14, 15, 7, 21, 2, 2, 15, 16, 5, 4, 3, 2, 16, 17, 7, 22, - 2, 2, 17, 32, 3, 2, 2, 2, 18, 19, 7, 17, 2, 2, 19, 32, 5, 4, 3, 17, 20, - 32, 5, 6, 4, 2, 21, 32, 7, 18, 2, 2, 22, 23, 7, 19, 2, 2, 23, 25, 7, 21, - 2, 2, 24, 26, 5, 8, 5, 2, 25, 24, 3, 2, 2, 2, 25, 26, 3, 2, 2, 2, 26, 27, - 3, 2, 2, 2, 27, 32, 7, 22, 2, 2, 28, 32, 7, 20, 2, 2, 29, 32, 7, 14, 2, - 2, 30, 32, 7, 15, 2, 2, 31, 13, 3, 2, 2, 2, 31, 18, 3, 2, 2, 2, 31, 20, - 3, 2, 2, 2, 31, 21, 3, 2, 2, 2, 31, 22, 3, 2, 2, 2, 31, 28, 3, 2, 2, 2, - 31, 29, 3, 2, 2, 2, 31, 30, 3, 2, 2, 2, 32, 59, 3, 2, 2, 2, 33, 34, 12, - 16, 2, 2, 34, 35, 7, 4, 2, 2, 35, 58, 5, 4, 3, 17, 36, 37, 12, 15, 2, 2, - 37, 38, 7, 5, 2, 2, 38, 58, 5, 4, 3, 16, 39, 40, 12, 14, 2, 2, 40, 41, - 7, 9, 2, 2, 41, 58, 5, 4, 3, 15, 42, 43, 12, 13, 2, 2, 43, 44, 7, 8, 2, - 2, 44, 58, 5, 4, 3, 14, 45, 46, 12, 12, 2, 2, 46, 47, 7, 7, 2, 2, 47, 58, - 5, 4, 3, 13, 48, 49, 12, 11, 2, 2, 49, 50, 7, 6, 2, 2, 50, 58, 5, 4, 3, - 12, 51, 52, 12, 10, 2, 2, 52, 53, 7, 10, 2, 2, 53, 58, 5, 4, 3, 11, 54, - 55, 12, 9, 2, 2, 55, 56, 7, 11, 2, 2, 56, 58, 5, 4, 3, 10, 57, 33, 3, 2, - 2, 2, 57, 36, 3, 2, 2, 2, 57, 39, 3, 2, 2, 2, 57, 42, 3, 2, 2, 2, 57, 45, - 3, 2, 2, 2, 57, 48, 3, 2, 2, 2, 57, 51, 3, 2, 2, 2, 57, 54, 3, 2, 2, 2, - 58, 61, 3, 2, 2, 2, 59, 57, 3, 2, 2, 2, 59, 60, 3, 2, 2, 2, 60, 5, 3, 2, - 2, 2, 61, 59, 3, 2, 2, 2, 62, 63, 9, 2, 2, 2, 63, 7, 3, 2, 2, 2, 64, 69, - 5, 4, 3, 2, 65, 66, 7, 3, 2, 2, 66, 68, 5, 4, 3, 2, 67, 65, 3, 2, 2, 2, - 68, 71, 3, 2, 2, 2, 69, 67, 3, 2, 2, 2, 69, 70, 3, 2, 2, 2, 70, 9, 3, 2, - 2, 2, 71, 69, 3, 2, 2, 2, 7, 25, 31, 57, 59, 69, -} -var deserializer = antlr.NewATNDeserializer(nil) -var deserializedATN = deserializer.DeserializeFromUInt16(parserATN) - -var literalNames = []string{ - "", "','", "'=='", "'!='", "'>'", "'<'", "'>='", "'<='", "", "", "", "", - "", "", "", "", "", "", "", "'('", "')'", -} -var symbolicNames = []string{ - "", "", "EQ", "NEQ", "GT", "LT", "GTE", "LTE", "AND", "OR", "TRUE", "FALSE", - "FLOAT", "NUMBER", "WHITESPACE", "NOT", "VARIABLE", "METHODNAME", "TEXT", - "LPAR", "RPAR", -} - -var ruleNames = []string{ - "expList", "exp", "boolean", "arguments", -} -var decisionToDFA = make([]*antlr.DFA, len(deserializedATN.DecisionToState)) - -func init() { - for index, ds := range deserializedATN.DecisionToState { - decisionToDFA[index] = antlr.NewDFA(ds, index) - } -} - -type BoolexpParser struct { - *antlr.BaseParser -} - -func NewBoolexpParser(input antlr.TokenStream) *BoolexpParser { - this := new(BoolexpParser) - - this.BaseParser 
= antlr.NewBaseParser(input) - - this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, antlr.NewPredictionContextCache()) - this.RuleNames = ruleNames - this.LiteralNames = literalNames - this.SymbolicNames = symbolicNames - this.GrammarFileName = "Boolexp.g4" - - return this -} - -// BoolexpParser tokens. -const ( - BoolexpParserEOF = antlr.TokenEOF - BoolexpParserT__0 = 1 - BoolexpParserEQ = 2 - BoolexpParserNEQ = 3 - BoolexpParserGT = 4 - BoolexpParserLT = 5 - BoolexpParserGTE = 6 - BoolexpParserLTE = 7 - BoolexpParserAND = 8 - BoolexpParserOR = 9 - BoolexpParserTRUE = 10 - BoolexpParserFALSE = 11 - BoolexpParserFLOAT = 12 - BoolexpParserNUMBER = 13 - BoolexpParserWHITESPACE = 14 - BoolexpParserNOT = 15 - BoolexpParserVARIABLE = 16 - BoolexpParserMETHODNAME = 17 - BoolexpParserTEXT = 18 - BoolexpParserLPAR = 19 - BoolexpParserRPAR = 20 -) - -// BoolexpParser rules. -const ( - BoolexpParserRULE_expList = 0 - BoolexpParserRULE_exp = 1 - BoolexpParserRULE_boolean = 2 - BoolexpParserRULE_arguments = 3 -) - -// IExpListContext is an interface to support dynamic dispatch. -type IExpListContext interface { - antlr.ParserRuleContext - - // GetParser returns the parser. - GetParser() antlr.Parser - - // IsExpListContext differentiates from other interfaces. - IsExpListContext() -} - -type ExpListContext struct { - *antlr.BaseParserRuleContext - parser antlr.Parser -} - -func NewEmptyExpListContext() *ExpListContext { - var p = new(ExpListContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) - p.RuleIndex = BoolexpParserRULE_expList - return p -} - -func (*ExpListContext) IsExpListContext() {} - -func NewExpListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExpListContext { - var p = new(ExpListContext) - - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) - - p.parser = parser - p.RuleIndex = BoolexpParserRULE_expList - - return p -} - -func (s *ExpListContext) GetParser() antlr.Parser { return s.parser } - -func (s *ExpListContext) Exp() IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), 0) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpListContext) EOF() antlr.TerminalNode { - return s.GetToken(BoolexpParserEOF, 0) -} - -func (s *ExpListContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpListContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) -} - -func (s *ExpListContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpList(s) - } -} - -func (s *ExpListContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpList(s) - } -} - -func (s *ExpListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpList(s) - - default: - return t.VisitChildren(s) - } -} - -func (p *BoolexpParser) ExpList() (localctx IExpListContext) { - localctx = NewExpListContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 0, BoolexpParserRULE_expList) - - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } 
else { - panic(err) - } - } - }() - - p.EnterOuterAlt(localctx, 1) - { - p.SetState(8) - p.exp(0) - } - { - p.SetState(9) - p.Match(BoolexpParserEOF) - } - - return localctx -} - -// IExpContext is an interface to support dynamic dispatch. -type IExpContext interface { - antlr.ParserRuleContext - - // GetParser returns the parser. - GetParser() antlr.Parser - - // IsExpContext differentiates from other interfaces. - IsExpContext() -} - -type ExpContext struct { - *antlr.BaseParserRuleContext - parser antlr.Parser -} - -func NewEmptyExpContext() *ExpContext { - var p = new(ExpContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) - p.RuleIndex = BoolexpParserRULE_exp - return p -} - -func (*ExpContext) IsExpContext() {} - -func NewExpContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExpContext { - var p = new(ExpContext) - - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) - - p.parser = parser - p.RuleIndex = BoolexpParserRULE_exp - - return p -} - -func (s *ExpContext) GetParser() antlr.Parser { return s.parser } - -func (s *ExpContext) CopyFrom(ctx *ExpContext) { - s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) -} - -func (s *ExpContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { - return antlr.TreesStringTree(s, ruleNames, recog) -} - -type ExpArithmeticNEQContext struct { - *ExpContext - left IExpContext - right IExpContext -} - -func NewExpArithmeticNEQContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticNEQContext { - var p = new(ExpArithmeticNEQContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpArithmeticNEQContext) GetLeft() IExpContext { return s.left } - -func (s *ExpArithmeticNEQContext) GetRight() IExpContext { return s.right } - -func (s *ExpArithmeticNEQContext) SetLeft(v IExpContext) { s.left = v } - -func (s *ExpArithmeticNEQContext) SetRight(v IExpContext) { s.right = v } - -func (s *ExpArithmeticNEQContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpArithmeticNEQContext) NEQ() antlr.TerminalNode { - return s.GetToken(BoolexpParserNEQ, 0) -} - -func (s *ExpArithmeticNEQContext) AllExp() []IExpContext { - var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) - var tst = make([]IExpContext, len(ts)) - - for i, t := range ts { - if t != nil { - tst[i] = t.(IExpContext) - } - } - - return tst -} - -func (s *ExpArithmeticNEQContext) Exp(i int) IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpArithmeticNEQContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpArithmeticNEQ(s) - } -} - -func (s *ExpArithmeticNEQContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpArithmeticNEQ(s) - } -} - -func (s *ExpArithmeticNEQContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpArithmeticNEQ(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpArithmeticEQContext struct { - *ExpContext - left IExpContext - right IExpContext -} - -func NewExpArithmeticEQContext(parser antlr.Parser, ctx 
antlr.ParserRuleContext) *ExpArithmeticEQContext { - var p = new(ExpArithmeticEQContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpArithmeticEQContext) GetLeft() IExpContext { return s.left } - -func (s *ExpArithmeticEQContext) GetRight() IExpContext { return s.right } - -func (s *ExpArithmeticEQContext) SetLeft(v IExpContext) { s.left = v } - -func (s *ExpArithmeticEQContext) SetRight(v IExpContext) { s.right = v } - -func (s *ExpArithmeticEQContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpArithmeticEQContext) EQ() antlr.TerminalNode { - return s.GetToken(BoolexpParserEQ, 0) -} - -func (s *ExpArithmeticEQContext) AllExp() []IExpContext { - var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) - var tst = make([]IExpContext, len(ts)) - - for i, t := range ts { - if t != nil { - tst[i] = t.(IExpContext) - } - } - - return tst -} - -func (s *ExpArithmeticEQContext) Exp(i int) IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpArithmeticEQContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpArithmeticEQ(s) - } -} - -func (s *ExpArithmeticEQContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpArithmeticEQ(s) - } -} - -func (s *ExpArithmeticEQContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpArithmeticEQ(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpArithmeticGTEContext struct { - *ExpContext - left IExpContext - right IExpContext -} - -func NewExpArithmeticGTEContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticGTEContext { - var p = new(ExpArithmeticGTEContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpArithmeticGTEContext) GetLeft() IExpContext { return s.left } - -func (s *ExpArithmeticGTEContext) GetRight() IExpContext { return s.right } - -func (s *ExpArithmeticGTEContext) SetLeft(v IExpContext) { s.left = v } - -func (s *ExpArithmeticGTEContext) SetRight(v IExpContext) { s.right = v } - -func (s *ExpArithmeticGTEContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpArithmeticGTEContext) GTE() antlr.TerminalNode { - return s.GetToken(BoolexpParserGTE, 0) -} - -func (s *ExpArithmeticGTEContext) AllExp() []IExpContext { - var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) - var tst = make([]IExpContext, len(ts)) - - for i, t := range ts { - if t != nil { - tst[i] = t.(IExpContext) - } - } - - return tst -} - -func (s *ExpArithmeticGTEContext) Exp(i int) IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpArithmeticGTEContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpArithmeticGTE(s) - } -} - -func (s *ExpArithmeticGTEContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpArithmeticGTE(s) - } -} - -func (s *ExpArithmeticGTEContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - 
switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpArithmeticGTE(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpArithmeticLTEContext struct { - *ExpContext - left IExpContext - right IExpContext -} - -func NewExpArithmeticLTEContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticLTEContext { - var p = new(ExpArithmeticLTEContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpArithmeticLTEContext) GetLeft() IExpContext { return s.left } - -func (s *ExpArithmeticLTEContext) GetRight() IExpContext { return s.right } - -func (s *ExpArithmeticLTEContext) SetLeft(v IExpContext) { s.left = v } - -func (s *ExpArithmeticLTEContext) SetRight(v IExpContext) { s.right = v } - -func (s *ExpArithmeticLTEContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpArithmeticLTEContext) LTE() antlr.TerminalNode { - return s.GetToken(BoolexpParserLTE, 0) -} - -func (s *ExpArithmeticLTEContext) AllExp() []IExpContext { - var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) - var tst = make([]IExpContext, len(ts)) - - for i, t := range ts { - if t != nil { - tst[i] = t.(IExpContext) - } - } - - return tst -} - -func (s *ExpArithmeticLTEContext) Exp(i int) IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpArithmeticLTEContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpArithmeticLTE(s) - } -} - -func (s *ExpArithmeticLTEContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpArithmeticLTE(s) - } -} - -func (s *ExpArithmeticLTEContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpArithmeticLTE(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpArithmeticGTContext struct { - *ExpContext - left IExpContext - right IExpContext -} - -func NewExpArithmeticGTContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticGTContext { - var p = new(ExpArithmeticGTContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpArithmeticGTContext) GetLeft() IExpContext { return s.left } - -func (s *ExpArithmeticGTContext) GetRight() IExpContext { return s.right } - -func (s *ExpArithmeticGTContext) SetLeft(v IExpContext) { s.left = v } - -func (s *ExpArithmeticGTContext) SetRight(v IExpContext) { s.right = v } - -func (s *ExpArithmeticGTContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpArithmeticGTContext) GT() antlr.TerminalNode { - return s.GetToken(BoolexpParserGT, 0) -} - -func (s *ExpArithmeticGTContext) AllExp() []IExpContext { - var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) - var tst = make([]IExpContext, len(ts)) - - for i, t := range ts { - if t != nil { - tst[i] = t.(IExpContext) - } - } - - return tst -} - -func (s *ExpArithmeticGTContext) Exp(i int) IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpArithmeticGTContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - 
listenerT.EnterExpArithmeticGT(s) - } -} - -func (s *ExpArithmeticGTContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpArithmeticGT(s) - } -} - -func (s *ExpArithmeticGTContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpArithmeticGT(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpTextContext struct { - *ExpContext -} - -func NewExpTextContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpTextContext { - var p = new(ExpTextContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpTextContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpTextContext) TEXT() antlr.TerminalNode { - return s.GetToken(BoolexpParserTEXT, 0) -} - -func (s *ExpTextContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpText(s) - } -} - -func (s *ExpTextContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpText(s) - } -} - -func (s *ExpTextContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpText(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpNumberContext struct { - *ExpContext -} - -func NewExpNumberContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpNumberContext { - var p = new(ExpNumberContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpNumberContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpNumberContext) NUMBER() antlr.TerminalNode { - return s.GetToken(BoolexpParserNUMBER, 0) -} - -func (s *ExpNumberContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpNumber(s) - } -} - -func (s *ExpNumberContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpNumber(s) - } -} - -func (s *ExpNumberContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpNumber(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpLogicalAndContext struct { - *ExpContext - left IExpContext - right IExpContext -} - -func NewExpLogicalAndContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpLogicalAndContext { - var p = new(ExpLogicalAndContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpLogicalAndContext) GetLeft() IExpContext { return s.left } - -func (s *ExpLogicalAndContext) GetRight() IExpContext { return s.right } - -func (s *ExpLogicalAndContext) SetLeft(v IExpContext) { s.left = v } - -func (s *ExpLogicalAndContext) SetRight(v IExpContext) { s.right = v } - -func (s *ExpLogicalAndContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpLogicalAndContext) AND() antlr.TerminalNode { - return s.GetToken(BoolexpParserAND, 0) -} - -func (s *ExpLogicalAndContext) AllExp() []IExpContext { - var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) - var tst = make([]IExpContext, len(ts)) - - for i, t := range ts { - if t != nil { - tst[i] = t.(IExpContext) - } - } - 
- return tst -} - -func (s *ExpLogicalAndContext) Exp(i int) IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpLogicalAndContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpLogicalAnd(s) - } -} - -func (s *ExpLogicalAndContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpLogicalAnd(s) - } -} - -func (s *ExpLogicalAndContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpLogicalAnd(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpLogicalORContext struct { - *ExpContext - left IExpContext - right IExpContext -} - -func NewExpLogicalORContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpLogicalORContext { - var p = new(ExpLogicalORContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpLogicalORContext) GetLeft() IExpContext { return s.left } - -func (s *ExpLogicalORContext) GetRight() IExpContext { return s.right } - -func (s *ExpLogicalORContext) SetLeft(v IExpContext) { s.left = v } - -func (s *ExpLogicalORContext) SetRight(v IExpContext) { s.right = v } - -func (s *ExpLogicalORContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpLogicalORContext) OR() antlr.TerminalNode { - return s.GetToken(BoolexpParserOR, 0) -} - -func (s *ExpLogicalORContext) AllExp() []IExpContext { - var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) - var tst = make([]IExpContext, len(ts)) - - for i, t := range ts { - if t != nil { - tst[i] = t.(IExpContext) - } - } - - return tst -} - -func (s *ExpLogicalORContext) Exp(i int) IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpLogicalORContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpLogicalOR(s) - } -} - -func (s *ExpLogicalORContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpLogicalOR(s) - } -} - -func (s *ExpLogicalORContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpLogicalOR(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpFloatContext struct { - *ExpContext -} - -func NewExpFloatContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpFloatContext { - var p = new(ExpFloatContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpFloatContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpFloatContext) FLOAT() antlr.TerminalNode { - return s.GetToken(BoolexpParserFLOAT, 0) -} - -func (s *ExpFloatContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpFloat(s) - } -} - -func (s *ExpFloatContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpFloat(s) - } -} - -func (s *ExpFloatContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := 
visitor.(type) { - case BoolexpVisitor: - return t.VisitExpFloat(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpVariableContext struct { - *ExpContext -} - -func NewExpVariableContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpVariableContext { - var p = new(ExpVariableContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpVariableContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpVariableContext) VARIABLE() antlr.TerminalNode { - return s.GetToken(BoolexpParserVARIABLE, 0) -} - -func (s *ExpVariableContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpVariable(s) - } -} - -func (s *ExpVariableContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpVariable(s) - } -} - -func (s *ExpVariableContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpVariable(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpNotContext struct { - *ExpContext -} - -func NewExpNotContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpNotContext { - var p = new(ExpNotContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpNotContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpNotContext) NOT() antlr.TerminalNode { - return s.GetToken(BoolexpParserNOT, 0) -} - -func (s *ExpNotContext) Exp() IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), 0) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpNotContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpNot(s) - } -} - -func (s *ExpNotContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpNot(s) - } -} - -func (s *ExpNotContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpNot(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpInParenContext struct { - *ExpContext -} - -func NewExpInParenContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpInParenContext { - var p = new(ExpInParenContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpInParenContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpInParenContext) LPAR() antlr.TerminalNode { - return s.GetToken(BoolexpParserLPAR, 0) -} - -func (s *ExpInParenContext) Exp() IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), 0) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpInParenContext) RPAR() antlr.TerminalNode { - return s.GetToken(BoolexpParserRPAR, 0) -} - -func (s *ExpInParenContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpInParen(s) - } -} - -func (s *ExpInParenContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpInParen(s) - } -} - -func (s *ExpInParenContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t 
:= visitor.(type) { - case BoolexpVisitor: - return t.VisitExpInParen(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpBooleanContext struct { - *ExpContext -} - -func NewExpBooleanContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpBooleanContext { - var p = new(ExpBooleanContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpBooleanContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpBooleanContext) Boolean() IBooleanContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IBooleanContext)(nil)).Elem(), 0) - - if t == nil { - return nil - } - - return t.(IBooleanContext) -} - -func (s *ExpBooleanContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpBoolean(s) - } -} - -func (s *ExpBooleanContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpBoolean(s) - } -} - -func (s *ExpBooleanContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpBoolean(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpFunctionContext struct { - *ExpContext -} - -func NewExpFunctionContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpFunctionContext { - var p = new(ExpFunctionContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpFunctionContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpFunctionContext) METHODNAME() antlr.TerminalNode { - return s.GetToken(BoolexpParserMETHODNAME, 0) -} - -func (s *ExpFunctionContext) LPAR() antlr.TerminalNode { - return s.GetToken(BoolexpParserLPAR, 0) -} - -func (s *ExpFunctionContext) RPAR() antlr.TerminalNode { - return s.GetToken(BoolexpParserRPAR, 0) -} - -func (s *ExpFunctionContext) Arguments() IArgumentsContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IArgumentsContext)(nil)).Elem(), 0) - - if t == nil { - return nil - } - - return t.(IArgumentsContext) -} - -func (s *ExpFunctionContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpFunction(s) - } -} - -func (s *ExpFunctionContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpFunction(s) - } -} - -func (s *ExpFunctionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpFunction(s) - - default: - return t.VisitChildren(s) - } -} - -type ExpArithmeticLTContext struct { - *ExpContext - left IExpContext - right IExpContext -} - -func NewExpArithmeticLTContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticLTContext { - var p = new(ExpArithmeticLTContext) - - p.ExpContext = NewEmptyExpContext() - p.parser = parser - p.CopyFrom(ctx.(*ExpContext)) - - return p -} - -func (s *ExpArithmeticLTContext) GetLeft() IExpContext { return s.left } - -func (s *ExpArithmeticLTContext) GetRight() IExpContext { return s.right } - -func (s *ExpArithmeticLTContext) SetLeft(v IExpContext) { s.left = v } - -func (s *ExpArithmeticLTContext) SetRight(v IExpContext) { s.right = v } - -func (s *ExpArithmeticLTContext) GetRuleContext() antlr.RuleContext { - return s -} - -func (s *ExpArithmeticLTContext) LT() antlr.TerminalNode { - 
return s.GetToken(BoolexpParserLT, 0) -} - -func (s *ExpArithmeticLTContext) AllExp() []IExpContext { - var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) - var tst = make([]IExpContext, len(ts)) - - for i, t := range ts { - if t != nil { - tst[i] = t.(IExpContext) - } - } - - return tst -} - -func (s *ExpArithmeticLTContext) Exp(i int) IExpContext { - var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) - - if t == nil { - return nil - } - - return t.(IExpContext) -} - -func (s *ExpArithmeticLTContext) EnterRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.EnterExpArithmeticLT(s) - } -} - -func (s *ExpArithmeticLTContext) ExitRule(listener antlr.ParseTreeListener) { - if listenerT, ok := listener.(BoolexpListener); ok { - listenerT.ExitExpArithmeticLT(s) - } -} - -func (s *ExpArithmeticLTContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { - switch t := visitor.(type) { - case BoolexpVisitor: - return t.VisitExpArithmeticLT(s) - - default: - return t.VisitChildren(s) - } -} - -func (p *BoolexpParser) Exp() (localctx IExpContext) { - return p.exp(0) -} - -func (p *BoolexpParser) exp(_p int) (localctx IExpContext) { - var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() - _parentState := p.GetState() - localctx = NewExpContext(p, p.GetParserRuleContext(), _parentState) - var _prevctx IExpContext = localctx - var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning. - _startState := 2 - p.EnterRecursionRule(localctx, 2, BoolexpParserRULE_exp, _p) - var _la int - - defer func() { - p.UnrollRecursionContexts(_parentctx) - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - - var _alt int - - p.EnterOuterAlt(localctx, 1) - p.SetState(29) - p.GetErrorHandler().Sync(p) - - switch p.GetTokenStream().LA(1) { - case BoolexpParserLPAR: - localctx = NewExpInParenContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - - { - p.SetState(12) - p.Match(BoolexpParserLPAR) - } - { - p.SetState(13) - p.exp(0) - } - { - p.SetState(14) - p.Match(BoolexpParserRPAR) - } - - case BoolexpParserNOT: - localctx = NewExpNotContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - { - p.SetState(16) - p.Match(BoolexpParserNOT) - } - { - p.SetState(17) - p.exp(15) - } - - case BoolexpParserTRUE, BoolexpParserFALSE: - localctx = NewExpBooleanContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - { - p.SetState(18) - p.Boolean() - } - - case BoolexpParserVARIABLE: - localctx = NewExpVariableContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - { - p.SetState(19) - p.Match(BoolexpParserVARIABLE) - } - - case BoolexpParserMETHODNAME: - localctx = NewExpFunctionContext(p, localctx) - p.SetParserRuleContext(localctx) - _prevctx = localctx - { - p.SetState(20) - p.Match(BoolexpParserMETHODNAME) - } - { - p.SetState(21) - p.Match(BoolexpParserLPAR) - } - p.SetState(23) - p.GetErrorHandler().Sync(p) - _la = p.GetTokenStream().LA(1) - - if ((_la)&-(0x1f+1)) == 0 && ((1<'; +LT: '<'; +GTE: '>='; +LTE: '<='; +ADD: '+'; +SUB: '-'; +MUL: '*'; +DIV: '/'; +MOD: '%'; +AND: 'and' | 'AND'; +OR: 'or' | 'OR'; +TRUE: 'true' | 'TRUE'; +FALSE: 'false' | 'FALSE'; +FLOAT: [\-]? [0-9]+ '.' 
[0-9]+; +NUMBER: [\-]? [0-9]+; +WHITESPACE: [ \r\n\t]+ -> skip; +NOT: 'NOT' | 'not'; +NAME: [a-zA-Z_] [a-zA-Z0-9_]*; +VNAME: [a-zA-Z0-9_.]+('.'[a-zA-Z0-9_]+)*; +STEXT: '\'' ~[\r\n']* '\''; +DTEXT: '"' ~[\r\n"]* '"'; +LPAR: '('; +RPAR: ')'; +LARR: '['; +RARR: ']'; +LDICT: '{'; +RDICT: '}'; +BEGIN_VARIABLE: '${'; + +expList: exp EOF; + +boolean +: TRUE | FALSE +; + +constant +: STEXT +| DTEXT +| FLOAT +| NUMBER +| boolean +; + +variable +: NAME +| VNAME +| constant +; + +variableExp +: variable( '|' variable)* +; + +exp +: LPAR exp RPAR # ExpInParen +| left=exp (MUL | DIV | MOD) right=exp # ExpArithmeticMulDivMod +| left=exp (ADD | SUB) right=exp # ExpArithmeticAddSub +| NOT exp # ExpNot +| left=exp EQ right=exp # ExpArithmeticEQ +| left=exp NEQ right=exp # ExpArithmeticNEQ +| left=exp LTE right=exp # ExpArithmeticLTE +| left=exp GTE right=exp # ExpArithmeticGTE +| left=exp LT right=exp # ExpArithmeticLT +| left=exp GT right=exp # ExpArithmeticGT +| left=exp AND right=exp # ExpLogicalAnd +| left=exp OR right=exp # ExpLogicalOR +| boolean # ExpBoolean +| BEGIN_VARIABLE variableExp RDICT # ExpVariable +| NAME LPAR arguments? RPAR # ExpFunction +| LARR array? RARR # ExpArray +| LDICT dict? RDICT # ExpDict +| (STEXT | DTEXT) # ExpText +| FLOAT # ExpFloat +| NUMBER # ExpNumber +; + +arguments +: exp( ',' exp)* +; + +array +: constant( ',' constant)* +; + +key +: (NAME | STEXT | DTEXT) ':' constant +; + +dict +: key( ',' key)* +; diff --git a/x-pack/elastic-agent/pkg/boolexp/compare.go b/x-pack/elastic-agent/pkg/eql/compare.go similarity index 71% rename from x-pack/elastic-agent/pkg/boolexp/compare.go rename to x-pack/elastic-agent/pkg/eql/compare.go index 9a68286e3df..9381f254fde 100644 --- a/x-pack/elastic-agent/pkg/boolexp/compare.go +++ b/x-pack/elastic-agent/pkg/eql/compare.go @@ -2,9 +2,12 @@ // or more contributor license agreements. Licensed under the Elastic License; // you may not use this file except in compliance with the Elastic License. 
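// For illustration, a handful of conditions the Eql.g4 grammar above accepts,
// drawn from the cases exercised in eql_test.go further down in this patch:
// variables with '|' fallbacks, method calls, array/dict literals, and mixed
// comparisons joined with AND/OR.
var exampleConditions = []string{
	`${env.HOSTNAME|host.name|'fallback'} == 'my-hostname'`,
	`arrayContains(${data.array}, 'array5', 'array2')`,
	`length({key: 'data', other: '2'}) == 2`,
	`1 == 1 OR 2 == 2 AND 2 != 3`,
}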
-package boolexp +package eql -import "fmt" +import ( + "fmt" + "sort" +) type operand interface{} @@ -12,6 +15,12 @@ type compare func(left, right operand) (bool, error) func compareEQ(left, right operand) (bool, error) { switch v := left.(type) { + case *null: + _, ok := right.(*null) + if ok { + return true, nil + } + return false, nil case bool: rV, ok := right.(bool) if !ok { @@ -23,6 +32,8 @@ func compareEQ(left, right operand) (bool, error) { return false, nil case int: switch rv := right.(type) { + case *null: + return false, nil case int: return v == rv, nil case float64: @@ -38,6 +49,8 @@ func compareEQ(left, right operand) (bool, error) { } case float64: switch rv := right.(type) { + case *null: + return false, nil case int: return v == float64(rv), nil case float64: @@ -58,6 +71,42 @@ func compareEQ(left, right operand) (bool, error) { return true, nil } return false, nil + case []interface{}: + rV, ok := right.([]interface{}) + if !ok { + return false, nil + } + if len(v) != len(rV) { + return false, nil + } + for i := range v { + b, err := compareEQ(v[i], rV[i]) + if err != nil { + return false, err + } + if !b { + return false, nil + } + } + return true, nil + case map[string]interface{}: + rV, ok := right.(map[string]interface{}) + if !ok { + return false, nil + } + if !keysEqual(v, rV) { + return false, nil + } + for i := range v { + b, err := compareEQ(v[i], rV[i]) + if err != nil { + return false, err + } + if !b { + return false, nil + } + } + return true, nil default: return false, fmt.Errorf( "compare: ==, incompatible type to compare, left=%T, right=%T", @@ -69,10 +118,16 @@ func compareEQ(left, right operand) (bool, error) { func compareNEQ(left, right operand) (bool, error) { switch v := left.(type) { + case *null: + _, ok := right.(*null) + if ok { + return false, nil + } + return true, nil case bool: rV, ok := right.(bool) if !ok { - return false, nil + return true, nil } if rV == v { return false, nil @@ -80,6 +135,8 @@ func compareNEQ(left, right operand) (bool, error) { return true, nil case int: switch rv := right.(type) { + case *null: + return true, nil case int: return v != rv, nil case float64: @@ -95,6 +152,8 @@ func compareNEQ(left, right operand) (bool, error) { } case float64: switch rv := right.(type) { + case *null: + return true, nil case int: return v != float64(rv), nil case float64: @@ -109,12 +168,48 @@ func compareNEQ(left, right operand) (bool, error) { case string: rV, ok := right.(string) if !ok { - return false, nil + return true, nil } if rV == v { return false, nil } return true, nil + case []interface{}: + rV, ok := right.([]interface{}) + if !ok { + return true, nil + } + if len(v) != len(rV) { + return true, nil + } + for i := range v { + b, err := compareNEQ(v[i], rV[i]) + if err != nil { + return false, err + } + if b { + return true, nil + } + } + return false, nil + case map[string]interface{}: + rV, ok := right.(map[string]interface{}) + if !ok { + return true, nil + } + if !keysEqual(v, rV) { + return true, nil + } + for i := range v { + b, err := compareNEQ(v[i], rV[i]) + if err != nil { + return false, err + } + if b { + return true, nil + } + } + return false, nil default: return false, fmt.Errorf( "compare: !=, incompatible type to compare, left=%T, right=%T", @@ -275,9 +370,56 @@ func compareGTE(left, right operand) (bool, error) { type logical func(left, right operand) (bool, error) func logicalAND(left, right operand) (bool, error) { - return left.(bool) && right.(bool), nil + switch l := left.(type) { + case bool: + 
switch r := right.(type) { + case bool: + return l && r, nil + } + } + return false, fmt.Errorf( + "and: incompatible type to and both operands must be booleans, left=%T, right=%T", + left, + right, + ) } func logicalOR(left, right operand) (bool, error) { - return left.(bool) == true || right.(bool), nil + switch l := left.(type) { + case bool: + switch r := right.(type) { + case bool: + return l || r, nil + } + } + return false, fmt.Errorf( + "and: incompatible type to and both operands must be booleans, left=%T, right=%T", + left, + right, + ) +} + +func keys(v map[string]interface{}) []string { + ks := make([]string, len(v)) + i := 0 + for k := range v { + ks[i] = k + i++ + } + sort.Strings(ks) + return ks +} + +func keysEqual(v1, v2 map[string]interface{}) bool { + ks1 := keys(v1) + ks2 := keys(v2) + if len(ks1) != len(ks2) { + return false + } + for i, v := range ks1 { + if v != ks2[i] { + return false + } + } + return true } diff --git a/x-pack/elastic-agent/pkg/boolexp/boolexp.go b/x-pack/elastic-agent/pkg/eql/eql.go similarity index 70% rename from x-pack/elastic-agent/pkg/boolexp/boolexp.go rename to x-pack/elastic-agent/pkg/eql/eql.go index 87d43ab2eb6..0993f251eaf 100644 --- a/x-pack/elastic-agent/pkg/boolexp/boolexp.go +++ b/x-pack/elastic-agent/pkg/eql/eql.go @@ -2,14 +2,14 @@ // or more contributor license agreements. Licensed under the Elastic License; // you may not use this file except in compliance with the Elastic License. -package boolexp +package eql -//go:generate antlr4 -Dlanguage=Go -o parser Boolexp.g4 -visitor +//go:generate antlr4 -Dlanguage=Go -o parser Eql.g4 -visitor // Eval takes an expression, parse and evaluate it, everytime this method is called a new // parser is created, if you want to reuse the parsed tree see the `New` method. -func Eval(expression string, methods *MethodsReg, store VarStore) (bool, error) { - e, err := New(expression, methods) +func Eval(expression string, store VarStore) (bool, error) { + e, err := New(expression) if err != nil { return false, err } diff --git a/x-pack/elastic-agent/pkg/eql/eql_test.go b/x-pack/elastic-agent/pkg/eql/eql_test.go new file mode 100644 index 00000000000..56df991b449 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/eql_test.go @@ -0,0 +1,377 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
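// A minimal usage sketch of the new eql package, assuming only the exported
// Eval signature and the VarStore.Lookup method shown in this patch: implement
// a store, then evaluate a condition against it. Missing variables resolve to
// null, so expressions like `${missing} == ${missing}` hold per the compareEQ
// rules above. mapStore and the condition string are hypothetical.
package main

import (
	"fmt"

	"github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/eql"
)

// mapStore is a hypothetical VarStore backed by a plain map.
type mapStore map[string]interface{}

// Lookup returns the value of a variable and whether it was present.
func (s mapStore) Lookup(name string) (interface{}, bool) {
	v, ok := s[name]
	return v, ok
}

func main() {
	store := mapStore{"host.name": "web-01"}
	ok, err := eql.Eval("${host.name|'unknown'} == 'web-01' and length('hi') == 2", store)
	if err != nil {
		// a parse or evaluation error, e.g. division by zero or a bad method call
		panic(err)
	}
	fmt.Println(ok) // true
}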
+ +package eql + +import ( + "fmt" + "os" + "testing" + + "github.com/antlr/antlr4/runtime/Go/antlr" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/eql/parser" +) + +var showDebug = lookupEnvOrDefault("DEBUG", "0") + +type testVarStore struct { + vars map[string]interface{} +} + +func (s *testVarStore) Lookup(v string) (interface{}, bool) { + val, ok := s.vars[v] + return val, ok +} + +func TestEql(t *testing.T) { + testcases := []struct { + expression string + result bool + err bool + }{ + // variables + {expression: "${env.HOSTNAME|host.name|'fallback'} == 'my-hostname'", result: true}, + {expression: "${env.MISSING|host.name|'fallback'} == 'host-name'", result: true}, + {expression: "${env.MISSING|host.MISSING|'fallback'} == 'fallback'", result: true}, + {expression: "${env.MISSING|host.MISSING|2} == 2", result: true}, + {expression: "${env.MISSING|host.MISSING|2.0} == 2.0", result: true}, + {expression: "${env.MISSING|host.MISSING|true} == true", result: true}, + {expression: "${env.MISSING|host.MISSING|false} == false", result: true}, + {expression: "${'constant'} == 'constant'", result: true}, + + // boolean + {expression: "true", result: true}, + {expression: "false", result: false}, + + // equal + {expression: "'hello' == 'hello'", result: true}, + {expression: "'hello' == 'other'", result: false}, + {expression: "'other' == 'hello'", result: false}, + {expression: "1 == 1", result: true}, + {expression: "1 == 2", result: false}, + {expression: "2 == 1", result: false}, + {expression: "1.0 == 1", result: true}, + {expression: "1.1 == 1", result: false}, + {expression: "1 == 1.1", result: false}, + {expression: "true == true", result: true}, + {expression: "true == false", result: false}, + {expression: "false == false", result: true}, + {expression: "true == false", result: false}, + {expression: "${missing} == ${missing}", result: true}, + {expression: "${missing} == false", result: false}, + {expression: "false == ${missing}", result: false}, + + // not equal + {expression: "'hello' != 'hello'", result: false}, + {expression: "'hello' != 'other'", result: true}, + {expression: "'other' != 'hello'", result: true}, + {expression: "1 != 1", result: false}, + {expression: "1 != 2", result: true}, + {expression: "2 != 1", result: true}, + {expression: "1.0 != 1", result: false}, + {expression: "1.1 != 1", result: true}, + {expression: "1 != 1.1", result: true}, + {expression: "true != true", result: false}, + {expression: "true != false", result: true}, + {expression: "false != false", result: false}, + {expression: "true != false", result: true}, + {expression: "${missing} != ${missing}", result: false}, + {expression: "${missing} != false", result: true}, + {expression: "false != ${missing}", result: true}, + + // gt + {expression: "1 > 5", result: false}, + {expression: "10 > 5", result: true}, + {expression: "10 > 10", result: false}, + {expression: "1.1 > 5", result: false}, + {expression: "10.1 > 5", result: true}, + {expression: "1 > 5.0", result: false}, + {expression: "10 > 5.0", result: true}, + {expression: "10.1 > 10.1", result: false}, + + // lt + {expression: "1 < 5", result: true}, + {expression: "10 < 5", result: false}, + {expression: "10 < 10", result: false}, + {expression: "1.1 < 5", result: true}, + {expression: "10.1 < 5", result: false}, + {expression: "1 < 5.0", result: true}, + {expression: "10 < 5.0", result: false}, + {expression: "10.1 < 
10.1", result: false}, + + // gte + {expression: "1 >= 5", result: false}, + {expression: "10 >= 5", result: true}, + {expression: "10 >= 10", result: true}, + {expression: "1.1 >= 5", result: false}, + {expression: "10.1 >= 5", result: true}, + {expression: "1 >= 5.0", result: false}, + {expression: "10 >= 5.0", result: true}, + {expression: "10.1 >= 10.1", result: true}, + + // lte + {expression: "1 <= 5", result: true}, + {expression: "10 <= 5", result: false}, + {expression: "10 <= 10", result: true}, + {expression: "1.1 <= 5", result: true}, + {expression: "10.1 <= 5", result: false}, + {expression: "1 <= 5.0", result: true}, + {expression: "10 <= 5.0", result: false}, + {expression: "10.1 <= 10.1", result: true}, + + // math (pemdas) + {expression: "4 * (5 + 3) == 32", result: true}, + {expression: "4 * 5 + 3 == 23", result: true}, + {expression: "2 + 5 * 3 == 17", result: true}, + {expression: "(2 + 5) * 3 == 21", result: true}, + {expression: "30 / 5 * 3 == 18", result: true}, + {expression: "30 / (5 * 3) == 2", result: true}, + {expression: "(18 / 6 * 5) - 14 / 7 == 13", result: true}, + {expression: "(18 / 6 * 5) - 14 / 7 == 13", result: true}, + {expression: "1.0 / 2 * 6 == 3", result: true}, + {expression: "24.0 / (-2 * -6) == 2", result: true}, + {expression: "24.0 / 0 == 0", err: true}, + {expression: "-4 * (5 + 3) == -32", result: true}, + {expression: "-4 * 5 + 3 == -17", result: true}, + {expression: "-24.0 / (2 * 6) == -2", result: true}, + {expression: "-24.0 / (5 % 3) == -12", result: true}, + {expression: "-24 % 5 * 3 == -12", result: true}, + + // not + {expression: "not false", result: true}, + {expression: "not true", result: false}, + {expression: "not (1 == 1)", result: false}, + {expression: "not (1 != 1)", result: true}, + {expression: "NOT false", result: true}, + {expression: "NOT true", result: false}, + {expression: "NOT (1 == 1)", result: false}, + {expression: "NOT (1 != 1)", result: true}, + + // and + {expression: "(1 == 1) and (2 == 2)", result: true}, + {expression: "(1 == 4) and (2 == 2)", result: false}, + {expression: "(1 == 1) and (2 == 3)", result: false}, + {expression: "(1 == 5) and (2 == 3)", result: false}, + {expression: "(1 == 1) AND (2 == 2)", result: true}, + {expression: "(1 == 4) AND (2 == 2)", result: false}, + {expression: "(1 == 1) AND (2 == 3)", result: false}, + {expression: "(1 == 5) AND (2 == 3)", result: false}, + {expression: "1 == 1 AND 2 == 2", result: true}, + {expression: "1 == 4 AND 2 == 2", result: false}, + {expression: "1 == 1 AND 2 == 3", result: false}, + {expression: "1 == 5 AND 2 == 3", result: false}, + {expression: "1 == 1 and 2 == 2", result: true}, + {expression: "1 == 4 and 2 == 2", result: false}, + {expression: "1 == 1 and 2 == 3", result: false}, + {expression: "1 == 5 and 2 == 3", result: false}, + + // or + {expression: "(1 == 1) OR (2 == 2)", result: true}, + {expression: "(1 == 1) OR (3 == 2)", result: true}, + {expression: "(1 == 2) OR (2 == 2)", result: true}, + {expression: "(1 == 2) OR (2 == 2)", result: true}, + {expression: "(1 == 2) OR (1 == 2)", result: false}, + {expression: "(1 == 1) or (2 == 2)", result: true}, + {expression: "(1 == 1) or (3 == 2)", result: true}, + {expression: "(1 == 2) or (2 == 2)", result: true}, + {expression: "(1 == 2) or (2 == 2)", result: true}, + {expression: "(1 == 2) or (1 == 2)", result: false}, + + // mixed + {expression: "((1 == 1) AND (2 == 2)) OR (2 != 3)", result: true}, + {expression: "(1 == 1 OR 2 == 2) AND 2 != 3", result: true}, + {expression: "((1 == 1) 
AND (2 == 2)) OR (2 != 3)", result: true}, + {expression: "1 == 1 OR 2 == 2 AND 2 != 3", result: true}, + + // arrays + {expression: "[true, false, 1, 1.0, 'test'] == [true, false, 1, 1.0, 'test']", result: true}, + {expression: "[true, false, 1, 1.0, 'test'] == [true, false, 1, 1.1, 'test']", result: false}, + {expression: "[true, false, 1, 1.0, 'test'] != [true, false, 1, 1.0, 'test']", result: false}, + {expression: "[true, false, 1, 1.0, 'test'] != [true, false, 1, 1.1, 'test']", result: true}, + + // dict + {expression: `{bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"} == {bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"}`, result: true}, + {expression: `{bt: true, bf: false, number: 1, float: 1.0, st: 'test'} == {bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"}`, result: false}, + {expression: `{bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "other"} == {bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"}`, result: false}, + {expression: `{bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt2: "test"} == {bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"}`, result: false}, + {expression: `{bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"} != {bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"}`, result: false}, + {expression: `{bt: true, bf: false, number: 1, float: 1.0, st: 'test'} != {bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"}`, result: true}, + {expression: `{bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "other"} != {bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"}`, result: true}, + {expression: `{bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt2: "test"} != {bt: true, bf: false, number: 1, float: 1.0, st: 'test', dt: "test"}`, result: true}, + + // methods array + {expression: "arrayContains([true, 1, 3.5, 'str'], 1)", result: true}, + {expression: "arrayContains([true, 1, 3.5, 'str'], 2)", result: false}, + {expression: "arrayContains([true, 1, 3.5, 'str'], 'str')", result: true}, + {expression: "arrayContains([true, 1, 3.5, 'str'], 'str2')", result: false}, + {expression: "arrayContains([true, 1, 3.5, 'str'], 'str2', 3.5)", result: true}, + {expression: "arrayContains(${null.data}, 'str2', 3.5)", result: false}, + {expression: "arrayContains(${data.array}, 'array5', 'array2')", result: true}, + {expression: "arrayContains('not array', 'str2')", err: true}, + + // methods dict + {expression: "hasKey({key1: 'val1', key2: 'val2'}, 'key2')", result: true}, + {expression: "hasKey({key1: 'val1', key2: 'val2'}, 'other', 'key1')", result: true}, + {expression: "hasKey({key1: 'val1', key2: 'val2'}, 'missing', 'still')", result: false}, + {expression: "hasKey(${data.dict}, 'key3', 'still')", result: true}, + {expression: "hasKey(${null}, 'key3', 'still')", result: false}, + {expression: "hasKey(${data.dict})", err: true}, + {expression: "hasKey(${data.array}, 'not present')", err: true}, + + // methods length + {expression: "length('hello') == 5", result: true}, + {expression: "length([true, 1, 3.5, 'str']) == 4", result: true}, + {expression: "length({key: 'data', other: '2'}) == 2", result: true}, + {expression: "length(${data.dict}) == 3", result: true}, + {expression: "length(${null}) == 0", result: true}, + {expression: "length(4) == 2", err: true}, + {expression: "length('hello', 'too many args') == 2", err: true}, + + // methods math + {expression: "add(2, 2) == 4", 
result: true}, + {expression: "add(2.2, 2.2) == 4.4", result: true}, + {expression: "add(2) == 4", err: true}, + {expression: "add(2, 2, 2) == 4", err: true}, + {expression: "add('str', 'str') == 4", err: true}, + {expression: "subtract(2, 2) == 0", result: true}, + {expression: "subtract(2.2, 2.2) == 0", result: true}, + {expression: "subtract(2) == 0", err: true}, + {expression: "subtract(2, 2, 2) == 0", err: true}, + {expression: "subtract('str', 'str') == 0", err: true}, + {expression: "multiply(4, 2) == 8", result: true}, + {expression: "multiply(4.2, 2) == 8.4", result: true}, + {expression: "multiply(4) == 4", err: true}, + {expression: "multiply(2, 2, 2) == 4", err: true}, + {expression: "multiply('str', 'str') == 4", err: true}, + {expression: "divide(8, 2) == 4", result: true}, + {expression: "divide(4.2, 2) == 2.1", result: true}, + {expression: "divide(4.2, 0) == 2.1", err: true}, + {expression: "divide(4) == 4", err: true}, + {expression: "divide(2, 2, 2) == 4", err: true}, + {expression: "divide('str', 'str') == 4", err: true}, + {expression: "modulo(8, 3) == 2", result: true}, + {expression: "modulo(8, 0) == 2", err: true}, + {expression: "modulo(4.2, 2) == 1.2", err: true}, + {expression: "modulo(4) == 4", err: true}, + {expression: "modulo(2, 2, 2) == 4", err: true}, + {expression: "modulo('str', 'str') == 4", err: true}, + + // methods str + {expression: "concat('hello ', 2, ' the world') == 'hello 2 the world'", result: true}, + {expression: "concat('h', 2, 2.0, ['a', 'b'], true, {key: 'value'}) == 'h22E+00[a,b]true{key:value}'", result: true}, + {expression: "endsWith('hello world', 'world')", result: true}, + {expression: "endsWith('hello world', 'wor')", result: false}, + {expression: "endsWith('hello world', 'world', 'too many args')", err: true}, + {expression: "endsWith('not enough')", err: true}, + {expression: "indexOf('elastic.co', '.') == 7", result: true}, + {expression: "indexOf('elastic-agent.elastic.co', '.', 15) == 21", result: true}, + {expression: "indexOf('elastic-agent.elastic.co', '.', 15.2) == 21", err: true}, + {expression: "indexOf('elastic-agent.elastic.co', '.', 'not int') == 21", err: true}, + {expression: "indexOf('elastic-agent.elastic.co', '.', '15, 'too many args') == 21", err: true}, + {expression: "match('elastic.co', '[a-z]+.[a-z]{2}')", result: true}, + {expression: "match('elastic.co', '[a-z]+', '[a-z]+.[a-z]{2}')", result: true}, + {expression: "match('not enough')", err: true}, + {expression: "match('elastic.co', '[a-z')", err: true}, + {expression: "number('002020') == 2020", result: true}, + {expression: "number('0xdeadbeef', 16) == 3735928559", result: true}, + {expression: "number('not a number') == 'not'", err: true}, + {expression: "number('0xdeadbeef', 16, 2) == 'too many args'", err: true}, + {expression: "startsWith('hello world', 'hello')", result: true}, + {expression: "startsWith('hello world', 'llo')", result: false}, + {expression: "startsWith('hello world', 'hello', 'too many args')", err: true}, + {expression: "startsWith('not enough')", err: true}, + {expression: "string('str') == 'str'", result: true}, + {expression: "string(2) == '2'", result: true}, + {expression: "string(2.0) == '2E+00'", result: true}, + {expression: "string(true) == 'true'", result: true}, + {expression: "string(false) == 'false'", result: true}, + {expression: "string(['a', 'b']) == '[a,b]'", result: true}, + {expression: "string({key:'value'}) == '{key:value}'", result: true}, + {expression: "string(2, 'too many') == '2'", err: true}, + 
{expression: "stringContains('hello world', 'o w')", result: true}, + {expression: "stringContains('hello world', 'rol')", result: false}, + {expression: "stringContains('hello world', 'o w', 'too many')", err: true}, + {expression: "stringContains(0, 'o w', 'too many')", err: true}, + {expression: "stringContains('hello world', 0)", err: true}, + + // Bad expression and malformed expression + {expression: "length('hello')", err: true}, + {expression: "length()", err: true}, + {expression: "donotexist()", err: true}, + } + + store := &testVarStore{ + vars: map[string]interface{}{ + "env.HOSTNAME": "my-hostname", + "host.name": "host-name", + "data.array": []interface{}{"array1", "array2", "array3"}, + "data.dict": map[string]interface{}{ + "key1": "dict1", + "key2": "dict2", + "key3": "dict3", + }, + }, + } + + for _, test := range testcases { + test := test + var title string + if test.err { + title = fmt.Sprintf("%s failed parsing", test.expression) + } else { + title = fmt.Sprintf("%s => return %v", test.expression, test.result) + } + t.Run(title, func(t *testing.T) { + if showDebug == "1" { + debug(test.expression) + } + + r, err := Eval(test.expression, store) + + if test.err { + require.Error(t, err) + return + } + + require.NoError(t, err) + assert.Equal(t, test.result, r) + }) + } +} + +func debug(expression string) { + raw := antlr.NewInputStream(expression) + + lexer := parser.NewEqlLexer(raw) + for { + t := lexer.NextToken() + if t.GetTokenType() == antlr.TokenEOF { + break + } + fmt.Printf("%s (%q)\n", + lexer.SymbolicNames[t.GetTokenType()], t.GetText()) + } +} + +var result bool + +func BenchmarkEval(b *testing.B) { + expression, _ := New("(length('hello') == 5) AND (length('Hi') == 2)") + + var r bool + for n := 0; n < b.N; n++ { + r, _ = expression.Eval(nil) + } + result = r +} + +func lookupEnvOrDefault(name, d string) string { + if v, ok := os.LookupEnv(name); ok { + return v + } + return d +} diff --git a/x-pack/elastic-agent/pkg/boolexp/expression.go b/x-pack/elastic-agent/pkg/eql/expression.go similarity index 79% rename from x-pack/elastic-agent/pkg/boolexp/expression.go rename to x-pack/elastic-agent/pkg/eql/expression.go index 69d30603d5c..eb0d5b08b4b 100644 --- a/x-pack/elastic-agent/pkg/boolexp/expression.go +++ b/x-pack/elastic-agent/pkg/eql/expression.go @@ -2,7 +2,7 @@ // or more contributor license agreements. Licensed under the Elastic License; // you may not use this file except in compliance with the Elastic License. -package boolexp +package eql import ( "errors" @@ -10,17 +10,17 @@ import ( "github.com/antlr/antlr4/runtime/Go/antlr" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/boolexp/parser" + "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/eql/parser" ) // VarStore is the interface to implements when you want the expression engine to be able to fetch // the value of a variables. Variables are defined using the field reference syntax likes -/// this: `%{[hello.var]}`. +// this: `${hello.var|other.var|'constant'}`. type VarStore interface { // Lookup allows to lookup a value of a variable from the store, the lookup method will received // the name of variable like this. 
// - // %{[hello.var]} => hello.var + // ${hello.var|other.var} => hello.var, followed by other.var if hello.var is not found Lookup(string) (interface{}, bool) } @@ -33,7 +33,6 @@ var ( type Expression struct { expression string tree antlr.ParseTree - methodsReg *MethodsReg vars VarStore } @@ -48,7 +47,7 @@ func (e *Expression) Eval(store VarStore) (result bool, err error) { } }() - visitor := &expVisitor{methodsReg: e.methodsReg, vars: store} + visitor := &expVisitor{vars: store} r := visitor.Visit(e.tree) if visitor.err != nil { @@ -59,18 +58,18 @@ func (e *Expression) Eval(store VarStore) (result bool, err error) { } // New create a new boolean expression parser will return an error if the expression if invalid. -func New(expression string, methods *MethodsReg) (*Expression, error) { +func New(expression string) (*Expression, error) { if len(expression) == 0 { return nil, ErrEmptyExpression } input := antlr.NewInputStream(expression) - lexer := parser.NewBoolexpLexer(input) + lexer := parser.NewEqlLexer(input) lexer.RemoveErrorListeners() tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel) - p := parser.NewBoolexpParser(tokens) + p := parser.NewEqlParser(tokens) p.RemoveErrorListeners() tree := p.ExpList() - return &Expression{expression: expression, tree: tree, methodsReg: methods}, nil + return &Expression{expression: expression, tree: tree}, nil } diff --git a/x-pack/elastic-agent/pkg/eql/math.go b/x-pack/elastic-agent/pkg/eql/math.go new file mode 100644 index 00000000000..6808c577c47 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/math.go @@ -0,0 +1,212 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package eql + +import "fmt" + +func mathAdd(left, right operand) (interface{}, error) { + switch v := left.(type) { + case int: + switch rv := right.(type) { + case int: + return v + rv, nil + case float64: + return float64(v) + rv, nil + default: + return 0, fmt.Errorf( + "math: +, incompatible type to add both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } + case float64: + switch rv := right.(type) { + case int: + return v + float64(rv), nil + case float64: + return v + rv, nil + default: + return 0, fmt.Errorf( + "math: +, incompatible type to add both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } + default: + return 0, fmt.Errorf( + "math: +, incompatible type to add both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } +} + +func mathSub(left, right operand) (interface{}, error) { + switch v := left.(type) { + case int: + switch rv := right.(type) { + case int: + return v - rv, nil + case float64: + return float64(v) - rv, nil + default: + return 0, fmt.Errorf( + "math: -, incompatible type to subtract both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } + case float64: + switch rv := right.(type) { + case int: + return v - float64(rv), nil + case float64: + return v - rv, nil + default: + return 0, fmt.Errorf( + "math: -, incompatible type to subtract both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } + default: + return 0, fmt.Errorf( + "math: -, incompatible type to subtract both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } +} + +func mathMul(left, right operand) (interface{}, error) { + switch v := left.(type) { + case int: + switch rv := right.(type) { + case int: + return v * rv, nil + case float64: + return float64(v) * rv, nil + default: + return 0, fmt.Errorf( + "math: *, incompatible type to multiply both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } + case float64: + switch rv := right.(type) { + case int: + return v * float64(rv), nil + case float64: + return v * rv, nil + default: + return 0, fmt.Errorf( + "math: *, incompatible type to multiply both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } + default: + return 0, fmt.Errorf( + "math: *, incompatible type to multiply both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } +} + +func mathDiv(left, right operand) (interface{}, error) { + switch v := left.(type) { + case int: + switch rv := right.(type) { + case int: + if rv == 0 { + return 0, fmt.Errorf( + "math: /, division by zero, left=%T, right=%T", + left, + right, + ) + } + return v / rv, nil + case float64: + if rv == 0 { + return 0, fmt.Errorf( + "math: /, division by zero, left=%T, right=%T", + left, + right, + ) + } + return float64(v) / rv, nil + default: + return 0, fmt.Errorf( + "math: /, incompatible type to divide both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } + case float64: + switch rv := right.(type) { + case int: + if rv == 0 { + return 0, fmt.Errorf( + "math: /, division by zero, left=%T, right=%T", + left, + right, + ) + } + return v / float64(rv), nil + case float64: + if rv == 0 { + return 0, fmt.Errorf( + "math: /, division by zero, left=%T, right=%T", + left, + right, + ) + } + return v / rv, nil + default: + return 0, fmt.Errorf( + "math: /, incompatible type to divide both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } + default: + return 0, fmt.Errorf( + "math: /, 
incompatible type to divide both operands must be numbers, left=%T, right=%T", + left, + right, + ) + } +} + +func mathMod(left, right operand) (interface{}, error) { + switch v := left.(type) { + case int: + switch rv := right.(type) { + case int: + if rv == 0 { + return 0, fmt.Errorf( + "math: %%, division by zero, left=%T, right=%T", + left, + right, + ) + } + return v % rv, nil + default: + return 0, fmt.Errorf( + "math: %%, incompatible type to modulus both operands must be integers, left=%T, right=%T", + left, + right, + ) + } + default: + return 0, fmt.Errorf( + "math: %%, incompatible type to modulus both operands must be integers, left=%T, right=%T", + left, + right, + ) + } +} diff --git a/x-pack/elastic-agent/pkg/eql/methods.go b/x-pack/elastic-agent/pkg/eql/methods.go new file mode 100644 index 00000000000..ac803a0ff83 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/methods.go @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package eql + +// callFunc is a function called while the expression evaluation is done, the function is responsible +// of doing the type conversion and allow checking the arity of the function. +type callFunc func(args []interface{}) (interface{}, error) + +// methods are the methods enabled in EQL. +var methods = map[string]callFunc{ + // array + "arrayContains": arrayContains, + + // dict + "hasKey": hasKey, + + // length: + "length": length, + + // math + "add": add, + "subtract": subtract, + "multiply": multiply, + "divide": divide, + "modulo": modulo, + + // str + "concat": concat, + "endsWith": endsWith, + "indexOf": indexOf, + "match": match, + "number": number, + "startsWith": startsWith, + "string": str, + "stringContains": stringContains, +} diff --git a/x-pack/elastic-agent/pkg/eql/methods_array.go b/x-pack/elastic-agent/pkg/eql/methods_array.go new file mode 100644 index 00000000000..2d07e4d105c --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/methods_array.go @@ -0,0 +1,31 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package eql + +import ( + "fmt" + "reflect" +) + +// arrayContains check if value is a member of the array. +func arrayContains(args []interface{}) (interface{}, error) { + if len(args) < 2 { + return nil, fmt.Errorf("arrayContains: accepts minimum 2 arguments; recieved %d", len(args)) + } + switch a := args[0].(type) { + case *null: + return false, nil + case []interface{}: + for _, check := range args[1:] { + for _, i := range a { + if reflect.DeepEqual(i, check) { + return true, nil + } + } + } + return false, nil + } + return nil, fmt.Errorf("arrayContains: first argument must be an array; recieved %T", args[0]) +} diff --git a/x-pack/elastic-agent/pkg/eql/methods_dict.go b/x-pack/elastic-agent/pkg/eql/methods_dict.go new file mode 100644 index 00000000000..c49cd7278b7 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/methods_dict.go @@ -0,0 +1,32 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package eql + +import "fmt" + +// hasKey check if dict has anyone of the provided keys. +func hasKey(args []interface{}) (interface{}, error) { + if len(args) < 2 { + return nil, fmt.Errorf("hasKey: accepts minimum 2 arguments; recieved %d", len(args)) + } + switch d := args[0].(type) { + case *null: + return false, nil + case map[string]interface{}: + for i, check := range args[1:] { + switch c := check.(type) { + case string: + _, ok := d[c] + if ok { + return true, nil + } + default: + return nil, fmt.Errorf("hasKey: %d argument must be a string; recieved %T", i+1, check) + } + } + return false, nil + } + return nil, fmt.Errorf("hasKey: first argument must be a dictionary; recieved %T", args[0]) +} diff --git a/x-pack/elastic-agent/pkg/eql/methods_length.go b/x-pack/elastic-agent/pkg/eql/methods_length.go new file mode 100644 index 00000000000..026da65261f --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/methods_length.go @@ -0,0 +1,25 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package eql + +import "fmt" + +// length returns the length of the string, array, or dictionary +func length(args []interface{}) (interface{}, error) { + if len(args) != 1 { + return nil, fmt.Errorf("length: accepts exactly 1 argument; recieved %d", len(args)) + } + switch a := args[0].(type) { + case *null: + return 0, nil + case string: + return len(a), nil + case []interface{}: + return len(a), nil + case map[string]interface{}: + return len(a), nil + } + return nil, fmt.Errorf("length: accepts only a string, array, or dictionary; recieved %T", args[0]) +} diff --git a/x-pack/elastic-agent/pkg/eql/methods_math.go b/x-pack/elastic-agent/pkg/eql/methods_math.go new file mode 100644 index 00000000000..508f73ae47c --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/methods_math.go @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package eql + +import "fmt" + +// add performs x + y +func add(args []interface{}) (interface{}, error) { + if len(args) != 2 { + return nil, fmt.Errorf("add: accepts exactly 2 arguments; recieved %d", len(args)) + } + return mathAdd(args[0], args[1]) +} + +// subtract performs x - y +func subtract(args []interface{}) (interface{}, error) { + if len(args) != 2 { + return nil, fmt.Errorf("subtract: accepts exactly 2 arguments; recieved %d", len(args)) + } + return mathSub(args[0], args[1]) +} + +// multiply performs x * y +func multiply(args []interface{}) (interface{}, error) { + if len(args) != 2 { + return nil, fmt.Errorf("multiply: accepts exactly 2 arguments; recieved %d", len(args)) + } + return mathMul(args[0], args[1]) +} + +// divide performs x / y +func divide(args []interface{}) (interface{}, error) { + if len(args) != 2 { + return nil, fmt.Errorf("divide: accepts exactly 2 arguments; recieved %d", len(args)) + } + return mathDiv(args[0], args[1]) +} + +// modulo performs x % y +func modulo(args []interface{}) (interface{}, error) { + if len(args) != 2 { + return nil, fmt.Errorf("modulo: accepts exactly 2 arguments; recieved %d", len(args)) + } + return mathMod(args[0], args[1]) +} diff --git a/x-pack/elastic-agent/pkg/eql/methods_str.go b/x-pack/elastic-agent/pkg/eql/methods_str.go new file mode 100644 index 00000000000..781e193d924 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/methods_str.go @@ -0,0 +1,186 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package eql + +import ( + "fmt" + "regexp" + "strconv" + "strings" +) + +// concat concatenates the arguments into a string +func concat(args []interface{}) (interface{}, error) { + var sb strings.Builder + for _, arg := range args { + sb.WriteString(toString(arg)) + } + return sb.String(), nil +} + +// endsWith returns true if the string ends with given suffix +func endsWith(args []interface{}) (interface{}, error) { + if len(args) != 2 { + return nil, fmt.Errorf("endsWith: accepts exactly 2 arguments; recieved %d", len(args)) + } + input, iOk := args[0].(string) + suffix, sOk := args[1].(string) + if !iOk || !sOk { + return nil, fmt.Errorf("endsWith: accepts exactly 2 string arguments; recieved %T and %T", args[0], args[1]) + } + return strings.HasSuffix(input, suffix), nil +} + +// indexOf returns the starting index of substring +func indexOf(args []interface{}) (interface{}, error) { + if len(args) < 2 || len(args) > 3 { + return nil, fmt.Errorf("indexOf: accepts 2-3 arguments; recieved %d", len(args)) + } + input, iOk := args[0].(string) + substring, sOk := args[1].(string) + if !iOk || !sOk { + return nil, fmt.Errorf("indexOf: argument 0 and 1 must be a string; recieved %T and %T", args[0], args[1]) + } + start := 0 + if len(args) > 2 { + s, sOk := args[2].(int) + if !sOk { + return nil, fmt.Errorf("indexOf: argument 2 must be a integer; recieved %T", args[2]) + } + start = s + } + return start + strings.Index(input[start:], substring), nil +} + +// match returns true if the string matches any of the provided regular expressions +func match(args []interface{}) (interface{}, error) { + if len(args) < 2 { + return nil, fmt.Errorf("match: accepts minimum of 2 arguments; recieved %d", len(args)) + } + input, iOk := args[0].(string) + if !iOk { + return nil, fmt.Errorf("match: argument 0 must be a string; recieved %T", args[0]) + 
} + for i, reg := range args[1:] { + switch r := reg.(type) { + case string: + exp, err := regexp.Compile(r) + if err != nil { + return nil, fmt.Errorf("match: failed to compile regexp: %s", err) + } + if exp.Match([]byte(input)) { + return true, nil + } + default: + return nil, fmt.Errorf("match: argument %d must be a string; recieved %T", i+1, reg) + } + } + return false, nil +} + +// number converts the string into a integer +func number(args []interface{}) (interface{}, error) { + if len(args) < 1 || len(args) > 2 { + return nil, fmt.Errorf("number: accepts between 1-2 arguments; recieved %d", len(args)) + } + input, iOk := args[0].(string) + if !iOk { + return nil, fmt.Errorf("number: argument 0 must be a string; recieved %T", args[0]) + } + base := 10 + if len(args) > 1 { + switch a := args[1].(type) { + case int: + base = a + default: + return nil, fmt.Errorf("number: argument 1 must be an integer; recieved %T", args[1]) + } + } + if strings.HasPrefix(input, "0x") { + input = input[2:] + } + n, err := strconv.ParseInt(input, base, 64) + if err != nil { + return nil, fmt.Errorf("number: failed to convert '%s' to integer", input) + } + return int(n), nil +} + +// startsWith returns true if the string starts with given prefix +func startsWith(args []interface{}) (interface{}, error) { + if len(args) != 2 { + return nil, fmt.Errorf("startsWith: accepts exactly 2 arguments; recieved %d", len(args)) + } + input, iOk := args[0].(string) + prefix, pOk := args[1].(string) + if !iOk || !pOk { + return nil, fmt.Errorf("startsWith: accepts exactly 2 string arguments; recieved %T and %T", args[0], args[1]) + } + return strings.HasPrefix(input, prefix), nil +} + +// str converts the argument into a string +func str(args []interface{}) (interface{}, error) { + if len(args) != 1 { + return nil, fmt.Errorf("string: accepts exactly 1 argument; recieved %d", len(args)) + } + return toString(args[0]), nil +} + +// stringContains returns true if the string contains substring +func stringContains(args []interface{}) (interface{}, error) { + if len(args) != 2 { + return nil, fmt.Errorf("stringContains: accepts exactly 2 arguments; recieved %d", len(args)) + } + input, iOk := args[0].(string) + substr, sOk := args[1].(string) + if !iOk || !sOk { + return nil, fmt.Errorf("stringContains: accepts exactly 2 string arguments; recieved %T and %T", args[0], args[1]) + } + return strings.Contains(input, substr), nil +} + +func toString(arg interface{}) string { + switch a := arg.(type) { + case *null: + return "null" + case string: + return a + case int: + return strconv.Itoa(a) + case float64: + return strconv.FormatFloat(a, 'E', -1, 64) + case bool: + return strconv.FormatBool(a) + case []interface{}: + var sb strings.Builder + sb.WriteString("[") + for idx, item := range a { + sb.WriteString(toString(item)) + if idx < len(a)-1 { + sb.WriteString(",") + } + } + sb.WriteString("]") + return sb.String() + case map[string]interface{}: + var sb strings.Builder + sb.WriteString("{") + idx := 0 + for k, v := range a { + sb.WriteString(k) + sb.WriteString(":") + sb.WriteString(toString(v)) + if idx < len(a)-1 { + sb.WriteString(",") + } + idx++ + } + sb.WriteString("}") + return sb.String() + default: + return fmt.Sprintf("%s", a) + } +} diff --git a/x-pack/elastic-agent/pkg/eql/parser/Eql.interp b/x-pack/elastic-agent/pkg/eql/parser/Eql.interp new file mode 100644 index 00000000000..08538b25565 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/parser/Eql.interp @@ -0,0 +1,87 @@ +token literal names: +null +'|' +',' 
+':' +'==' +'!=' +'>' +'<' +'>=' +'<=' +'+' +'-' +'*' +'/' +'%' +null +null +null +null +null +null +null +null +null +null +null +null +'(' +')' +'[' +']' +'{' +'}' +'${' + +token symbolic names: +null +null +null +null +EQ +NEQ +GT +LT +GTE +LTE +ADD +SUB +MUL +DIV +MOD +AND +OR +TRUE +FALSE +FLOAT +NUMBER +WHITESPACE +NOT +NAME +VNAME +STEXT +DTEXT +LPAR +RPAR +LARR +RARR +LDICT +RDICT +BEGIN_VARIABLE + +rule names: +expList +boolean +constant +variable +variableExp +exp +arguments +array +key +dict + + +atn: +[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 35, 144, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 33, 10, 4, 3, 5, 3, 5, 3, 5, 5, 5, 38, 10, 5, 3, 6, 3, 6, 3, 6, 7, 6, 43, 10, 6, 12, 6, 14, 6, 46, 11, 6, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 5, 7, 63, 10, 7, 3, 7, 3, 7, 3, 7, 5, 7, 68, 10, 7, 3, 7, 3, 7, 3, 7, 5, 7, 73, 10, 7, 3, 7, 3, 7, 3, 7, 3, 7, 5, 7, 79, 10, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 7, 7, 111, 10, 7, 12, 7, 14, 7, 114, 11, 7, 3, 8, 3, 8, 3, 8, 7, 8, 119, 10, 8, 12, 8, 14, 8, 122, 11, 8, 3, 9, 3, 9, 3, 9, 7, 9, 127, 10, 9, 12, 9, 14, 9, 130, 11, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 7, 11, 139, 10, 11, 12, 11, 14, 11, 142, 11, 11, 3, 11, 2, 3, 12, 12, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 2, 7, 3, 2, 19, 20, 3, 2, 27, 28, 3, 2, 14, 16, 3, 2, 12, 13, 4, 2, 25, 25, 27, 28, 2, 165, 2, 22, 3, 2, 2, 2, 4, 25, 3, 2, 2, 2, 6, 32, 3, 2, 2, 2, 8, 37, 3, 2, 2, 2, 10, 39, 3, 2, 2, 2, 12, 78, 3, 2, 2, 2, 14, 115, 3, 2, 2, 2, 16, 123, 3, 2, 2, 2, 18, 131, 3, 2, 2, 2, 20, 135, 3, 2, 2, 2, 22, 23, 5, 12, 7, 2, 23, 24, 7, 2, 2, 3, 24, 3, 3, 2, 2, 2, 25, 26, 9, 2, 2, 2, 26, 5, 3, 2, 2, 2, 27, 33, 7, 27, 2, 2, 28, 33, 7, 28, 2, 2, 29, 33, 7, 21, 2, 2, 30, 33, 7, 22, 2, 2, 31, 33, 5, 4, 3, 2, 32, 27, 3, 2, 2, 2, 32, 28, 3, 2, 2, 2, 32, 29, 3, 2, 2, 2, 32, 30, 3, 2, 2, 2, 32, 31, 3, 2, 2, 2, 33, 7, 3, 2, 2, 2, 34, 38, 7, 25, 2, 2, 35, 38, 7, 26, 2, 2, 36, 38, 5, 6, 4, 2, 37, 34, 3, 2, 2, 2, 37, 35, 3, 2, 2, 2, 37, 36, 3, 2, 2, 2, 38, 9, 3, 2, 2, 2, 39, 44, 5, 8, 5, 2, 40, 41, 7, 3, 2, 2, 41, 43, 5, 8, 5, 2, 42, 40, 3, 2, 2, 2, 43, 46, 3, 2, 2, 2, 44, 42, 3, 2, 2, 2, 44, 45, 3, 2, 2, 2, 45, 11, 3, 2, 2, 2, 46, 44, 3, 2, 2, 2, 47, 48, 8, 7, 1, 2, 48, 49, 7, 29, 2, 2, 49, 50, 5, 12, 7, 2, 50, 51, 7, 30, 2, 2, 51, 79, 3, 2, 2, 2, 52, 53, 7, 24, 2, 2, 53, 79, 5, 12, 7, 19, 54, 79, 5, 4, 3, 2, 55, 56, 7, 35, 2, 2, 56, 57, 5, 10, 6, 2, 57, 58, 7, 34, 2, 2, 58, 79, 3, 2, 2, 2, 59, 60, 7, 25, 2, 2, 60, 62, 7, 29, 2, 2, 61, 63, 5, 14, 8, 2, 62, 61, 3, 2, 2, 2, 62, 63, 3, 2, 2, 2, 63, 64, 3, 2, 2, 2, 64, 79, 7, 30, 2, 2, 65, 67, 7, 31, 2, 2, 66, 68, 5, 16, 9, 2, 67, 66, 3, 2, 2, 2, 67, 68, 3, 2, 2, 2, 68, 69, 3, 2, 2, 2, 69, 79, 7, 32, 2, 2, 70, 72, 7, 33, 2, 2, 71, 73, 5, 20, 11, 2, 72, 71, 3, 2, 2, 2, 72, 73, 3, 2, 2, 2, 73, 74, 3, 2, 2, 2, 74, 79, 7, 34, 2, 2, 75, 79, 9, 3, 2, 2, 76, 79, 7, 21, 2, 2, 77, 79, 7, 22, 2, 2, 78, 47, 3, 2, 2, 2, 78, 52, 3, 2, 2, 2, 78, 54, 3, 2, 2, 2, 78, 55, 3, 2, 2, 2, 78, 59, 3, 2, 2, 2, 78, 65, 3, 2, 2, 2, 78, 70, 3, 2, 2, 2, 78, 75, 3, 2, 2, 2, 78, 76, 3, 2, 2, 2, 78, 77, 3, 2, 2, 2, 79, 112, 3, 2, 2, 2, 80, 81, 12, 21, 2, 2, 81, 82, 9, 4, 2, 2, 82, 111, 5, 12, 7, 22, 83, 84, 12, 20, 2, 2, 
84, 85, 9, 5, 2, 2, 85, 111, 5, 12, 7, 21, 86, 87, 12, 18, 2, 2, 87, 88, 7, 6, 2, 2, 88, 111, 5, 12, 7, 19, 89, 90, 12, 17, 2, 2, 90, 91, 7, 7, 2, 2, 91, 111, 5, 12, 7, 18, 92, 93, 12, 16, 2, 2, 93, 94, 7, 11, 2, 2, 94, 111, 5, 12, 7, 17, 95, 96, 12, 15, 2, 2, 96, 97, 7, 10, 2, 2, 97, 111, 5, 12, 7, 16, 98, 99, 12, 14, 2, 2, 99, 100, 7, 9, 2, 2, 100, 111, 5, 12, 7, 15, 101, 102, 12, 13, 2, 2, 102, 103, 7, 8, 2, 2, 103, 111, 5, 12, 7, 14, 104, 105, 12, 12, 2, 2, 105, 106, 7, 17, 2, 2, 106, 111, 5, 12, 7, 13, 107, 108, 12, 11, 2, 2, 108, 109, 7, 18, 2, 2, 109, 111, 5, 12, 7, 12, 110, 80, 3, 2, 2, 2, 110, 83, 3, 2, 2, 2, 110, 86, 3, 2, 2, 2, 110, 89, 3, 2, 2, 2, 110, 92, 3, 2, 2, 2, 110, 95, 3, 2, 2, 2, 110, 98, 3, 2, 2, 2, 110, 101, 3, 2, 2, 2, 110, 104, 3, 2, 2, 2, 110, 107, 3, 2, 2, 2, 111, 114, 3, 2, 2, 2, 112, 110, 3, 2, 2, 2, 112, 113, 3, 2, 2, 2, 113, 13, 3, 2, 2, 2, 114, 112, 3, 2, 2, 2, 115, 120, 5, 12, 7, 2, 116, 117, 7, 4, 2, 2, 117, 119, 5, 12, 7, 2, 118, 116, 3, 2, 2, 2, 119, 122, 3, 2, 2, 2, 120, 118, 3, 2, 2, 2, 120, 121, 3, 2, 2, 2, 121, 15, 3, 2, 2, 2, 122, 120, 3, 2, 2, 2, 123, 128, 5, 6, 4, 2, 124, 125, 7, 4, 2, 2, 125, 127, 5, 6, 4, 2, 126, 124, 3, 2, 2, 2, 127, 130, 3, 2, 2, 2, 128, 126, 3, 2, 2, 2, 128, 129, 3, 2, 2, 2, 129, 17, 3, 2, 2, 2, 130, 128, 3, 2, 2, 2, 131, 132, 9, 6, 2, 2, 132, 133, 7, 5, 2, 2, 133, 134, 5, 6, 4, 2, 134, 19, 3, 2, 2, 2, 135, 140, 5, 18, 10, 2, 136, 137, 7, 4, 2, 2, 137, 139, 5, 18, 10, 2, 138, 136, 3, 2, 2, 2, 139, 142, 3, 2, 2, 2, 140, 138, 3, 2, 2, 2, 140, 141, 3, 2, 2, 2, 141, 21, 3, 2, 2, 2, 142, 140, 3, 2, 2, 2, 14, 32, 37, 44, 62, 67, 72, 78, 110, 112, 120, 128, 140] \ No newline at end of file diff --git a/x-pack/elastic-agent/pkg/eql/parser/Eql.tokens b/x-pack/elastic-agent/pkg/eql/parser/Eql.tokens new file mode 100644 index 00000000000..80081750b0b --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/parser/Eql.tokens @@ -0,0 +1,54 @@ +T__0=1 +T__1=2 +T__2=3 +EQ=4 +NEQ=5 +GT=6 +LT=7 +GTE=8 +LTE=9 +ADD=10 +SUB=11 +MUL=12 +DIV=13 +MOD=14 +AND=15 +OR=16 +TRUE=17 +FALSE=18 +FLOAT=19 +NUMBER=20 +WHITESPACE=21 +NOT=22 +NAME=23 +VNAME=24 +STEXT=25 +DTEXT=26 +LPAR=27 +RPAR=28 +LARR=29 +RARR=30 +LDICT=31 +RDICT=32 +BEGIN_VARIABLE=33 +'|'=1 +','=2 +':'=3 +'=='=4 +'!='=5 +'>'=6 +'<'=7 +'>='=8 +'<='=9 +'+'=10 +'-'=11 +'*'=12 +'/'=13 +'%'=14 +'('=27 +')'=28 +'['=29 +']'=30 +'{'=31 +'}'=32 +'${'=33 diff --git a/x-pack/elastic-agent/pkg/eql/parser/EqlLexer.interp b/x-pack/elastic-agent/pkg/eql/parser/EqlLexer.interp new file mode 100644 index 00000000000..2131aba8177 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/parser/EqlLexer.interp @@ -0,0 +1,116 @@ +token literal names: +null +'|' +',' +':' +'==' +'!=' +'>' +'<' +'>=' +'<=' +'+' +'-' +'*' +'/' +'%' +null +null +null +null +null +null +null +null +null +null +null +null +'(' +')' +'[' +']' +'{' +'}' +'${' + +token symbolic names: +null +null +null +null +EQ +NEQ +GT +LT +GTE +LTE +ADD +SUB +MUL +DIV +MOD +AND +OR +TRUE +FALSE +FLOAT +NUMBER +WHITESPACE +NOT +NAME +VNAME +STEXT +DTEXT +LPAR +RPAR +LARR +RARR +LDICT +RDICT +BEGIN_VARIABLE + +rule names: +T__0 +T__1 +T__2 +EQ +NEQ +GT +LT +GTE +LTE +ADD +SUB +MUL +DIV +MOD +AND +OR +TRUE +FALSE +FLOAT +NUMBER +WHITESPACE +NOT +NAME +VNAME +STEXT +DTEXT +LPAR +RPAR +LARR +RARR +LDICT +RDICT +BEGIN_VARIABLE + +channel names: +DEFAULT_TOKEN_CHANNEL +HIDDEN + +mode names: +DEFAULT_MODE + +atn: +[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 35, 230, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 
4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, 28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 4, 33, 9, 33, 4, 34, 9, 34, 3, 2, 3, 2, 3, 3, 3, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 3, 15, 3, 15, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 5, 16, 108, 10, 16, 3, 17, 3, 17, 3, 17, 3, 17, 5, 17, 114, 10, 17, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18, 5, 18, 124, 10, 18, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 5, 19, 136, 10, 19, 3, 20, 5, 20, 139, 10, 20, 3, 20, 6, 20, 142, 10, 20, 13, 20, 14, 20, 143, 3, 20, 3, 20, 6, 20, 148, 10, 20, 13, 20, 14, 20, 149, 3, 21, 5, 21, 153, 10, 21, 3, 21, 6, 21, 156, 10, 21, 13, 21, 14, 21, 157, 3, 22, 6, 22, 161, 10, 22, 13, 22, 14, 22, 162, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 3, 23, 3, 23, 3, 23, 5, 23, 173, 10, 23, 3, 24, 3, 24, 7, 24, 177, 10, 24, 12, 24, 14, 24, 180, 11, 24, 3, 25, 6, 25, 183, 10, 25, 13, 25, 14, 25, 184, 3, 25, 3, 25, 6, 25, 189, 10, 25, 13, 25, 14, 25, 190, 7, 25, 193, 10, 25, 12, 25, 14, 25, 196, 11, 25, 3, 26, 3, 26, 7, 26, 200, 10, 26, 12, 26, 14, 26, 203, 11, 26, 3, 26, 3, 26, 3, 27, 3, 27, 7, 27, 209, 10, 27, 12, 27, 14, 27, 212, 11, 27, 3, 27, 3, 27, 3, 28, 3, 28, 3, 29, 3, 29, 3, 30, 3, 30, 3, 31, 3, 31, 3, 32, 3, 32, 3, 33, 3, 33, 3, 34, 3, 34, 3, 34, 2, 2, 35, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, 20, 39, 21, 41, 22, 43, 23, 45, 24, 47, 25, 49, 26, 51, 27, 53, 28, 55, 29, 57, 30, 59, 31, 61, 32, 63, 33, 65, 34, 67, 35, 3, 2, 10, 3, 2, 47, 47, 3, 2, 50, 59, 5, 2, 11, 12, 15, 15, 34, 34, 5, 2, 67, 92, 97, 97, 99, 124, 6, 2, 50, 59, 67, 92, 97, 97, 99, 124, 7, 2, 48, 48, 50, 59, 67, 92, 97, 97, 99, 124, 5, 2, 12, 12, 15, 15, 41, 41, 5, 2, 12, 12, 15, 15, 36, 36, 2, 246, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2, 2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 2, 57, 3, 2, 2, 2, 2, 59, 3, 2, 2, 2, 2, 61, 3, 2, 2, 2, 2, 63, 3, 2, 2, 2, 2, 65, 3, 2, 2, 2, 2, 67, 3, 2, 2, 2, 3, 69, 3, 2, 2, 2, 5, 71, 3, 2, 2, 2, 7, 73, 3, 2, 2, 2, 9, 75, 3, 2, 2, 2, 11, 78, 3, 2, 2, 2, 13, 81, 3, 2, 2, 2, 15, 83, 3, 2, 2, 2, 17, 85, 3, 2, 2, 2, 19, 88, 3, 2, 2, 2, 21, 91, 3, 2, 2, 2, 23, 93, 3, 2, 2, 2, 25, 95, 3, 2, 2, 2, 27, 97, 3, 2, 2, 2, 29, 99, 3, 2, 2, 2, 31, 107, 3, 2, 2, 2, 33, 113, 3, 2, 2, 2, 35, 123, 3, 2, 2, 2, 37, 135, 3, 2, 2, 2, 39, 138, 3, 2, 2, 2, 41, 152, 3, 2, 2, 2, 43, 160, 3, 2, 2, 2, 45, 172, 3, 2, 2, 2, 47, 174, 3, 2, 2, 2, 49, 182, 3, 2, 2, 2, 51, 197, 3, 2, 2, 2, 53, 206, 3, 2, 2, 2, 55, 215, 3, 2, 2, 2, 57, 217, 3, 2, 2, 2, 59, 219, 3, 2, 2, 2, 61, 221, 3, 2, 2, 2, 63, 223, 3, 2, 2, 2, 65, 225, 3, 2, 2, 2, 67, 227, 3, 2, 2, 2, 69, 70, 7, 126, 2, 2, 70, 4, 3, 2, 2, 2, 71, 72, 7, 46, 2, 2, 72, 6, 3, 2, 2, 2, 73, 74, 7, 
60, 2, 2, 74, 8, 3, 2, 2, 2, 75, 76, 7, 63, 2, 2, 76, 77, 7, 63, 2, 2, 77, 10, 3, 2, 2, 2, 78, 79, 7, 35, 2, 2, 79, 80, 7, 63, 2, 2, 80, 12, 3, 2, 2, 2, 81, 82, 7, 64, 2, 2, 82, 14, 3, 2, 2, 2, 83, 84, 7, 62, 2, 2, 84, 16, 3, 2, 2, 2, 85, 86, 7, 64, 2, 2, 86, 87, 7, 63, 2, 2, 87, 18, 3, 2, 2, 2, 88, 89, 7, 62, 2, 2, 89, 90, 7, 63, 2, 2, 90, 20, 3, 2, 2, 2, 91, 92, 7, 45, 2, 2, 92, 22, 3, 2, 2, 2, 93, 94, 7, 47, 2, 2, 94, 24, 3, 2, 2, 2, 95, 96, 7, 44, 2, 2, 96, 26, 3, 2, 2, 2, 97, 98, 7, 49, 2, 2, 98, 28, 3, 2, 2, 2, 99, 100, 7, 39, 2, 2, 100, 30, 3, 2, 2, 2, 101, 102, 7, 99, 2, 2, 102, 103, 7, 112, 2, 2, 103, 108, 7, 102, 2, 2, 104, 105, 7, 67, 2, 2, 105, 106, 7, 80, 2, 2, 106, 108, 7, 70, 2, 2, 107, 101, 3, 2, 2, 2, 107, 104, 3, 2, 2, 2, 108, 32, 3, 2, 2, 2, 109, 110, 7, 113, 2, 2, 110, 114, 7, 116, 2, 2, 111, 112, 7, 81, 2, 2, 112, 114, 7, 84, 2, 2, 113, 109, 3, 2, 2, 2, 113, 111, 3, 2, 2, 2, 114, 34, 3, 2, 2, 2, 115, 116, 7, 118, 2, 2, 116, 117, 7, 116, 2, 2, 117, 118, 7, 119, 2, 2, 118, 124, 7, 103, 2, 2, 119, 120, 7, 86, 2, 2, 120, 121, 7, 84, 2, 2, 121, 122, 7, 87, 2, 2, 122, 124, 7, 71, 2, 2, 123, 115, 3, 2, 2, 2, 123, 119, 3, 2, 2, 2, 124, 36, 3, 2, 2, 2, 125, 126, 7, 104, 2, 2, 126, 127, 7, 99, 2, 2, 127, 128, 7, 110, 2, 2, 128, 129, 7, 117, 2, 2, 129, 136, 7, 103, 2, 2, 130, 131, 7, 72, 2, 2, 131, 132, 7, 67, 2, 2, 132, 133, 7, 78, 2, 2, 133, 134, 7, 85, 2, 2, 134, 136, 7, 71, 2, 2, 135, 125, 3, 2, 2, 2, 135, 130, 3, 2, 2, 2, 136, 38, 3, 2, 2, 2, 137, 139, 9, 2, 2, 2, 138, 137, 3, 2, 2, 2, 138, 139, 3, 2, 2, 2, 139, 141, 3, 2, 2, 2, 140, 142, 9, 3, 2, 2, 141, 140, 3, 2, 2, 2, 142, 143, 3, 2, 2, 2, 143, 141, 3, 2, 2, 2, 143, 144, 3, 2, 2, 2, 144, 145, 3, 2, 2, 2, 145, 147, 7, 48, 2, 2, 146, 148, 9, 3, 2, 2, 147, 146, 3, 2, 2, 2, 148, 149, 3, 2, 2, 2, 149, 147, 3, 2, 2, 2, 149, 150, 3, 2, 2, 2, 150, 40, 3, 2, 2, 2, 151, 153, 9, 2, 2, 2, 152, 151, 3, 2, 2, 2, 152, 153, 3, 2, 2, 2, 153, 155, 3, 2, 2, 2, 154, 156, 9, 3, 2, 2, 155, 154, 3, 2, 2, 2, 156, 157, 3, 2, 2, 2, 157, 155, 3, 2, 2, 2, 157, 158, 3, 2, 2, 2, 158, 42, 3, 2, 2, 2, 159, 161, 9, 4, 2, 2, 160, 159, 3, 2, 2, 2, 161, 162, 3, 2, 2, 2, 162, 160, 3, 2, 2, 2, 162, 163, 3, 2, 2, 2, 163, 164, 3, 2, 2, 2, 164, 165, 8, 22, 2, 2, 165, 44, 3, 2, 2, 2, 166, 167, 7, 80, 2, 2, 167, 168, 7, 81, 2, 2, 168, 173, 7, 86, 2, 2, 169, 170, 7, 112, 2, 2, 170, 171, 7, 113, 2, 2, 171, 173, 7, 118, 2, 2, 172, 166, 3, 2, 2, 2, 172, 169, 3, 2, 2, 2, 173, 46, 3, 2, 2, 2, 174, 178, 9, 5, 2, 2, 175, 177, 9, 6, 2, 2, 176, 175, 3, 2, 2, 2, 177, 180, 3, 2, 2, 2, 178, 176, 3, 2, 2, 2, 178, 179, 3, 2, 2, 2, 179, 48, 3, 2, 2, 2, 180, 178, 3, 2, 2, 2, 181, 183, 9, 7, 2, 2, 182, 181, 3, 2, 2, 2, 183, 184, 3, 2, 2, 2, 184, 182, 3, 2, 2, 2, 184, 185, 3, 2, 2, 2, 185, 194, 3, 2, 2, 2, 186, 188, 7, 48, 2, 2, 187, 189, 9, 6, 2, 2, 188, 187, 3, 2, 2, 2, 189, 190, 3, 2, 2, 2, 190, 188, 3, 2, 2, 2, 190, 191, 3, 2, 2, 2, 191, 193, 3, 2, 2, 2, 192, 186, 3, 2, 2, 2, 193, 196, 3, 2, 2, 2, 194, 192, 3, 2, 2, 2, 194, 195, 3, 2, 2, 2, 195, 50, 3, 2, 2, 2, 196, 194, 3, 2, 2, 2, 197, 201, 7, 41, 2, 2, 198, 200, 10, 8, 2, 2, 199, 198, 3, 2, 2, 2, 200, 203, 3, 2, 2, 2, 201, 199, 3, 2, 2, 2, 201, 202, 3, 2, 2, 2, 202, 204, 3, 2, 2, 2, 203, 201, 3, 2, 2, 2, 204, 205, 7, 41, 2, 2, 205, 52, 3, 2, 2, 2, 206, 210, 7, 36, 2, 2, 207, 209, 10, 9, 2, 2, 208, 207, 3, 2, 2, 2, 209, 212, 3, 2, 2, 2, 210, 208, 3, 2, 2, 2, 210, 211, 3, 2, 2, 2, 211, 213, 3, 2, 2, 2, 212, 210, 3, 2, 2, 2, 213, 214, 7, 36, 2, 2, 214, 54, 3, 2, 2, 2, 215, 216, 7, 42, 2, 2, 216, 56, 3, 2, 2, 2, 217, 218, 7, 43, 
2, 2, 218, 58, 3, 2, 2, 2, 219, 220, 7, 93, 2, 2, 220, 60, 3, 2, 2, 2, 221, 222, 7, 95, 2, 2, 222, 62, 3, 2, 2, 2, 223, 224, 7, 125, 2, 2, 224, 64, 3, 2, 2, 2, 225, 226, 7, 127, 2, 2, 226, 66, 3, 2, 2, 2, 227, 228, 7, 38, 2, 2, 228, 229, 7, 125, 2, 2, 229, 68, 3, 2, 2, 2, 20, 2, 107, 113, 123, 135, 138, 143, 149, 152, 157, 162, 172, 178, 184, 190, 194, 201, 210, 3, 8, 2, 2] \ No newline at end of file diff --git a/x-pack/elastic-agent/pkg/eql/parser/EqlLexer.tokens b/x-pack/elastic-agent/pkg/eql/parser/EqlLexer.tokens new file mode 100644 index 00000000000..80081750b0b --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/parser/EqlLexer.tokens @@ -0,0 +1,54 @@ +T__0=1 +T__1=2 +T__2=3 +EQ=4 +NEQ=5 +GT=6 +LT=7 +GTE=8 +LTE=9 +ADD=10 +SUB=11 +MUL=12 +DIV=13 +MOD=14 +AND=15 +OR=16 +TRUE=17 +FALSE=18 +FLOAT=19 +NUMBER=20 +WHITESPACE=21 +NOT=22 +NAME=23 +VNAME=24 +STEXT=25 +DTEXT=26 +LPAR=27 +RPAR=28 +LARR=29 +RARR=30 +LDICT=31 +RDICT=32 +BEGIN_VARIABLE=33 +'|'=1 +','=2 +':'=3 +'=='=4 +'!='=5 +'>'=6 +'<'=7 +'>='=8 +'<='=9 +'+'=10 +'-'=11 +'*'=12 +'/'=13 +'%'=14 +'('=27 +')'=28 +'['=29 +']'=30 +'{'=31 +'}'=32 +'${'=33 diff --git a/x-pack/elastic-agent/pkg/eql/parser/eql_base_listener.go b/x-pack/elastic-agent/pkg/eql/parser/eql_base_listener.go new file mode 100644 index 00000000000..1ceadb12562 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/parser/eql_base_listener.go @@ -0,0 +1,200 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// Code generated from Eql.g4 by ANTLR 4.7.1. DO NOT EDIT. + +package parser // Eql + +import "github.com/antlr/antlr4/runtime/Go/antlr" + +// BaseEqlListener is a complete listener for a parse tree produced by EqlParser. +type BaseEqlListener struct{} + +var _ EqlListener = &BaseEqlListener{} + +// VisitTerminal is called when a terminal node is visited. +func (s *BaseEqlListener) VisitTerminal(node antlr.TerminalNode) {} + +// VisitErrorNode is called when an error node is visited. +func (s *BaseEqlListener) VisitErrorNode(node antlr.ErrorNode) {} + +// EnterEveryRule is called when any rule is entered. +func (s *BaseEqlListener) EnterEveryRule(ctx antlr.ParserRuleContext) {} + +// ExitEveryRule is called when any rule is exited. +func (s *BaseEqlListener) ExitEveryRule(ctx antlr.ParserRuleContext) {} + +// EnterExpList is called when production expList is entered. +func (s *BaseEqlListener) EnterExpList(ctx *ExpListContext) {} + +// ExitExpList is called when production expList is exited. +func (s *BaseEqlListener) ExitExpList(ctx *ExpListContext) {} + +// EnterBoolean is called when production boolean is entered. +func (s *BaseEqlListener) EnterBoolean(ctx *BooleanContext) {} + +// ExitBoolean is called when production boolean is exited. +func (s *BaseEqlListener) ExitBoolean(ctx *BooleanContext) {} + +// EnterConstant is called when production constant is entered. +func (s *BaseEqlListener) EnterConstant(ctx *ConstantContext) {} + +// ExitConstant is called when production constant is exited. +func (s *BaseEqlListener) ExitConstant(ctx *ConstantContext) {} + +// EnterVariable is called when production variable is entered. +func (s *BaseEqlListener) EnterVariable(ctx *VariableContext) {} + +// ExitVariable is called when production variable is exited. 
+func (s *BaseEqlListener) ExitVariable(ctx *VariableContext) {} + +// EnterVariableExp is called when production variableExp is entered. +func (s *BaseEqlListener) EnterVariableExp(ctx *VariableExpContext) {} + +// ExitVariableExp is called when production variableExp is exited. +func (s *BaseEqlListener) ExitVariableExp(ctx *VariableExpContext) {} + +// EnterExpArithmeticNEQ is called when production ExpArithmeticNEQ is entered. +func (s *BaseEqlListener) EnterExpArithmeticNEQ(ctx *ExpArithmeticNEQContext) {} + +// ExitExpArithmeticNEQ is called when production ExpArithmeticNEQ is exited. +func (s *BaseEqlListener) ExitExpArithmeticNEQ(ctx *ExpArithmeticNEQContext) {} + +// EnterExpArithmeticEQ is called when production ExpArithmeticEQ is entered. +func (s *BaseEqlListener) EnterExpArithmeticEQ(ctx *ExpArithmeticEQContext) {} + +// ExitExpArithmeticEQ is called when production ExpArithmeticEQ is exited. +func (s *BaseEqlListener) ExitExpArithmeticEQ(ctx *ExpArithmeticEQContext) {} + +// EnterExpArithmeticGTE is called when production ExpArithmeticGTE is entered. +func (s *BaseEqlListener) EnterExpArithmeticGTE(ctx *ExpArithmeticGTEContext) {} + +// ExitExpArithmeticGTE is called when production ExpArithmeticGTE is exited. +func (s *BaseEqlListener) ExitExpArithmeticGTE(ctx *ExpArithmeticGTEContext) {} + +// EnterExpArithmeticLTE is called when production ExpArithmeticLTE is entered. +func (s *BaseEqlListener) EnterExpArithmeticLTE(ctx *ExpArithmeticLTEContext) {} + +// ExitExpArithmeticLTE is called when production ExpArithmeticLTE is exited. +func (s *BaseEqlListener) ExitExpArithmeticLTE(ctx *ExpArithmeticLTEContext) {} + +// EnterExpArithmeticGT is called when production ExpArithmeticGT is entered. +func (s *BaseEqlListener) EnterExpArithmeticGT(ctx *ExpArithmeticGTContext) {} + +// ExitExpArithmeticGT is called when production ExpArithmeticGT is exited. +func (s *BaseEqlListener) ExitExpArithmeticGT(ctx *ExpArithmeticGTContext) {} + +// EnterExpArithmeticMulDivMod is called when production ExpArithmeticMulDivMod is entered. +func (s *BaseEqlListener) EnterExpArithmeticMulDivMod(ctx *ExpArithmeticMulDivModContext) {} + +// ExitExpArithmeticMulDivMod is called when production ExpArithmeticMulDivMod is exited. +func (s *BaseEqlListener) ExitExpArithmeticMulDivMod(ctx *ExpArithmeticMulDivModContext) {} + +// EnterExpDict is called when production ExpDict is entered. +func (s *BaseEqlListener) EnterExpDict(ctx *ExpDictContext) {} + +// ExitExpDict is called when production ExpDict is exited. +func (s *BaseEqlListener) ExitExpDict(ctx *ExpDictContext) {} + +// EnterExpText is called when production ExpText is entered. +func (s *BaseEqlListener) EnterExpText(ctx *ExpTextContext) {} + +// ExitExpText is called when production ExpText is exited. +func (s *BaseEqlListener) ExitExpText(ctx *ExpTextContext) {} + +// EnterExpNumber is called when production ExpNumber is entered. +func (s *BaseEqlListener) EnterExpNumber(ctx *ExpNumberContext) {} + +// ExitExpNumber is called when production ExpNumber is exited. +func (s *BaseEqlListener) ExitExpNumber(ctx *ExpNumberContext) {} + +// EnterExpLogicalAnd is called when production ExpLogicalAnd is entered. +func (s *BaseEqlListener) EnterExpLogicalAnd(ctx *ExpLogicalAndContext) {} + +// ExitExpLogicalAnd is called when production ExpLogicalAnd is exited. +func (s *BaseEqlListener) ExitExpLogicalAnd(ctx *ExpLogicalAndContext) {} + +// EnterExpLogicalOR is called when production ExpLogicalOR is entered. 
+func (s *BaseEqlListener) EnterExpLogicalOR(ctx *ExpLogicalORContext) {} + +// ExitExpLogicalOR is called when production ExpLogicalOR is exited. +func (s *BaseEqlListener) ExitExpLogicalOR(ctx *ExpLogicalORContext) {} + +// EnterExpFloat is called when production ExpFloat is entered. +func (s *BaseEqlListener) EnterExpFloat(ctx *ExpFloatContext) {} + +// ExitExpFloat is called when production ExpFloat is exited. +func (s *BaseEqlListener) ExitExpFloat(ctx *ExpFloatContext) {} + +// EnterExpVariable is called when production ExpVariable is entered. +func (s *BaseEqlListener) EnterExpVariable(ctx *ExpVariableContext) {} + +// ExitExpVariable is called when production ExpVariable is exited. +func (s *BaseEqlListener) ExitExpVariable(ctx *ExpVariableContext) {} + +// EnterExpArray is called when production ExpArray is entered. +func (s *BaseEqlListener) EnterExpArray(ctx *ExpArrayContext) {} + +// ExitExpArray is called when production ExpArray is exited. +func (s *BaseEqlListener) ExitExpArray(ctx *ExpArrayContext) {} + +// EnterExpNot is called when production ExpNot is entered. +func (s *BaseEqlListener) EnterExpNot(ctx *ExpNotContext) {} + +// ExitExpNot is called when production ExpNot is exited. +func (s *BaseEqlListener) ExitExpNot(ctx *ExpNotContext) {} + +// EnterExpInParen is called when production ExpInParen is entered. +func (s *BaseEqlListener) EnterExpInParen(ctx *ExpInParenContext) {} + +// ExitExpInParen is called when production ExpInParen is exited. +func (s *BaseEqlListener) ExitExpInParen(ctx *ExpInParenContext) {} + +// EnterExpBoolean is called when production ExpBoolean is entered. +func (s *BaseEqlListener) EnterExpBoolean(ctx *ExpBooleanContext) {} + +// ExitExpBoolean is called when production ExpBoolean is exited. +func (s *BaseEqlListener) ExitExpBoolean(ctx *ExpBooleanContext) {} + +// EnterExpArithmeticAddSub is called when production ExpArithmeticAddSub is entered. +func (s *BaseEqlListener) EnterExpArithmeticAddSub(ctx *ExpArithmeticAddSubContext) {} + +// ExitExpArithmeticAddSub is called when production ExpArithmeticAddSub is exited. +func (s *BaseEqlListener) ExitExpArithmeticAddSub(ctx *ExpArithmeticAddSubContext) {} + +// EnterExpFunction is called when production ExpFunction is entered. +func (s *BaseEqlListener) EnterExpFunction(ctx *ExpFunctionContext) {} + +// ExitExpFunction is called when production ExpFunction is exited. +func (s *BaseEqlListener) ExitExpFunction(ctx *ExpFunctionContext) {} + +// EnterExpArithmeticLT is called when production ExpArithmeticLT is entered. +func (s *BaseEqlListener) EnterExpArithmeticLT(ctx *ExpArithmeticLTContext) {} + +// ExitExpArithmeticLT is called when production ExpArithmeticLT is exited. +func (s *BaseEqlListener) ExitExpArithmeticLT(ctx *ExpArithmeticLTContext) {} + +// EnterArguments is called when production arguments is entered. +func (s *BaseEqlListener) EnterArguments(ctx *ArgumentsContext) {} + +// ExitArguments is called when production arguments is exited. +func (s *BaseEqlListener) ExitArguments(ctx *ArgumentsContext) {} + +// EnterArray is called when production array is entered. +func (s *BaseEqlListener) EnterArray(ctx *ArrayContext) {} + +// ExitArray is called when production array is exited. +func (s *BaseEqlListener) ExitArray(ctx *ArrayContext) {} + +// EnterKey is called when production key is entered. +func (s *BaseEqlListener) EnterKey(ctx *KeyContext) {} + +// ExitKey is called when production key is exited. 
+func (s *BaseEqlListener) ExitKey(ctx *KeyContext) {} + +// EnterDict is called when production dict is entered. +func (s *BaseEqlListener) EnterDict(ctx *DictContext) {} + +// ExitDict is called when production dict is exited. +func (s *BaseEqlListener) ExitDict(ctx *DictContext) {} diff --git a/x-pack/elastic-agent/pkg/eql/parser/eql_base_visitor.go b/x-pack/elastic-agent/pkg/eql/parser/eql_base_visitor.go new file mode 100644 index 00000000000..fd7523997fd --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/parser/eql_base_visitor.go @@ -0,0 +1,129 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// Code generated from Eql.g4 by ANTLR 4.7.1. DO NOT EDIT. + +package parser // Eql + +import "github.com/antlr/antlr4/runtime/Go/antlr" + +type BaseEqlVisitor struct { + *antlr.BaseParseTreeVisitor +} + +func (v *BaseEqlVisitor) VisitExpList(ctx *ExpListContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitBoolean(ctx *BooleanContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitConstant(ctx *ConstantContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitVariable(ctx *VariableContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitVariableExp(ctx *VariableExpContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArithmeticNEQ(ctx *ExpArithmeticNEQContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArithmeticEQ(ctx *ExpArithmeticEQContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArithmeticGTE(ctx *ExpArithmeticGTEContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArithmeticLTE(ctx *ExpArithmeticLTEContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArithmeticGT(ctx *ExpArithmeticGTContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArithmeticMulDivMod(ctx *ExpArithmeticMulDivModContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpDict(ctx *ExpDictContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpText(ctx *ExpTextContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpNumber(ctx *ExpNumberContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpLogicalAnd(ctx *ExpLogicalAndContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpLogicalOR(ctx *ExpLogicalORContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpFloat(ctx *ExpFloatContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpVariable(ctx *ExpVariableContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArray(ctx *ExpArrayContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpNot(ctx *ExpNotContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpInParen(ctx *ExpInParenContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpBoolean(ctx *ExpBooleanContext) interface{} { + 
return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArithmeticAddSub(ctx *ExpArithmeticAddSubContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpFunction(ctx *ExpFunctionContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitExpArithmeticLT(ctx *ExpArithmeticLTContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitArguments(ctx *ArgumentsContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitArray(ctx *ArrayContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitKey(ctx *KeyContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseEqlVisitor) VisitDict(ctx *DictContext) interface{} { + return v.VisitChildren(ctx) +} diff --git a/x-pack/elastic-agent/pkg/eql/parser/eql_lexer.go b/x-pack/elastic-agent/pkg/eql/parser/eql_lexer.go new file mode 100644 index 00000000000..da1bf4d112e --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/parser/eql_lexer.go @@ -0,0 +1,228 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// Code generated from Eql.g4 by ANTLR 4.7.1. DO NOT EDIT. + +package parser + +import ( + "fmt" + "unicode" + + "github.com/antlr/antlr4/runtime/Go/antlr" +) + +// Suppress unused import error +var _ = fmt.Printf +var _ = unicode.IsLetter + +var serializedLexerAtn = []uint16{ + 3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 35, 230, + 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, + 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, + 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, + 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, + 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, + 28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 4, 33, 9, 33, + 4, 34, 9, 34, 3, 2, 3, 2, 3, 3, 3, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, + 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, + 3, 10, 3, 11, 3, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 3, 15, 3, + 15, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 5, 16, 108, 10, 16, 3, 17, + 3, 17, 3, 17, 3, 17, 5, 17, 114, 10, 17, 3, 18, 3, 18, 3, 18, 3, 18, 3, + 18, 3, 18, 3, 18, 3, 18, 5, 18, 124, 10, 18, 3, 19, 3, 19, 3, 19, 3, 19, + 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 5, 19, 136, 10, 19, 3, 20, 5, + 20, 139, 10, 20, 3, 20, 6, 20, 142, 10, 20, 13, 20, 14, 20, 143, 3, 20, + 3, 20, 6, 20, 148, 10, 20, 13, 20, 14, 20, 149, 3, 21, 5, 21, 153, 10, + 21, 3, 21, 6, 21, 156, 10, 21, 13, 21, 14, 21, 157, 3, 22, 6, 22, 161, + 10, 22, 13, 22, 14, 22, 162, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 3, 23, + 3, 23, 3, 23, 5, 23, 173, 10, 23, 3, 24, 3, 24, 7, 24, 177, 10, 24, 12, + 24, 14, 24, 180, 11, 24, 3, 25, 6, 25, 183, 10, 25, 13, 25, 14, 25, 184, + 3, 25, 3, 25, 6, 25, 189, 10, 25, 13, 25, 14, 25, 190, 7, 25, 193, 10, + 25, 12, 25, 14, 25, 196, 11, 25, 3, 26, 3, 26, 7, 26, 200, 10, 26, 12, + 26, 14, 26, 203, 11, 26, 3, 26, 3, 26, 3, 27, 3, 27, 7, 27, 209, 10, 27, + 12, 27, 14, 27, 212, 11, 27, 3, 27, 3, 27, 3, 28, 3, 28, 3, 29, 3, 29, + 3, 30, 3, 30, 3, 31, 3, 31, 3, 32, 3, 32, 3, 33, 3, 33, 3, 34, 3, 34, 3, + 34, 2, 2, 35, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, + 11, 21, 12, 23, 13, 
25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, + 20, 39, 21, 41, 22, 43, 23, 45, 24, 47, 25, 49, 26, 51, 27, 53, 28, 55, + 29, 57, 30, 59, 31, 61, 32, 63, 33, 65, 34, 67, 35, 3, 2, 10, 3, 2, 47, + 47, 3, 2, 50, 59, 5, 2, 11, 12, 15, 15, 34, 34, 5, 2, 67, 92, 97, 97, 99, + 124, 6, 2, 50, 59, 67, 92, 97, 97, 99, 124, 7, 2, 48, 48, 50, 59, 67, 92, + 97, 97, 99, 124, 5, 2, 12, 12, 15, 15, 41, 41, 5, 2, 12, 12, 15, 15, 36, + 36, 2, 246, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, + 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, + 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, + 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, + 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, + 2, 2, 2, 41, 3, 2, 2, 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, + 2, 2, 2, 2, 49, 3, 2, 2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, + 3, 2, 2, 2, 2, 57, 3, 2, 2, 2, 2, 59, 3, 2, 2, 2, 2, 61, 3, 2, 2, 2, 2, + 63, 3, 2, 2, 2, 2, 65, 3, 2, 2, 2, 2, 67, 3, 2, 2, 2, 3, 69, 3, 2, 2, 2, + 5, 71, 3, 2, 2, 2, 7, 73, 3, 2, 2, 2, 9, 75, 3, 2, 2, 2, 11, 78, 3, 2, + 2, 2, 13, 81, 3, 2, 2, 2, 15, 83, 3, 2, 2, 2, 17, 85, 3, 2, 2, 2, 19, 88, + 3, 2, 2, 2, 21, 91, 3, 2, 2, 2, 23, 93, 3, 2, 2, 2, 25, 95, 3, 2, 2, 2, + 27, 97, 3, 2, 2, 2, 29, 99, 3, 2, 2, 2, 31, 107, 3, 2, 2, 2, 33, 113, 3, + 2, 2, 2, 35, 123, 3, 2, 2, 2, 37, 135, 3, 2, 2, 2, 39, 138, 3, 2, 2, 2, + 41, 152, 3, 2, 2, 2, 43, 160, 3, 2, 2, 2, 45, 172, 3, 2, 2, 2, 47, 174, + 3, 2, 2, 2, 49, 182, 3, 2, 2, 2, 51, 197, 3, 2, 2, 2, 53, 206, 3, 2, 2, + 2, 55, 215, 3, 2, 2, 2, 57, 217, 3, 2, 2, 2, 59, 219, 3, 2, 2, 2, 61, 221, + 3, 2, 2, 2, 63, 223, 3, 2, 2, 2, 65, 225, 3, 2, 2, 2, 67, 227, 3, 2, 2, + 2, 69, 70, 7, 126, 2, 2, 70, 4, 3, 2, 2, 2, 71, 72, 7, 46, 2, 2, 72, 6, + 3, 2, 2, 2, 73, 74, 7, 60, 2, 2, 74, 8, 3, 2, 2, 2, 75, 76, 7, 63, 2, 2, + 76, 77, 7, 63, 2, 2, 77, 10, 3, 2, 2, 2, 78, 79, 7, 35, 2, 2, 79, 80, 7, + 63, 2, 2, 80, 12, 3, 2, 2, 2, 81, 82, 7, 64, 2, 2, 82, 14, 3, 2, 2, 2, + 83, 84, 7, 62, 2, 2, 84, 16, 3, 2, 2, 2, 85, 86, 7, 64, 2, 2, 86, 87, 7, + 63, 2, 2, 87, 18, 3, 2, 2, 2, 88, 89, 7, 62, 2, 2, 89, 90, 7, 63, 2, 2, + 90, 20, 3, 2, 2, 2, 91, 92, 7, 45, 2, 2, 92, 22, 3, 2, 2, 2, 93, 94, 7, + 47, 2, 2, 94, 24, 3, 2, 2, 2, 95, 96, 7, 44, 2, 2, 96, 26, 3, 2, 2, 2, + 97, 98, 7, 49, 2, 2, 98, 28, 3, 2, 2, 2, 99, 100, 7, 39, 2, 2, 100, 30, + 3, 2, 2, 2, 101, 102, 7, 99, 2, 2, 102, 103, 7, 112, 2, 2, 103, 108, 7, + 102, 2, 2, 104, 105, 7, 67, 2, 2, 105, 106, 7, 80, 2, 2, 106, 108, 7, 70, + 2, 2, 107, 101, 3, 2, 2, 2, 107, 104, 3, 2, 2, 2, 108, 32, 3, 2, 2, 2, + 109, 110, 7, 113, 2, 2, 110, 114, 7, 116, 2, 2, 111, 112, 7, 81, 2, 2, + 112, 114, 7, 84, 2, 2, 113, 109, 3, 2, 2, 2, 113, 111, 3, 2, 2, 2, 114, + 34, 3, 2, 2, 2, 115, 116, 7, 118, 2, 2, 116, 117, 7, 116, 2, 2, 117, 118, + 7, 119, 2, 2, 118, 124, 7, 103, 2, 2, 119, 120, 7, 86, 2, 2, 120, 121, + 7, 84, 2, 2, 121, 122, 7, 87, 2, 2, 122, 124, 7, 71, 2, 2, 123, 115, 3, + 2, 2, 2, 123, 119, 3, 2, 2, 2, 124, 36, 3, 2, 2, 2, 125, 126, 7, 104, 2, + 2, 126, 127, 7, 99, 2, 2, 127, 128, 7, 110, 2, 2, 128, 129, 7, 117, 2, + 2, 129, 136, 7, 103, 2, 2, 130, 131, 7, 72, 2, 2, 131, 132, 7, 67, 2, 2, + 132, 133, 7, 78, 2, 2, 133, 134, 7, 85, 2, 2, 134, 136, 7, 71, 2, 2, 135, + 125, 3, 2, 2, 2, 135, 130, 3, 2, 2, 2, 136, 38, 3, 2, 2, 2, 137, 139, 9, + 2, 2, 2, 138, 137, 3, 2, 2, 2, 138, 139, 3, 2, 2, 2, 139, 141, 3, 2, 2, + 2, 140, 142, 9, 3, 2, 2, 141, 140, 3, 2, 2, 2, 142, 143, 3, 2, 2, 2, 143, 
+ 141, 3, 2, 2, 2, 143, 144, 3, 2, 2, 2, 144, 145, 3, 2, 2, 2, 145, 147, + 7, 48, 2, 2, 146, 148, 9, 3, 2, 2, 147, 146, 3, 2, 2, 2, 148, 149, 3, 2, + 2, 2, 149, 147, 3, 2, 2, 2, 149, 150, 3, 2, 2, 2, 150, 40, 3, 2, 2, 2, + 151, 153, 9, 2, 2, 2, 152, 151, 3, 2, 2, 2, 152, 153, 3, 2, 2, 2, 153, + 155, 3, 2, 2, 2, 154, 156, 9, 3, 2, 2, 155, 154, 3, 2, 2, 2, 156, 157, + 3, 2, 2, 2, 157, 155, 3, 2, 2, 2, 157, 158, 3, 2, 2, 2, 158, 42, 3, 2, + 2, 2, 159, 161, 9, 4, 2, 2, 160, 159, 3, 2, 2, 2, 161, 162, 3, 2, 2, 2, + 162, 160, 3, 2, 2, 2, 162, 163, 3, 2, 2, 2, 163, 164, 3, 2, 2, 2, 164, + 165, 8, 22, 2, 2, 165, 44, 3, 2, 2, 2, 166, 167, 7, 80, 2, 2, 167, 168, + 7, 81, 2, 2, 168, 173, 7, 86, 2, 2, 169, 170, 7, 112, 2, 2, 170, 171, 7, + 113, 2, 2, 171, 173, 7, 118, 2, 2, 172, 166, 3, 2, 2, 2, 172, 169, 3, 2, + 2, 2, 173, 46, 3, 2, 2, 2, 174, 178, 9, 5, 2, 2, 175, 177, 9, 6, 2, 2, + 176, 175, 3, 2, 2, 2, 177, 180, 3, 2, 2, 2, 178, 176, 3, 2, 2, 2, 178, + 179, 3, 2, 2, 2, 179, 48, 3, 2, 2, 2, 180, 178, 3, 2, 2, 2, 181, 183, 9, + 7, 2, 2, 182, 181, 3, 2, 2, 2, 183, 184, 3, 2, 2, 2, 184, 182, 3, 2, 2, + 2, 184, 185, 3, 2, 2, 2, 185, 194, 3, 2, 2, 2, 186, 188, 7, 48, 2, 2, 187, + 189, 9, 6, 2, 2, 188, 187, 3, 2, 2, 2, 189, 190, 3, 2, 2, 2, 190, 188, + 3, 2, 2, 2, 190, 191, 3, 2, 2, 2, 191, 193, 3, 2, 2, 2, 192, 186, 3, 2, + 2, 2, 193, 196, 3, 2, 2, 2, 194, 192, 3, 2, 2, 2, 194, 195, 3, 2, 2, 2, + 195, 50, 3, 2, 2, 2, 196, 194, 3, 2, 2, 2, 197, 201, 7, 41, 2, 2, 198, + 200, 10, 8, 2, 2, 199, 198, 3, 2, 2, 2, 200, 203, 3, 2, 2, 2, 201, 199, + 3, 2, 2, 2, 201, 202, 3, 2, 2, 2, 202, 204, 3, 2, 2, 2, 203, 201, 3, 2, + 2, 2, 204, 205, 7, 41, 2, 2, 205, 52, 3, 2, 2, 2, 206, 210, 7, 36, 2, 2, + 207, 209, 10, 9, 2, 2, 208, 207, 3, 2, 2, 2, 209, 212, 3, 2, 2, 2, 210, + 208, 3, 2, 2, 2, 210, 211, 3, 2, 2, 2, 211, 213, 3, 2, 2, 2, 212, 210, + 3, 2, 2, 2, 213, 214, 7, 36, 2, 2, 214, 54, 3, 2, 2, 2, 215, 216, 7, 42, + 2, 2, 216, 56, 3, 2, 2, 2, 217, 218, 7, 43, 2, 2, 218, 58, 3, 2, 2, 2, + 219, 220, 7, 93, 2, 2, 220, 60, 3, 2, 2, 2, 221, 222, 7, 95, 2, 2, 222, + 62, 3, 2, 2, 2, 223, 224, 7, 125, 2, 2, 224, 64, 3, 2, 2, 2, 225, 226, + 7, 127, 2, 2, 226, 66, 3, 2, 2, 2, 227, 228, 7, 38, 2, 2, 228, 229, 7, + 125, 2, 2, 229, 68, 3, 2, 2, 2, 20, 2, 107, 113, 123, 135, 138, 143, 149, + 152, 157, 162, 172, 178, 184, 190, 194, 201, 210, 3, 8, 2, 2, +} + +var lexerDeserializer = antlr.NewATNDeserializer(nil) +var lexerAtn = lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn) + +var lexerChannelNames = []string{ + "DEFAULT_TOKEN_CHANNEL", "HIDDEN", +} + +var lexerModeNames = []string{ + "DEFAULT_MODE", +} + +var lexerLiteralNames = []string{ + "", "'|'", "','", "':'", "'=='", "'!='", "'>'", "'<'", "'>='", "'<='", + "'+'", "'-'", "'*'", "'/'", "'%'", "", "", "", "", "", "", "", "", "", + "", "", "", "'('", "')'", "'['", "']'", "'{'", "'}'", "'${'", +} + +var lexerSymbolicNames = []string{ + "", "", "", "", "EQ", "NEQ", "GT", "LT", "GTE", "LTE", "ADD", "SUB", "MUL", + "DIV", "MOD", "AND", "OR", "TRUE", "FALSE", "FLOAT", "NUMBER", "WHITESPACE", + "NOT", "NAME", "VNAME", "STEXT", "DTEXT", "LPAR", "RPAR", "LARR", "RARR", + "LDICT", "RDICT", "BEGIN_VARIABLE", +} + +var lexerRuleNames = []string{ + "T__0", "T__1", "T__2", "EQ", "NEQ", "GT", "LT", "GTE", "LTE", "ADD", "SUB", + "MUL", "DIV", "MOD", "AND", "OR", "TRUE", "FALSE", "FLOAT", "NUMBER", "WHITESPACE", + "NOT", "NAME", "VNAME", "STEXT", "DTEXT", "LPAR", "RPAR", "LARR", "RARR", + "LDICT", "RDICT", "BEGIN_VARIABLE", +} + +type EqlLexer struct { + *antlr.BaseLexer + 
channelNames []string + modeNames []string + // TODO: EOF string +} + +var lexerDecisionToDFA = make([]*antlr.DFA, len(lexerAtn.DecisionToState)) + +func init() { + for index, ds := range lexerAtn.DecisionToState { + lexerDecisionToDFA[index] = antlr.NewDFA(ds, index) + } +} + +func NewEqlLexer(input antlr.CharStream) *EqlLexer { + + l := new(EqlLexer) + + l.BaseLexer = antlr.NewBaseLexer(input) + l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache()) + + l.channelNames = lexerChannelNames + l.modeNames = lexerModeNames + l.RuleNames = lexerRuleNames + l.LiteralNames = lexerLiteralNames + l.SymbolicNames = lexerSymbolicNames + l.GrammarFileName = "Eql.g4" + // TODO: l.EOF = antlr.TokenEOF + + return l +} + +// EqlLexer tokens. +const ( + EqlLexerT__0 = 1 + EqlLexerT__1 = 2 + EqlLexerT__2 = 3 + EqlLexerEQ = 4 + EqlLexerNEQ = 5 + EqlLexerGT = 6 + EqlLexerLT = 7 + EqlLexerGTE = 8 + EqlLexerLTE = 9 + EqlLexerADD = 10 + EqlLexerSUB = 11 + EqlLexerMUL = 12 + EqlLexerDIV = 13 + EqlLexerMOD = 14 + EqlLexerAND = 15 + EqlLexerOR = 16 + EqlLexerTRUE = 17 + EqlLexerFALSE = 18 + EqlLexerFLOAT = 19 + EqlLexerNUMBER = 20 + EqlLexerWHITESPACE = 21 + EqlLexerNOT = 22 + EqlLexerNAME = 23 + EqlLexerVNAME = 24 + EqlLexerSTEXT = 25 + EqlLexerDTEXT = 26 + EqlLexerLPAR = 27 + EqlLexerRPAR = 28 + EqlLexerLARR = 29 + EqlLexerRARR = 30 + EqlLexerLDICT = 31 + EqlLexerRDICT = 32 + EqlLexerBEGIN_VARIABLE = 33 +) diff --git a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_listener.go b/x-pack/elastic-agent/pkg/eql/parser/eql_listener.go similarity index 66% rename from x-pack/elastic-agent/pkg/boolexp/parser/boolexp_listener.go rename to x-pack/elastic-agent/pkg/eql/parser/eql_listener.go index 12ff6e0fe89..b4bfa5b5d4d 100644 --- a/x-pack/elastic-agent/pkg/boolexp/parser/boolexp_listener.go +++ b/x-pack/elastic-agent/pkg/eql/parser/eql_listener.go @@ -2,19 +2,31 @@ // or more contributor license agreements. Licensed under the Elastic License; // you may not use this file except in compliance with the Elastic License. -// Code generated from Boolexp.g4 by ANTLR 4.7.2. DO NOT EDIT. +// Code generated from Eql.g4 by ANTLR 4.7.1. DO NOT EDIT. -package parser // Boolexp +package parser // Eql import "github.com/antlr/antlr4/runtime/Go/antlr" -// BoolexpListener is a complete listener for a parse tree produced by BoolexpParser. -type BoolexpListener interface { +// EqlListener is a complete listener for a parse tree produced by EqlParser. +type EqlListener interface { antlr.ParseTreeListener // EnterExpList is called when entering the expList production. EnterExpList(c *ExpListContext) + // EnterBoolean is called when entering the boolean production. + EnterBoolean(c *BooleanContext) + + // EnterConstant is called when entering the constant production. + EnterConstant(c *ConstantContext) + + // EnterVariable is called when entering the variable production. + EnterVariable(c *VariableContext) + + // EnterVariableExp is called when entering the variableExp production. + EnterVariableExp(c *VariableExpContext) + // EnterExpArithmeticNEQ is called when entering the ExpArithmeticNEQ production. EnterExpArithmeticNEQ(c *ExpArithmeticNEQContext) @@ -30,6 +42,12 @@ type BoolexpListener interface { // EnterExpArithmeticGT is called when entering the ExpArithmeticGT production. EnterExpArithmeticGT(c *ExpArithmeticGTContext) + // EnterExpArithmeticMulDivMod is called when entering the ExpArithmeticMulDivMod production. 
+ EnterExpArithmeticMulDivMod(c *ExpArithmeticMulDivModContext) + + // EnterExpDict is called when entering the ExpDict production. + EnterExpDict(c *ExpDictContext) + // EnterExpText is called when entering the ExpText production. EnterExpText(c *ExpTextContext) @@ -48,6 +66,9 @@ type BoolexpListener interface { // EnterExpVariable is called when entering the ExpVariable production. EnterExpVariable(c *ExpVariableContext) + // EnterExpArray is called when entering the ExpArray production. + EnterExpArray(c *ExpArrayContext) + // EnterExpNot is called when entering the ExpNot production. EnterExpNot(c *ExpNotContext) @@ -57,21 +78,42 @@ type BoolexpListener interface { // EnterExpBoolean is called when entering the ExpBoolean production. EnterExpBoolean(c *ExpBooleanContext) + // EnterExpArithmeticAddSub is called when entering the ExpArithmeticAddSub production. + EnterExpArithmeticAddSub(c *ExpArithmeticAddSubContext) + // EnterExpFunction is called when entering the ExpFunction production. EnterExpFunction(c *ExpFunctionContext) // EnterExpArithmeticLT is called when entering the ExpArithmeticLT production. EnterExpArithmeticLT(c *ExpArithmeticLTContext) - // EnterBoolean is called when entering the boolean production. - EnterBoolean(c *BooleanContext) - // EnterArguments is called when entering the arguments production. EnterArguments(c *ArgumentsContext) + // EnterArray is called when entering the array production. + EnterArray(c *ArrayContext) + + // EnterKey is called when entering the key production. + EnterKey(c *KeyContext) + + // EnterDict is called when entering the dict production. + EnterDict(c *DictContext) + // ExitExpList is called when exiting the expList production. ExitExpList(c *ExpListContext) + // ExitBoolean is called when exiting the boolean production. + ExitBoolean(c *BooleanContext) + + // ExitConstant is called when exiting the constant production. + ExitConstant(c *ConstantContext) + + // ExitVariable is called when exiting the variable production. + ExitVariable(c *VariableContext) + + // ExitVariableExp is called when exiting the variableExp production. + ExitVariableExp(c *VariableExpContext) + // ExitExpArithmeticNEQ is called when exiting the ExpArithmeticNEQ production. ExitExpArithmeticNEQ(c *ExpArithmeticNEQContext) @@ -87,6 +129,12 @@ type BoolexpListener interface { // ExitExpArithmeticGT is called when exiting the ExpArithmeticGT production. ExitExpArithmeticGT(c *ExpArithmeticGTContext) + // ExitExpArithmeticMulDivMod is called when exiting the ExpArithmeticMulDivMod production. + ExitExpArithmeticMulDivMod(c *ExpArithmeticMulDivModContext) + + // ExitExpDict is called when exiting the ExpDict production. + ExitExpDict(c *ExpDictContext) + // ExitExpText is called when exiting the ExpText production. ExitExpText(c *ExpTextContext) @@ -105,6 +153,9 @@ type BoolexpListener interface { // ExitExpVariable is called when exiting the ExpVariable production. ExitExpVariable(c *ExpVariableContext) + // ExitExpArray is called when exiting the ExpArray production. + ExitExpArray(c *ExpArrayContext) + // ExitExpNot is called when exiting the ExpNot production. ExitExpNot(c *ExpNotContext) @@ -114,15 +165,24 @@ type BoolexpListener interface { // ExitExpBoolean is called when exiting the ExpBoolean production. ExitExpBoolean(c *ExpBooleanContext) + // ExitExpArithmeticAddSub is called when exiting the ExpArithmeticAddSub production. + ExitExpArithmeticAddSub(c *ExpArithmeticAddSubContext) + // ExitExpFunction is called when exiting the ExpFunction production. 
ExitExpFunction(c *ExpFunctionContext) // ExitExpArithmeticLT is called when exiting the ExpArithmeticLT production. ExitExpArithmeticLT(c *ExpArithmeticLTContext) - // ExitBoolean is called when exiting the boolean production. - ExitBoolean(c *BooleanContext) - // ExitArguments is called when exiting the arguments production. ExitArguments(c *ArgumentsContext) + + // ExitArray is called when exiting the array production. + ExitArray(c *ArrayContext) + + // ExitKey is called when exiting the key production. + ExitKey(c *KeyContext) + + // ExitDict is called when exiting the dict production. + ExitDict(c *DictContext) } diff --git a/x-pack/elastic-agent/pkg/eql/parser/eql_parser.go b/x-pack/elastic-agent/pkg/eql/parser/eql_parser.go new file mode 100644 index 00000000000..6507c67a388 --- /dev/null +++ b/x-pack/elastic-agent/pkg/eql/parser/eql_parser.go @@ -0,0 +1,3284 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// Code generated from Eql.g4 by ANTLR 4.7.1. DO NOT EDIT. + +package parser // Eql + +import ( + "fmt" + "reflect" + "strconv" + + "github.com/antlr/antlr4/runtime/Go/antlr" +) + +// Suppress unused import errors +var _ = fmt.Printf +var _ = reflect.Copy +var _ = strconv.Itoa + +var parserATN = []uint16{ + 3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 35, 144, + 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, + 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 3, 2, 3, 2, 3, 2, 3, + 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 33, 10, 4, 3, 5, 3, 5, 3, + 5, 5, 5, 38, 10, 5, 3, 6, 3, 6, 3, 6, 7, 6, 43, 10, 6, 12, 6, 14, 6, 46, + 11, 6, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, + 3, 7, 3, 7, 3, 7, 3, 7, 5, 7, 63, 10, 7, 3, 7, 3, 7, 3, 7, 5, 7, 68, 10, + 7, 3, 7, 3, 7, 3, 7, 5, 7, 73, 10, 7, 3, 7, 3, 7, 3, 7, 3, 7, 5, 7, 79, + 10, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, + 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, + 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 7, 7, 111, 10, 7, 12, 7, 14, + 7, 114, 11, 7, 3, 8, 3, 8, 3, 8, 7, 8, 119, 10, 8, 12, 8, 14, 8, 122, 11, + 8, 3, 9, 3, 9, 3, 9, 7, 9, 127, 10, 9, 12, 9, 14, 9, 130, 11, 9, 3, 10, + 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 7, 11, 139, 10, 11, 12, 11, 14, + 11, 142, 11, 11, 3, 11, 2, 3, 12, 12, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, + 2, 7, 3, 2, 19, 20, 3, 2, 27, 28, 3, 2, 14, 16, 3, 2, 12, 13, 4, 2, 25, + 25, 27, 28, 2, 165, 2, 22, 3, 2, 2, 2, 4, 25, 3, 2, 2, 2, 6, 32, 3, 2, + 2, 2, 8, 37, 3, 2, 2, 2, 10, 39, 3, 2, 2, 2, 12, 78, 3, 2, 2, 2, 14, 115, + 3, 2, 2, 2, 16, 123, 3, 2, 2, 2, 18, 131, 3, 2, 2, 2, 20, 135, 3, 2, 2, + 2, 22, 23, 5, 12, 7, 2, 23, 24, 7, 2, 2, 3, 24, 3, 3, 2, 2, 2, 25, 26, + 9, 2, 2, 2, 26, 5, 3, 2, 2, 2, 27, 33, 7, 27, 2, 2, 28, 33, 7, 28, 2, 2, + 29, 33, 7, 21, 2, 2, 30, 33, 7, 22, 2, 2, 31, 33, 5, 4, 3, 2, 32, 27, 3, + 2, 2, 2, 32, 28, 3, 2, 2, 2, 32, 29, 3, 2, 2, 2, 32, 30, 3, 2, 2, 2, 32, + 31, 3, 2, 2, 2, 33, 7, 3, 2, 2, 2, 34, 38, 7, 25, 2, 2, 35, 38, 7, 26, + 2, 2, 36, 38, 5, 6, 4, 2, 37, 34, 3, 2, 2, 2, 37, 35, 3, 2, 2, 2, 37, 36, + 3, 2, 2, 2, 38, 9, 3, 2, 2, 2, 39, 44, 5, 8, 5, 2, 40, 41, 7, 3, 2, 2, + 41, 43, 5, 8, 5, 2, 42, 40, 3, 2, 2, 2, 43, 46, 3, 2, 2, 2, 44, 42, 3, + 2, 2, 2, 44, 45, 3, 2, 2, 2, 45, 11, 3, 2, 2, 2, 46, 44, 3, 2, 2, 2, 47, + 48, 8, 7, 1, 2, 48, 49, 7, 29, 
2, 2, 49, 50, 5, 12, 7, 2, 50, 51, 7, 30, + 2, 2, 51, 79, 3, 2, 2, 2, 52, 53, 7, 24, 2, 2, 53, 79, 5, 12, 7, 19, 54, + 79, 5, 4, 3, 2, 55, 56, 7, 35, 2, 2, 56, 57, 5, 10, 6, 2, 57, 58, 7, 34, + 2, 2, 58, 79, 3, 2, 2, 2, 59, 60, 7, 25, 2, 2, 60, 62, 7, 29, 2, 2, 61, + 63, 5, 14, 8, 2, 62, 61, 3, 2, 2, 2, 62, 63, 3, 2, 2, 2, 63, 64, 3, 2, + 2, 2, 64, 79, 7, 30, 2, 2, 65, 67, 7, 31, 2, 2, 66, 68, 5, 16, 9, 2, 67, + 66, 3, 2, 2, 2, 67, 68, 3, 2, 2, 2, 68, 69, 3, 2, 2, 2, 69, 79, 7, 32, + 2, 2, 70, 72, 7, 33, 2, 2, 71, 73, 5, 20, 11, 2, 72, 71, 3, 2, 2, 2, 72, + 73, 3, 2, 2, 2, 73, 74, 3, 2, 2, 2, 74, 79, 7, 34, 2, 2, 75, 79, 9, 3, + 2, 2, 76, 79, 7, 21, 2, 2, 77, 79, 7, 22, 2, 2, 78, 47, 3, 2, 2, 2, 78, + 52, 3, 2, 2, 2, 78, 54, 3, 2, 2, 2, 78, 55, 3, 2, 2, 2, 78, 59, 3, 2, 2, + 2, 78, 65, 3, 2, 2, 2, 78, 70, 3, 2, 2, 2, 78, 75, 3, 2, 2, 2, 78, 76, + 3, 2, 2, 2, 78, 77, 3, 2, 2, 2, 79, 112, 3, 2, 2, 2, 80, 81, 12, 21, 2, + 2, 81, 82, 9, 4, 2, 2, 82, 111, 5, 12, 7, 22, 83, 84, 12, 20, 2, 2, 84, + 85, 9, 5, 2, 2, 85, 111, 5, 12, 7, 21, 86, 87, 12, 18, 2, 2, 87, 88, 7, + 6, 2, 2, 88, 111, 5, 12, 7, 19, 89, 90, 12, 17, 2, 2, 90, 91, 7, 7, 2, + 2, 91, 111, 5, 12, 7, 18, 92, 93, 12, 16, 2, 2, 93, 94, 7, 11, 2, 2, 94, + 111, 5, 12, 7, 17, 95, 96, 12, 15, 2, 2, 96, 97, 7, 10, 2, 2, 97, 111, + 5, 12, 7, 16, 98, 99, 12, 14, 2, 2, 99, 100, 7, 9, 2, 2, 100, 111, 5, 12, + 7, 15, 101, 102, 12, 13, 2, 2, 102, 103, 7, 8, 2, 2, 103, 111, 5, 12, 7, + 14, 104, 105, 12, 12, 2, 2, 105, 106, 7, 17, 2, 2, 106, 111, 5, 12, 7, + 13, 107, 108, 12, 11, 2, 2, 108, 109, 7, 18, 2, 2, 109, 111, 5, 12, 7, + 12, 110, 80, 3, 2, 2, 2, 110, 83, 3, 2, 2, 2, 110, 86, 3, 2, 2, 2, 110, + 89, 3, 2, 2, 2, 110, 92, 3, 2, 2, 2, 110, 95, 3, 2, 2, 2, 110, 98, 3, 2, + 2, 2, 110, 101, 3, 2, 2, 2, 110, 104, 3, 2, 2, 2, 110, 107, 3, 2, 2, 2, + 111, 114, 3, 2, 2, 2, 112, 110, 3, 2, 2, 2, 112, 113, 3, 2, 2, 2, 113, + 13, 3, 2, 2, 2, 114, 112, 3, 2, 2, 2, 115, 120, 5, 12, 7, 2, 116, 117, + 7, 4, 2, 2, 117, 119, 5, 12, 7, 2, 118, 116, 3, 2, 2, 2, 119, 122, 3, 2, + 2, 2, 120, 118, 3, 2, 2, 2, 120, 121, 3, 2, 2, 2, 121, 15, 3, 2, 2, 2, + 122, 120, 3, 2, 2, 2, 123, 128, 5, 6, 4, 2, 124, 125, 7, 4, 2, 2, 125, + 127, 5, 6, 4, 2, 126, 124, 3, 2, 2, 2, 127, 130, 3, 2, 2, 2, 128, 126, + 3, 2, 2, 2, 128, 129, 3, 2, 2, 2, 129, 17, 3, 2, 2, 2, 130, 128, 3, 2, + 2, 2, 131, 132, 9, 6, 2, 2, 132, 133, 7, 5, 2, 2, 133, 134, 5, 6, 4, 2, + 134, 19, 3, 2, 2, 2, 135, 140, 5, 18, 10, 2, 136, 137, 7, 4, 2, 2, 137, + 139, 5, 18, 10, 2, 138, 136, 3, 2, 2, 2, 139, 142, 3, 2, 2, 2, 140, 138, + 3, 2, 2, 2, 140, 141, 3, 2, 2, 2, 141, 21, 3, 2, 2, 2, 142, 140, 3, 2, + 2, 2, 14, 32, 37, 44, 62, 67, 72, 78, 110, 112, 120, 128, 140, +} +var deserializer = antlr.NewATNDeserializer(nil) +var deserializedATN = deserializer.DeserializeFromUInt16(parserATN) + +var literalNames = []string{ + "", "'|'", "','", "':'", "'=='", "'!='", "'>'", "'<'", "'>='", "'<='", + "'+'", "'-'", "'*'", "'/'", "'%'", "", "", "", "", "", "", "", "", "", + "", "", "", "'('", "')'", "'['", "']'", "'{'", "'}'", "'${'", +} +var symbolicNames = []string{ + "", "", "", "", "EQ", "NEQ", "GT", "LT", "GTE", "LTE", "ADD", "SUB", "MUL", + "DIV", "MOD", "AND", "OR", "TRUE", "FALSE", "FLOAT", "NUMBER", "WHITESPACE", + "NOT", "NAME", "VNAME", "STEXT", "DTEXT", "LPAR", "RPAR", "LARR", "RARR", + "LDICT", "RDICT", "BEGIN_VARIABLE", +} + +var ruleNames = []string{ + "expList", "boolean", "constant", "variable", "variableExp", "exp", "arguments", + "array", "key", "dict", +} +var decisionToDFA = make([]*antlr.DFA, 
len(deserializedATN.DecisionToState)) + +func init() { + for index, ds := range deserializedATN.DecisionToState { + decisionToDFA[index] = antlr.NewDFA(ds, index) + } +} + +type EqlParser struct { + *antlr.BaseParser +} + +func NewEqlParser(input antlr.TokenStream) *EqlParser { + this := new(EqlParser) + + this.BaseParser = antlr.NewBaseParser(input) + + this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, antlr.NewPredictionContextCache()) + this.RuleNames = ruleNames + this.LiteralNames = literalNames + this.SymbolicNames = symbolicNames + this.GrammarFileName = "Eql.g4" + + return this +} + +// EqlParser tokens. +const ( + EqlParserEOF = antlr.TokenEOF + EqlParserT__0 = 1 + EqlParserT__1 = 2 + EqlParserT__2 = 3 + EqlParserEQ = 4 + EqlParserNEQ = 5 + EqlParserGT = 6 + EqlParserLT = 7 + EqlParserGTE = 8 + EqlParserLTE = 9 + EqlParserADD = 10 + EqlParserSUB = 11 + EqlParserMUL = 12 + EqlParserDIV = 13 + EqlParserMOD = 14 + EqlParserAND = 15 + EqlParserOR = 16 + EqlParserTRUE = 17 + EqlParserFALSE = 18 + EqlParserFLOAT = 19 + EqlParserNUMBER = 20 + EqlParserWHITESPACE = 21 + EqlParserNOT = 22 + EqlParserNAME = 23 + EqlParserVNAME = 24 + EqlParserSTEXT = 25 + EqlParserDTEXT = 26 + EqlParserLPAR = 27 + EqlParserRPAR = 28 + EqlParserLARR = 29 + EqlParserRARR = 30 + EqlParserLDICT = 31 + EqlParserRDICT = 32 + EqlParserBEGIN_VARIABLE = 33 +) + +// EqlParser rules. +const ( + EqlParserRULE_expList = 0 + EqlParserRULE_boolean = 1 + EqlParserRULE_constant = 2 + EqlParserRULE_variable = 3 + EqlParserRULE_variableExp = 4 + EqlParserRULE_exp = 5 + EqlParserRULE_arguments = 6 + EqlParserRULE_array = 7 + EqlParserRULE_key = 8 + EqlParserRULE_dict = 9 +) + +// IExpListContext is an interface to support dynamic dispatch. +type IExpListContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsExpListContext differentiates from other interfaces. 
+ IsExpListContext() +} + +type ExpListContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyExpListContext() *ExpListContext { + var p = new(ExpListContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = EqlParserRULE_expList + return p +} + +func (*ExpListContext) IsExpListContext() {} + +func NewExpListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExpListContext { + var p = new(ExpListContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = EqlParserRULE_expList + + return p +} + +func (s *ExpListContext) GetParser() antlr.Parser { return s.parser } + +func (s *ExpListContext) Exp() IExpContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IExpContext) +} + +func (s *ExpListContext) EOF() antlr.TerminalNode { + return s.GetToken(EqlParserEOF, 0) +} + +func (s *ExpListContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpListContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *ExpListContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpList(s) + } +} + +func (s *ExpListContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpList(s) + } +} + +func (s *ExpListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpList(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *EqlParser) ExpList() (localctx IExpListContext) { + localctx = NewExpListContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 0, EqlParserRULE_expList) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(20) + p.exp(0) + } + { + p.SetState(21) + p.Match(EqlParserEOF) + } + + return localctx +} + +// IBooleanContext is an interface to support dynamic dispatch. +type IBooleanContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsBooleanContext differentiates from other interfaces. 
+ IsBooleanContext() +} + +type BooleanContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyBooleanContext() *BooleanContext { + var p = new(BooleanContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = EqlParserRULE_boolean + return p +} + +func (*BooleanContext) IsBooleanContext() {} + +func NewBooleanContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *BooleanContext { + var p = new(BooleanContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = EqlParserRULE_boolean + + return p +} + +func (s *BooleanContext) GetParser() antlr.Parser { return s.parser } + +func (s *BooleanContext) TRUE() antlr.TerminalNode { + return s.GetToken(EqlParserTRUE, 0) +} + +func (s *BooleanContext) FALSE() antlr.TerminalNode { + return s.GetToken(EqlParserFALSE, 0) +} + +func (s *BooleanContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *BooleanContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *BooleanContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterBoolean(s) + } +} + +func (s *BooleanContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitBoolean(s) + } +} + +func (s *BooleanContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitBoolean(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *EqlParser) Boolean() (localctx IBooleanContext) { + localctx = NewBooleanContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 2, EqlParserRULE_boolean) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(23) + _la = p.GetTokenStream().LA(1) + + if !(_la == EqlParserTRUE || _la == EqlParserFALSE) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + + return localctx +} + +// IConstantContext is an interface to support dynamic dispatch. +type IConstantContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsConstantContext differentiates from other interfaces. 
+ IsConstantContext() +} + +type ConstantContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyConstantContext() *ConstantContext { + var p = new(ConstantContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = EqlParserRULE_constant + return p +} + +func (*ConstantContext) IsConstantContext() {} + +func NewConstantContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ConstantContext { + var p = new(ConstantContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = EqlParserRULE_constant + + return p +} + +func (s *ConstantContext) GetParser() antlr.Parser { return s.parser } + +func (s *ConstantContext) STEXT() antlr.TerminalNode { + return s.GetToken(EqlParserSTEXT, 0) +} + +func (s *ConstantContext) DTEXT() antlr.TerminalNode { + return s.GetToken(EqlParserDTEXT, 0) +} + +func (s *ConstantContext) FLOAT() antlr.TerminalNode { + return s.GetToken(EqlParserFLOAT, 0) +} + +func (s *ConstantContext) NUMBER() antlr.TerminalNode { + return s.GetToken(EqlParserNUMBER, 0) +} + +func (s *ConstantContext) Boolean() IBooleanContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IBooleanContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IBooleanContext) +} + +func (s *ConstantContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ConstantContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *ConstantContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterConstant(s) + } +} + +func (s *ConstantContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitConstant(s) + } +} + +func (s *ConstantContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitConstant(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *EqlParser) Constant() (localctx IConstantContext) { + localctx = NewConstantContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 4, EqlParserRULE_constant) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.SetState(30) + p.GetErrorHandler().Sync(p) + + switch p.GetTokenStream().LA(1) { + case EqlParserSTEXT: + p.EnterOuterAlt(localctx, 1) + { + p.SetState(25) + p.Match(EqlParserSTEXT) + } + + case EqlParserDTEXT: + p.EnterOuterAlt(localctx, 2) + { + p.SetState(26) + p.Match(EqlParserDTEXT) + } + + case EqlParserFLOAT: + p.EnterOuterAlt(localctx, 3) + { + p.SetState(27) + p.Match(EqlParserFLOAT) + } + + case EqlParserNUMBER: + p.EnterOuterAlt(localctx, 4) + { + p.SetState(28) + p.Match(EqlParserNUMBER) + } + + case EqlParserTRUE, EqlParserFALSE: + p.EnterOuterAlt(localctx, 5) + { + p.SetState(29) + p.Boolean() + } + + default: + panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + } + + return localctx +} + +// IVariableContext is an interface to support dynamic dispatch. +type IVariableContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. 
+ GetParser() antlr.Parser + + // IsVariableContext differentiates from other interfaces. + IsVariableContext() +} + +type VariableContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyVariableContext() *VariableContext { + var p = new(VariableContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = EqlParserRULE_variable + return p +} + +func (*VariableContext) IsVariableContext() {} + +func NewVariableContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *VariableContext { + var p = new(VariableContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = EqlParserRULE_variable + + return p +} + +func (s *VariableContext) GetParser() antlr.Parser { return s.parser } + +func (s *VariableContext) NAME() antlr.TerminalNode { + return s.GetToken(EqlParserNAME, 0) +} + +func (s *VariableContext) VNAME() antlr.TerminalNode { + return s.GetToken(EqlParserVNAME, 0) +} + +func (s *VariableContext) Constant() IConstantContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IConstantContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IConstantContext) +} + +func (s *VariableContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *VariableContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *VariableContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterVariable(s) + } +} + +func (s *VariableContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitVariable(s) + } +} + +func (s *VariableContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitVariable(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *EqlParser) Variable() (localctx IVariableContext) { + localctx = NewVariableContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 6, EqlParserRULE_variable) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.SetState(35) + p.GetErrorHandler().Sync(p) + + switch p.GetTokenStream().LA(1) { + case EqlParserNAME: + p.EnterOuterAlt(localctx, 1) + { + p.SetState(32) + p.Match(EqlParserNAME) + } + + case EqlParserVNAME: + p.EnterOuterAlt(localctx, 2) + { + p.SetState(33) + p.Match(EqlParserVNAME) + } + + case EqlParserTRUE, EqlParserFALSE, EqlParserFLOAT, EqlParserNUMBER, EqlParserSTEXT, EqlParserDTEXT: + p.EnterOuterAlt(localctx, 3) + { + p.SetState(34) + p.Constant() + } + + default: + panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + } + + return localctx +} + +// IVariableExpContext is an interface to support dynamic dispatch. +type IVariableExpContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsVariableExpContext differentiates from other interfaces. 
+ IsVariableExpContext() +} + +type VariableExpContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyVariableExpContext() *VariableExpContext { + var p = new(VariableExpContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = EqlParserRULE_variableExp + return p +} + +func (*VariableExpContext) IsVariableExpContext() {} + +func NewVariableExpContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *VariableExpContext { + var p = new(VariableExpContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = EqlParserRULE_variableExp + + return p +} + +func (s *VariableExpContext) GetParser() antlr.Parser { return s.parser } + +func (s *VariableExpContext) AllVariable() []IVariableContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IVariableContext)(nil)).Elem()) + var tst = make([]IVariableContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IVariableContext) + } + } + + return tst +} + +func (s *VariableExpContext) Variable(i int) IVariableContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IVariableContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IVariableContext) +} + +func (s *VariableExpContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *VariableExpContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *VariableExpContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterVariableExp(s) + } +} + +func (s *VariableExpContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitVariableExp(s) + } +} + +func (s *VariableExpContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitVariableExp(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *EqlParser) VariableExp() (localctx IVariableExpContext) { + localctx = NewVariableExpContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 8, EqlParserRULE_variableExp) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(37) + p.Variable() + } + p.SetState(42) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + for _la == EqlParserT__0 { + { + p.SetState(38) + p.Match(EqlParserT__0) + } + { + p.SetState(39) + p.Variable() + } + + p.SetState(44) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + } + + return localctx +} + +// IExpContext is an interface to support dynamic dispatch. +type IExpContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsExpContext differentiates from other interfaces. 
+ IsExpContext() +} + +type ExpContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyExpContext() *ExpContext { + var p = new(ExpContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = EqlParserRULE_exp + return p +} + +func (*ExpContext) IsExpContext() {} + +func NewExpContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExpContext { + var p = new(ExpContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = EqlParserRULE_exp + + return p +} + +func (s *ExpContext) GetParser() antlr.Parser { return s.parser } + +func (s *ExpContext) CopyFrom(ctx *ExpContext) { + s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) +} + +func (s *ExpContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +type ExpArithmeticNEQContext struct { + *ExpContext + left IExpContext + right IExpContext +} + +func NewExpArithmeticNEQContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticNEQContext { + var p = new(ExpArithmeticNEQContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpArithmeticNEQContext) GetLeft() IExpContext { return s.left } + +func (s *ExpArithmeticNEQContext) GetRight() IExpContext { return s.right } + +func (s *ExpArithmeticNEQContext) SetLeft(v IExpContext) { s.left = v } + +func (s *ExpArithmeticNEQContext) SetRight(v IExpContext) { s.right = v } + +func (s *ExpArithmeticNEQContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpArithmeticNEQContext) NEQ() antlr.TerminalNode { + return s.GetToken(EqlParserNEQ, 0) +} + +func (s *ExpArithmeticNEQContext) AllExp() []IExpContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) + var tst = make([]IExpContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IExpContext) + } + } + + return tst +} + +func (s *ExpArithmeticNEQContext) Exp(i int) IExpContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IExpContext) +} + +func (s *ExpArithmeticNEQContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpArithmeticNEQ(s) + } +} + +func (s *ExpArithmeticNEQContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpArithmeticNEQ(s) + } +} + +func (s *ExpArithmeticNEQContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpArithmeticNEQ(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpArithmeticEQContext struct { + *ExpContext + left IExpContext + right IExpContext +} + +func NewExpArithmeticEQContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticEQContext { + var p = new(ExpArithmeticEQContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpArithmeticEQContext) GetLeft() IExpContext { return s.left } + +func (s *ExpArithmeticEQContext) GetRight() IExpContext { return s.right } + +func (s *ExpArithmeticEQContext) SetLeft(v IExpContext) { s.left = v } + +func (s 
*ExpArithmeticEQContext) SetRight(v IExpContext) { s.right = v } + +func (s *ExpArithmeticEQContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpArithmeticEQContext) EQ() antlr.TerminalNode { + return s.GetToken(EqlParserEQ, 0) +} + +func (s *ExpArithmeticEQContext) AllExp() []IExpContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) + var tst = make([]IExpContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IExpContext) + } + } + + return tst +} + +func (s *ExpArithmeticEQContext) Exp(i int) IExpContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IExpContext) +} + +func (s *ExpArithmeticEQContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpArithmeticEQ(s) + } +} + +func (s *ExpArithmeticEQContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpArithmeticEQ(s) + } +} + +func (s *ExpArithmeticEQContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpArithmeticEQ(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpArithmeticGTEContext struct { + *ExpContext + left IExpContext + right IExpContext +} + +func NewExpArithmeticGTEContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticGTEContext { + var p = new(ExpArithmeticGTEContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpArithmeticGTEContext) GetLeft() IExpContext { return s.left } + +func (s *ExpArithmeticGTEContext) GetRight() IExpContext { return s.right } + +func (s *ExpArithmeticGTEContext) SetLeft(v IExpContext) { s.left = v } + +func (s *ExpArithmeticGTEContext) SetRight(v IExpContext) { s.right = v } + +func (s *ExpArithmeticGTEContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpArithmeticGTEContext) GTE() antlr.TerminalNode { + return s.GetToken(EqlParserGTE, 0) +} + +func (s *ExpArithmeticGTEContext) AllExp() []IExpContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) + var tst = make([]IExpContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IExpContext) + } + } + + return tst +} + +func (s *ExpArithmeticGTEContext) Exp(i int) IExpContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IExpContext) +} + +func (s *ExpArithmeticGTEContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpArithmeticGTE(s) + } +} + +func (s *ExpArithmeticGTEContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpArithmeticGTE(s) + } +} + +func (s *ExpArithmeticGTEContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpArithmeticGTE(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpArithmeticLTEContext struct { + *ExpContext + left IExpContext + right IExpContext +} + +func NewExpArithmeticLTEContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticLTEContext { + var p = new(ExpArithmeticLTEContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + 
p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpArithmeticLTEContext) GetLeft() IExpContext { return s.left } + +func (s *ExpArithmeticLTEContext) GetRight() IExpContext { return s.right } + +func (s *ExpArithmeticLTEContext) SetLeft(v IExpContext) { s.left = v } + +func (s *ExpArithmeticLTEContext) SetRight(v IExpContext) { s.right = v } + +func (s *ExpArithmeticLTEContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpArithmeticLTEContext) LTE() antlr.TerminalNode { + return s.GetToken(EqlParserLTE, 0) +} + +func (s *ExpArithmeticLTEContext) AllExp() []IExpContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) + var tst = make([]IExpContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IExpContext) + } + } + + return tst +} + +func (s *ExpArithmeticLTEContext) Exp(i int) IExpContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IExpContext) +} + +func (s *ExpArithmeticLTEContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpArithmeticLTE(s) + } +} + +func (s *ExpArithmeticLTEContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpArithmeticLTE(s) + } +} + +func (s *ExpArithmeticLTEContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpArithmeticLTE(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpArithmeticGTContext struct { + *ExpContext + left IExpContext + right IExpContext +} + +func NewExpArithmeticGTContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticGTContext { + var p = new(ExpArithmeticGTContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpArithmeticGTContext) GetLeft() IExpContext { return s.left } + +func (s *ExpArithmeticGTContext) GetRight() IExpContext { return s.right } + +func (s *ExpArithmeticGTContext) SetLeft(v IExpContext) { s.left = v } + +func (s *ExpArithmeticGTContext) SetRight(v IExpContext) { s.right = v } + +func (s *ExpArithmeticGTContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpArithmeticGTContext) GT() antlr.TerminalNode { + return s.GetToken(EqlParserGT, 0) +} + +func (s *ExpArithmeticGTContext) AllExp() []IExpContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) + var tst = make([]IExpContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IExpContext) + } + } + + return tst +} + +func (s *ExpArithmeticGTContext) Exp(i int) IExpContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IExpContext) +} + +func (s *ExpArithmeticGTContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpArithmeticGT(s) + } +} + +func (s *ExpArithmeticGTContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpArithmeticGT(s) + } +} + +func (s *ExpArithmeticGTContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpArithmeticGT(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpArithmeticMulDivModContext struct { + 
*ExpContext + left IExpContext + right IExpContext +} + +func NewExpArithmeticMulDivModContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticMulDivModContext { + var p = new(ExpArithmeticMulDivModContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpArithmeticMulDivModContext) GetLeft() IExpContext { return s.left } + +func (s *ExpArithmeticMulDivModContext) GetRight() IExpContext { return s.right } + +func (s *ExpArithmeticMulDivModContext) SetLeft(v IExpContext) { s.left = v } + +func (s *ExpArithmeticMulDivModContext) SetRight(v IExpContext) { s.right = v } + +func (s *ExpArithmeticMulDivModContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpArithmeticMulDivModContext) AllExp() []IExpContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) + var tst = make([]IExpContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IExpContext) + } + } + + return tst +} + +func (s *ExpArithmeticMulDivModContext) Exp(i int) IExpContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IExpContext) +} + +func (s *ExpArithmeticMulDivModContext) MUL() antlr.TerminalNode { + return s.GetToken(EqlParserMUL, 0) +} + +func (s *ExpArithmeticMulDivModContext) DIV() antlr.TerminalNode { + return s.GetToken(EqlParserDIV, 0) +} + +func (s *ExpArithmeticMulDivModContext) MOD() antlr.TerminalNode { + return s.GetToken(EqlParserMOD, 0) +} + +func (s *ExpArithmeticMulDivModContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpArithmeticMulDivMod(s) + } +} + +func (s *ExpArithmeticMulDivModContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpArithmeticMulDivMod(s) + } +} + +func (s *ExpArithmeticMulDivModContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpArithmeticMulDivMod(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpDictContext struct { + *ExpContext +} + +func NewExpDictContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpDictContext { + var p = new(ExpDictContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpDictContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpDictContext) LDICT() antlr.TerminalNode { + return s.GetToken(EqlParserLDICT, 0) +} + +func (s *ExpDictContext) RDICT() antlr.TerminalNode { + return s.GetToken(EqlParserRDICT, 0) +} + +func (s *ExpDictContext) Dict() IDictContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IDictContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IDictContext) +} + +func (s *ExpDictContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpDict(s) + } +} + +func (s *ExpDictContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpDict(s) + } +} + +func (s *ExpDictContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpDict(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpTextContext struct { + *ExpContext +} + +func 
NewExpTextContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpTextContext { + var p = new(ExpTextContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpTextContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpTextContext) STEXT() antlr.TerminalNode { + return s.GetToken(EqlParserSTEXT, 0) +} + +func (s *ExpTextContext) DTEXT() antlr.TerminalNode { + return s.GetToken(EqlParserDTEXT, 0) +} + +func (s *ExpTextContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpText(s) + } +} + +func (s *ExpTextContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpText(s) + } +} + +func (s *ExpTextContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpText(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpNumberContext struct { + *ExpContext +} + +func NewExpNumberContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpNumberContext { + var p = new(ExpNumberContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpNumberContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpNumberContext) NUMBER() antlr.TerminalNode { + return s.GetToken(EqlParserNUMBER, 0) +} + +func (s *ExpNumberContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpNumber(s) + } +} + +func (s *ExpNumberContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.ExitExpNumber(s) + } +} + +func (s *ExpNumberContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case EqlVisitor: + return t.VisitExpNumber(s) + + default: + return t.VisitChildren(s) + } +} + +type ExpLogicalAndContext struct { + *ExpContext + left IExpContext + right IExpContext +} + +func NewExpLogicalAndContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpLogicalAndContext { + var p = new(ExpLogicalAndContext) + + p.ExpContext = NewEmptyExpContext() + p.parser = parser + p.CopyFrom(ctx.(*ExpContext)) + + return p +} + +func (s *ExpLogicalAndContext) GetLeft() IExpContext { return s.left } + +func (s *ExpLogicalAndContext) GetRight() IExpContext { return s.right } + +func (s *ExpLogicalAndContext) SetLeft(v IExpContext) { s.left = v } + +func (s *ExpLogicalAndContext) SetRight(v IExpContext) { s.right = v } + +func (s *ExpLogicalAndContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExpLogicalAndContext) AND() antlr.TerminalNode { + return s.GetToken(EqlParserAND, 0) +} + +func (s *ExpLogicalAndContext) AllExp() []IExpContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem()) + var tst = make([]IExpContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IExpContext) + } + } + + return tst +} + +func (s *ExpLogicalAndContext) Exp(i int) IExpContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IExpContext) +} + +func (s *ExpLogicalAndContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(EqlListener); ok { + listenerT.EnterExpLogicalAnd(s) + } +} + +func (s *ExpLogicalAndContext) 
ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpLogicalAnd(s)
+	}
+}
+
+func (s *ExpLogicalAndContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpLogicalAnd(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpLogicalORContext struct {
+	*ExpContext
+	left  IExpContext
+	right IExpContext
+}
+
+func NewExpLogicalORContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpLogicalORContext {
+	var p = new(ExpLogicalORContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpLogicalORContext) GetLeft() IExpContext { return s.left }
+
+func (s *ExpLogicalORContext) GetRight() IExpContext { return s.right }
+
+func (s *ExpLogicalORContext) SetLeft(v IExpContext) { s.left = v }
+
+func (s *ExpLogicalORContext) SetRight(v IExpContext) { s.right = v }
+
+func (s *ExpLogicalORContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpLogicalORContext) OR() antlr.TerminalNode {
+	return s.GetToken(EqlParserOR, 0)
+}
+
+func (s *ExpLogicalORContext) AllExp() []IExpContext {
+	var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem())
+	var tst = make([]IExpContext, len(ts))
+
+	for i, t := range ts {
+		if t != nil {
+			tst[i] = t.(IExpContext)
+		}
+	}
+
+	return tst
+}
+
+func (s *ExpLogicalORContext) Exp(i int) IExpContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IExpContext)
+}
+
+func (s *ExpLogicalORContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpLogicalOR(s)
+	}
+}
+
+func (s *ExpLogicalORContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpLogicalOR(s)
+	}
+}
+
+func (s *ExpLogicalORContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpLogicalOR(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpFloatContext struct {
+	*ExpContext
+}
+
+func NewExpFloatContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpFloatContext {
+	var p = new(ExpFloatContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpFloatContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpFloatContext) FLOAT() antlr.TerminalNode {
+	return s.GetToken(EqlParserFLOAT, 0)
+}
+
+func (s *ExpFloatContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpFloat(s)
+	}
+}
+
+func (s *ExpFloatContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpFloat(s)
+	}
+}
+
+func (s *ExpFloatContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpFloat(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpVariableContext struct {
+	*ExpContext
+}
+
+func NewExpVariableContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpVariableContext {
+	var p = new(ExpVariableContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpVariableContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpVariableContext) BEGIN_VARIABLE() antlr.TerminalNode {
+	return s.GetToken(EqlParserBEGIN_VARIABLE, 0)
+}
+
+func (s *ExpVariableContext) VariableExp() IVariableExpContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IVariableExpContext)(nil)).Elem(), 0)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IVariableExpContext)
+}
+
+func (s *ExpVariableContext) RDICT() antlr.TerminalNode {
+	return s.GetToken(EqlParserRDICT, 0)
+}
+
+func (s *ExpVariableContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpVariable(s)
+	}
+}
+
+func (s *ExpVariableContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpVariable(s)
+	}
+}
+
+func (s *ExpVariableContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpVariable(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpArrayContext struct {
+	*ExpContext
+}
+
+func NewExpArrayContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArrayContext {
+	var p = new(ExpArrayContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpArrayContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpArrayContext) LARR() antlr.TerminalNode {
+	return s.GetToken(EqlParserLARR, 0)
+}
+
+func (s *ExpArrayContext) RARR() antlr.TerminalNode {
+	return s.GetToken(EqlParserRARR, 0)
+}
+
+func (s *ExpArrayContext) Array() IArrayContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IArrayContext)(nil)).Elem(), 0)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IArrayContext)
+}
+
+func (s *ExpArrayContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpArray(s)
+	}
+}
+
+func (s *ExpArrayContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpArray(s)
+	}
+}
+
+func (s *ExpArrayContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpArray(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpNotContext struct {
+	*ExpContext
+}
+
+func NewExpNotContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpNotContext {
+	var p = new(ExpNotContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpNotContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpNotContext) NOT() antlr.TerminalNode {
+	return s.GetToken(EqlParserNOT, 0)
+}
+
+func (s *ExpNotContext) Exp() IExpContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), 0)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IExpContext)
+}
+
+func (s *ExpNotContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpNot(s)
+	}
+}
+
+func (s *ExpNotContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpNot(s)
+	}
+}
+
+func (s *ExpNotContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpNot(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpInParenContext struct {
+	*ExpContext
+}
+
+func NewExpInParenContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpInParenContext {
+	var p = new(ExpInParenContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpInParenContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpInParenContext) LPAR() antlr.TerminalNode {
+	return s.GetToken(EqlParserLPAR, 0)
+}
+
+func (s *ExpInParenContext) Exp() IExpContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), 0)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IExpContext)
+}
+
+func (s *ExpInParenContext) RPAR() antlr.TerminalNode {
+	return s.GetToken(EqlParserRPAR, 0)
+}
+
+func (s *ExpInParenContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpInParen(s)
+	}
+}
+
+func (s *ExpInParenContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpInParen(s)
+	}
+}
+
+func (s *ExpInParenContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpInParen(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpBooleanContext struct {
+	*ExpContext
+}
+
+func NewExpBooleanContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpBooleanContext {
+	var p = new(ExpBooleanContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpBooleanContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpBooleanContext) Boolean() IBooleanContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IBooleanContext)(nil)).Elem(), 0)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IBooleanContext)
+}
+
+func (s *ExpBooleanContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpBoolean(s)
+	}
+}
+
+func (s *ExpBooleanContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpBoolean(s)
+	}
+}
+
+func (s *ExpBooleanContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpBoolean(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpArithmeticAddSubContext struct {
+	*ExpContext
+	left  IExpContext
+	right IExpContext
+}
+
+func NewExpArithmeticAddSubContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticAddSubContext {
+	var p = new(ExpArithmeticAddSubContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpArithmeticAddSubContext) GetLeft() IExpContext { return s.left }
+
+func (s *ExpArithmeticAddSubContext) GetRight() IExpContext { return s.right }
+
+func (s *ExpArithmeticAddSubContext) SetLeft(v IExpContext) { s.left = v }
+
+func (s *ExpArithmeticAddSubContext) SetRight(v IExpContext) { s.right = v }
+
+func (s *ExpArithmeticAddSubContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpArithmeticAddSubContext) AllExp() []IExpContext {
+	var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem())
+	var tst = make([]IExpContext, len(ts))
+
+	for i, t := range ts {
+		if t != nil {
+			tst[i] = t.(IExpContext)
+		}
+	}
+
+	return tst
+}
+
+func (s *ExpArithmeticAddSubContext) Exp(i int) IExpContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IExpContext)
+}
+
+func (s *ExpArithmeticAddSubContext) ADD() antlr.TerminalNode {
+	return s.GetToken(EqlParserADD, 0)
+}
+
+func (s *ExpArithmeticAddSubContext) SUB() antlr.TerminalNode {
+	return s.GetToken(EqlParserSUB, 0)
+}
+
+func (s *ExpArithmeticAddSubContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpArithmeticAddSub(s)
+	}
+}
+
+func (s *ExpArithmeticAddSubContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpArithmeticAddSub(s)
+	}
+}
+
+func (s *ExpArithmeticAddSubContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpArithmeticAddSub(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpFunctionContext struct {
+	*ExpContext
+}
+
+func NewExpFunctionContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpFunctionContext {
+	var p = new(ExpFunctionContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpFunctionContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpFunctionContext) NAME() antlr.TerminalNode {
+	return s.GetToken(EqlParserNAME, 0)
+}
+
+func (s *ExpFunctionContext) LPAR() antlr.TerminalNode {
+	return s.GetToken(EqlParserLPAR, 0)
+}
+
+func (s *ExpFunctionContext) RPAR() antlr.TerminalNode {
+	return s.GetToken(EqlParserRPAR, 0)
+}
+
+func (s *ExpFunctionContext) Arguments() IArgumentsContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IArgumentsContext)(nil)).Elem(), 0)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IArgumentsContext)
+}
+
+func (s *ExpFunctionContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpFunction(s)
+	}
+}
+
+func (s *ExpFunctionContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpFunction(s)
+	}
+}
+
+func (s *ExpFunctionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpFunction(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+type ExpArithmeticLTContext struct {
+	*ExpContext
+	left  IExpContext
+	right IExpContext
+}
+
+func NewExpArithmeticLTContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ExpArithmeticLTContext {
+	var p = new(ExpArithmeticLTContext)
+
+	p.ExpContext = NewEmptyExpContext()
+	p.parser = parser
+	p.CopyFrom(ctx.(*ExpContext))
+
+	return p
+}
+
+func (s *ExpArithmeticLTContext) GetLeft() IExpContext { return s.left }
+
+func (s *ExpArithmeticLTContext) GetRight() IExpContext { return s.right }
+
+func (s *ExpArithmeticLTContext) SetLeft(v IExpContext) { s.left = v }
+
+func (s *ExpArithmeticLTContext) SetRight(v IExpContext) { s.right = v }
+
+func (s *ExpArithmeticLTContext) GetRuleContext() antlr.RuleContext {
+	return s
+}
+
+func (s *ExpArithmeticLTContext) LT() antlr.TerminalNode {
+	return s.GetToken(EqlParserLT, 0)
+}
+
+func (s *ExpArithmeticLTContext) AllExp() []IExpContext {
+	var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpContext)(nil)).Elem())
+	var tst = make([]IExpContext, len(ts))
+
+	for i, t := range ts {
+		if t != nil {
+			tst[i] = t.(IExpContext)
+		}
+	}
+
+	return tst
+}
+
+func (s *ExpArithmeticLTContext) Exp(i int) IExpContext {
+	var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpContext)(nil)).Elem(), i)
+
+	if t == nil {
+		return nil
+	}
+
+	return t.(IExpContext)
+}
+
+func (s *ExpArithmeticLTContext) EnterRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.EnterExpArithmeticLT(s)
+	}
+}
+
+func (s *ExpArithmeticLTContext) ExitRule(listener antlr.ParseTreeListener) {
+	if listenerT, ok := listener.(EqlListener); ok {
+		listenerT.ExitExpArithmeticLT(s)
+	}
+}
+
+func (s *ExpArithmeticLTContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
+	switch t := visitor.(type) {
+	case EqlVisitor:
+		return t.VisitExpArithmeticLT(s)
+
+	default:
+		return t.VisitChildren(s)
+	}
+}
+
+func (p *EqlParser) Exp() (localctx IExpContext) {
+	return p.exp(0)
+}
+
+func (p *EqlParser) exp(_p int) (localctx IExpContext) {
+	var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
+	_parentState := p.GetState()
+	localctx = NewExpContext(p, p.GetParserRuleContext(), _parentState)
+	var _prevctx IExpContext = localctx
+	var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning.
+	_startState := 10
+	p.EnterRecursionRule(localctx, 10, EqlParserRULE_exp, _p)
+	var _la int
+
+	defer func() {
+		p.UnrollRecursionContexts(_parentctx)
+	}()
+
+	defer func() {
+		if err := recover(); err != nil {
+			if v, ok := err.(antlr.RecognitionException); ok {
+				localctx.SetException(v)
+				p.GetErrorHandler().ReportError(p, v)
+				p.GetErrorHandler().Recover(p, v)
+			} else {
+				panic(err)
+			}
+		}
+	}()
+
+	var _alt int
+
+	p.EnterOuterAlt(localctx, 1)
+	p.SetState(76)
+	p.GetErrorHandler().Sync(p)
+
+	switch p.GetTokenStream().LA(1) {
+	case EqlParserLPAR:
+		localctx = NewExpInParenContext(p, localctx)
+		p.SetParserRuleContext(localctx)
+		_prevctx = localctx
+
+		{
+			p.SetState(46)
+			p.Match(EqlParserLPAR)
+		}
+		{
+			p.SetState(47)
+			p.exp(0)
+		}
+		{
+			p.SetState(48)
+			p.Match(EqlParserRPAR)
+		}
+
+	case EqlParserNOT:
+		localctx = NewExpNotContext(p, localctx)
+		p.SetParserRuleContext(localctx)
+		_prevctx = localctx
+		{
+			p.SetState(50)
+			p.Match(EqlParserNOT)
+		}
+		{
+			p.SetState(51)
+			p.exp(17)
+		}
+
+	case EqlParserTRUE, EqlParserFALSE:
+		localctx = NewExpBooleanContext(p, localctx)
+		p.SetParserRuleContext(localctx)
+		_prevctx = localctx
+		{
+			p.SetState(52)
+			p.Boolean()
+		}
+
+	case EqlParserBEGIN_VARIABLE:
+		localctx = NewExpVariableContext(p, localctx)
+		p.SetParserRuleContext(localctx)
+		_prevctx = localctx
+		{
+			p.SetState(53)
+			p.Match(EqlParserBEGIN_VARIABLE)
+		}
+		{
+			p.SetState(54)
+			p.VariableExp()
+		}
+		{
+			p.SetState(55)
+			p.Match(EqlParserRDICT)
+		}
+
+	case EqlParserNAME:
+		localctx = NewExpFunctionContext(p, localctx)
+		p.SetParserRuleContext(localctx)
+		_prevctx = localctx
+		{
+			p.SetState(57)
+			p.Match(EqlParserNAME)
+		}
+		{
+			p.SetState(58)
+			p.Match(EqlParserLPAR)
+		}
+		p.SetState(60)
+		p.GetErrorHandler().Sync(p)
+		_la = p.GetTokenStream().LA(1)
+
+		if ((_la-17)&-(0x1f+1)) == 0 && ((1< 0 and length(${inputs}) > 0 and hasKey(${output}, 'elasticsearch')
diff --git a/x-pack/elastic-agent/spec/filebeat.yml b/x-pack/elastic-agent/spec/filebeat.yml
index a1a7f9243b9..a461ab1a1c8 100644
--- a/x-pack/elastic-agent/spec/filebeat.yml
+++ b/x-pack/elastic-agent/spec/filebeat.yml
@@ -96,5 +96,5 @@ rules:
 - filebeat
 - output
 - keystore
-when: HasItems(%{[filebeat.inputs]}) && HasNamespace('output', 'elasticsearch', 'redis',
+when: length(${filebeat.inputs}) > 0 and hasKey(${output}, 'elasticsearch', 'redis',
 'kafka', 'logstash')
diff --git a/x-pack/elastic-agent/spec/heartbeat.yml b/x-pack/elastic-agent/spec/heartbeat.yml
index b13ea46059b..399fd7d0885 100644
--- a/x-pack/elastic-agent/spec/heartbeat.yml
+++ b/x-pack/elastic-agent/spec/heartbeat.yml
@@ -18,5 +18,5 @@ rules:
 - inputs
 - output
 - keystore
-when: HasItems(%{[inputs]}) && HasNamespace('output', 'elasticsearch', 'redis',
+when: length(${inputs}) > 0 and hasKey(${output}, 'elasticsearch', 'redis',
 'kafka', 'logstash')
diff --git a/x-pack/elastic-agent/spec/metricbeat.yml b/x-pack/elastic-agent/spec/metricbeat.yml
index 1546e67f32d..94b69e9a2f3 100644
--- a/x-pack/elastic-agent/spec/metricbeat.yml
+++ b/x-pack/elastic-agent/spec/metricbeat.yml
@@ -87,5 +87,5 @@ rules:
 - output
 - keystore
-when: HasItems(%{[metricbeat.modules]}) && HasNamespace('output', 'elasticsearch',
+when: length(${metricbeat.modules}) > 0 and hasKey(${output}, 'elasticsearch',
 'redis', 'kafka', 'logstash')
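Note: the rewritten `when:` conditions above are EQL expressions evaluated against the agent's variable store. The following is a minimal, illustrative sketch only; it assumes `eql.New(expression)` returns an expression with an `Eval(store)` method over a store exposing `Lookup(name) (interface{}, bool)` — check `x-pack/elastic-agent/pkg/eql/eql.go` in this patch for the exact signatures. The `mapStore` type and the sample values are hypothetical; real callers pass the transpiler AST rather than a map.

package main

import (
	"fmt"

	// Import path assumes the 7.x module layout of this repository.
	"github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/eql"
)

// mapStore is a hypothetical variable store backed by a map, used here only
// so the example is self-contained.
type mapStore map[string]interface{}

// Lookup resolves a ${...} variable reference by its full dotted name.
func (m mapStore) Lookup(name string) (interface{}, bool) {
	v, ok := m[name]
	return v, ok
}

func main() {
	// Same shape as the filebeat.yml condition in the hunk above.
	exp, err := eql.New("length(${filebeat.inputs}) > 0 and hasKey(${output}, 'elasticsearch')")
	if err != nil {
		panic(err)
	}

	// Evaluate against sample data: one input and an elasticsearch output.
	ok, err := exp.Eval(mapStore{
		"filebeat.inputs": []interface{}{
			map[string]interface{}{"type": "log"},
		},
		"output": map[string]interface{}{
			"elasticsearch": map[string]interface{}{"hosts": []interface{}{"127.0.0.1:9200"}},
		},
	})
	fmt.Println(ok, err) // expected: true <nil>
}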