From 3aeeafe79eceb85c47086c75c62cc99029f4c323 Mon Sep 17 00:00:00 2001 From: Evgenii Stratonikov Date: Fri, 21 May 2021 18:12:32 +0300 Subject: [PATCH] [#3] policy: use ANTLRv4 parser generator Signed-off-by: Evgenii Stratonikov --- .gitattributes | 4 + go.mod | 2 +- go.sum | 5 +- pkg/policy/doc.go | 2 +- pkg/policy/grammar.ebnf | 55 - pkg/policy/grammar.go | 60 - pkg/policy/parser/Query.g4 | 45 + pkg/policy/parser/Query.interp | 67 + pkg/policy/parser/Query.tokens | 37 + pkg/policy/parser/QueryLexer.g4 | 41 + pkg/policy/parser/QueryLexer.interp | 87 + pkg/policy/parser/QueryLexer.tokens | 37 + pkg/policy/parser/generate.go | 3 + pkg/policy/parser/query_base_listener.go | 106 + pkg/policy/parser/query_base_visitor.go | 65 + pkg/policy/parser/query_lexer.go | 193 ++ pkg/policy/parser/query_listener.go | 94 + pkg/policy/parser/query_parser.go | 2474 ++++++++++++++++++++++ pkg/policy/parser/query_visitor.go | 52 + pkg/policy/query.go | 363 +++- pkg/policy/query_test.go | 35 +- 21 files changed, 3586 insertions(+), 241 deletions(-) create mode 100644 .gitattributes delete mode 100644 pkg/policy/grammar.ebnf delete mode 100644 pkg/policy/grammar.go create mode 100644 pkg/policy/parser/Query.g4 create mode 100644 pkg/policy/parser/Query.interp create mode 100644 pkg/policy/parser/Query.tokens create mode 100644 pkg/policy/parser/QueryLexer.g4 create mode 100644 pkg/policy/parser/QueryLexer.interp create mode 100644 pkg/policy/parser/QueryLexer.tokens create mode 100644 pkg/policy/parser/generate.go create mode 100644 pkg/policy/parser/query_base_listener.go create mode 100644 pkg/policy/parser/query_base_visitor.go create mode 100644 pkg/policy/parser/query_lexer.go create mode 100644 pkg/policy/parser/query_listener.go create mode 100644 pkg/policy/parser/query_parser.go create mode 100644 pkg/policy/parser/query_visitor.go diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..b371347c --- /dev/null +++ b/.gitattributes @@ -0,0 +1,4 @@ +/pkg/policy/parser/*.go -diff +/pkg/policy/parser/generate.go diff +**/*.interp -diff +**/*.tokens -diff diff --git a/go.mod b/go.mod index f7f18292..25c51c6b 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/nspcc-dev/neofs-sdk-go go 1.16 require ( - github.com/alecthomas/participle v0.7.1 + github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210521073959-f0d4d129b7f1 github.com/nspcc-dev/neofs-api-go v1.27.0 github.com/stretchr/testify v1.6.1 go.uber.org/zap v1.10.0 diff --git a/go.sum b/go.sum index 799b7f75..ed17a369 100644 --- a/go.sum +++ b/go.sum @@ -10,15 +10,14 @@ github.com/Workiva/go-datastructures v1.0.50/go.mod h1:Z+F2Rca0qCsVYDS8z7bAGm8f3 github.com/abiosoft/ishell v2.0.0+incompatible/go.mod h1:HQR9AqF2R3P4XXpMpI0NAzgHf/aS6+zVXRj14cVk9qg= github.com/abiosoft/readline v0.0.0-20180607040430-155bce2042db/go.mod h1:rB3B4rKii8V21ydCbIzH5hZiCQE7f5E9SzUb/ZZx530= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= -github.com/alecthomas/participle v0.7.1 h1:2bN7reTw//5f0cugJcTOnY/NYZcWQOaajW+BwZB5xWs= -github.com/alecthomas/participle v0.7.1/go.mod h1:HfdmEuwvr12HXQN44HPWXR0lHmVolVYe4dyL6lQ3duY= -github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units 
v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alicebob/gopher-json v0.0.0-20180125190556-5a6b3ba71ee6/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= github.com/alicebob/miniredis v2.5.0+incompatible/go.mod h1:8HZjEj4yU0dwhYHky+DxYx+6BMjkBbe5ONFIF1MXffk= +github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210521073959-f0d4d129b7f1 h1:zFRi26YWd7NIorBXe8UkevRl0dIvk/AnXHWaAiZG+Yk= +github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210521073959-f0d4d129b7f1/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= diff --git a/pkg/policy/doc.go b/pkg/policy/doc.go index e2f94bb7..10394fa1 100644 --- a/pkg/policy/doc.go +++ b/pkg/policy/doc.go @@ -1,5 +1,5 @@ // Package policy provides facilities for creating policy from SQL-like language. -// eBNF grammar is provided in `grammar.ebnf` for illustration. +// ANTLRv4 grammar is provided in `parser/Query.g4` and `parser/QueryLexer.g4`. // // Current limitations: // 1. Grouping filter expressions in parenthesis is not supported right now. diff --git a/pkg/policy/grammar.ebnf b/pkg/policy/grammar.ebnf deleted file mode 100644 index 2bcd3a20..00000000 --- a/pkg/policy/grammar.ebnf +++ /dev/null @@ -1,55 +0,0 @@ -Policy ::= - RepStmt, [RepStmt], - CbtStmt?, - [SelectStmt], - [FilterStmt], -; - -RepStmt ::= - 'REP', Number1, (* number of object replicas *) - ('AS', Ident)? (* optional selector name *) -; - -CbtStmt ::= 'CBF', Number1 (* container backup factor *) -; - -SelectStmt ::= - 'SELECT', Number1, (* number of nodes to select without container backup factor *) - ('IN', Clause?, Ident)?, (* bucket name *) - FROM, (Ident | '*'), (* filter reference or whole netmap *) - ('AS', Ident)? 
(* optional selector name *) -; - -Clause ::= - 'SAME' (* nodes from the same bucket *) - | 'DISTINCT' (* nodes from distinct buckets *) -; - -FilterStmt ::= - 'FILTER', AndChain, ['OR', AndChain], - 'AS', Ident (* obligatory filter name *) -; - -AndChain ::= - Expr, ['AND', Expr] -; - -Expr ::= - '@' Ident (* filter reference *) - | Key, Op, Value (* attribute filter *) -; - -Op ::= 'EQ' | 'NE' | 'GE' | 'GT' | 'LT' | 'LE' -; - -Key ::= Ident | String -; - -Value ::= Ident | Number | String -; - -Number1 ::= Digit1 [Digit]; -Number ::= Digit [Digit]; - -Digit1 ::= '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' ; -Digit ::= '0' | Digit1; diff --git a/pkg/policy/grammar.go b/pkg/policy/grammar.go deleted file mode 100644 index 6fd228e7..00000000 --- a/pkg/policy/grammar.go +++ /dev/null @@ -1,60 +0,0 @@ -//nolint:govet,golint // fails on struct tags here, but participle needs this syntax -package policy - -import ( - "github.com/alecthomas/participle" -) - -var parser *participle.Parser - -func init() { - p, err := participle.Build(&query{}) - if err != nil { - panic(err) - } - parser = p -} - -type query struct { - Replicas []*replicaStmt `@@+` - CBF uint32 `("CBF" @Int)?` - Selectors []*selectorStmt `@@*` - Filters []*filterStmt `@@*` -} - -type replicaStmt struct { - Count int `"REP" @Int` - Selector string `("IN" @Ident)?` -} - -type selectorStmt struct { - Count uint32 `"SELECT" @Int` - Bucket []string `("IN" @(("SAME" | "DISTINCT")? Ident))?` - Filter string `"FROM" @(Ident | "*")` - Name string `("AS" @Ident)?` -} - -type filterStmt struct { - Value *orChain `"FILTER" @@` - Name string `"AS" @Ident` -} - -type filterOrExpr struct { - Reference string `"@"@Ident` - Expr *simpleExpr `| @@` -} - -type orChain struct { - Clauses []*andChain `@@ ("OR" @@)*` -} - -type andChain struct { - Clauses []*filterOrExpr `@@ ("AND" @@)*` -} - -type simpleExpr struct { - Key string `@(Ident | String)` - // We don't use literals here to improve error messages. - Op string `@Ident` - Value string `@(Ident | String | Int)` -} diff --git a/pkg/policy/parser/Query.g4 b/pkg/policy/parser/Query.g4 new file mode 100644 index 00000000..0cad576e --- /dev/null +++ b/pkg/policy/parser/Query.g4 @@ -0,0 +1,45 @@ +parser grammar Query; + +options { + tokenVocab = QueryLexer; +} + +policy: repStmt+ cbfStmt? selectStmt* filterStmt*; + +repStmt: + REP Count = NUMBER1 // number of object replicas + (IN Selector = ident)?; // optional selector name + +cbfStmt: CBF BackupFactor = NUMBER1; // container backup factor + +selectStmt: + SELECT Count = NUMBER1 // number of nodes to select without container backup factor + (IN clause? Bucket = ident)? // bucket name + FROM Filter = identWC // filter reference or whole netmap + (AS Name = ident)? 
// optional selector name + ; + +clause: CLAUSE_SAME | CLAUSE_DISTINCT; // nodes from distinct buckets + +filterExpr: + F1 = filterExpr Op = AND_OP F2 = filterExpr + | F1 = filterExpr Op = OR_OP F2 = filterExpr + | expr + ; + +filterStmt: + FILTER Expr = filterExpr + AS Name = ident // obligatory filter name + ; + +expr: + AT Filter = ident // reference to named filter + | Key = filterKey SIMPLE_OP Value = filterValue // attribute comparison + ; + +filterKey : ident | STRING; +filterValue : ident | number | STRING; +number : ZERO | NUMBER1; +keyword : REP | IN | AS | SELECT | FROM | FILTER; +ident : keyword | IDENT; +identWC : ident | WILDCARD; diff --git a/pkg/policy/parser/Query.interp b/pkg/policy/parser/Query.interp new file mode 100644 index 00000000..63f74d9a --- /dev/null +++ b/pkg/policy/parser/Query.interp @@ -0,0 +1,67 @@ +token literal names: +null +'AND' +'OR' +null +'REP' +'IN' +'AS' +'CBF' +'SELECT' +'FROM' +'FILTER' +'*' +'SAME' +'DISTINCT' +'(' +')' +'@' +null +null +'0' +null +null + +token symbolic names: +null +AND_OP +OR_OP +SIMPLE_OP +REP +IN +AS +CBF +SELECT +FROM +FILTER +WILDCARD +CLAUSE_SAME +CLAUSE_DISTINCT +L_PAREN +R_PAREN +AT +IDENT +NUMBER1 +ZERO +STRING +WS + +rule names: +policy +repStmt +cbfStmt +selectStmt +clause +filterExpr +filterStmt +expr +filterKey +filterValue +number +keyword +ident +identWC + + +atn: +[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 23, 125, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 3, 2, 6, 2, 32, 10, 2, 13, 2, 14, 2, 33, 3, 2, 5, 2, 37, 10, 2, 3, 2, 7, 2, 40, 10, 2, 12, 2, 14, 2, 43, 11, 2, 3, 2, 7, 2, 46, 10, 2, 12, 2, 14, 2, 49, 11, 2, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 55, 10, 3, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, 5, 5, 5, 64, 10, 5, 3, 5, 5, 5, 67, 10, 5, 3, 5, 3, 5, 3, 5, 3, 5, 5, 5, 73, 10, 5, 3, 6, 3, 6, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 7, 7, 86, 10, 7, 12, 7, 14, 7, 89, 11, 7, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 5, 9, 102, 10, 9, 3, 10, 3, 10, 5, 10, 106, 10, 10, 3, 11, 3, 11, 3, 11, 5, 11, 111, 10, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 5, 14, 119, 10, 14, 3, 15, 3, 15, 5, 15, 123, 10, 15, 3, 15, 2, 3, 12, 16, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 2, 5, 3, 2, 14, 15, 3, 2, 20, 21, 4, 2, 6, 8, 10, 12, 2, 126, 2, 31, 3, 2, 2, 2, 4, 50, 3, 2, 2, 2, 6, 56, 3, 2, 2, 2, 8, 59, 3, 2, 2, 2, 10, 74, 3, 2, 2, 2, 12, 76, 3, 2, 2, 2, 14, 90, 3, 2, 2, 2, 16, 101, 3, 2, 2, 2, 18, 105, 3, 2, 2, 2, 20, 110, 3, 2, 2, 2, 22, 112, 3, 2, 2, 2, 24, 114, 3, 2, 2, 2, 26, 118, 3, 2, 2, 2, 28, 122, 3, 2, 2, 2, 30, 32, 5, 4, 3, 2, 31, 30, 3, 2, 2, 2, 32, 33, 3, 2, 2, 2, 33, 31, 3, 2, 2, 2, 33, 34, 3, 2, 2, 2, 34, 36, 3, 2, 2, 2, 35, 37, 5, 6, 4, 2, 36, 35, 3, 2, 2, 2, 36, 37, 3, 2, 2, 2, 37, 41, 3, 2, 2, 2, 38, 40, 5, 8, 5, 2, 39, 38, 3, 2, 2, 2, 40, 43, 3, 2, 2, 2, 41, 39, 3, 2, 2, 2, 41, 42, 3, 2, 2, 2, 42, 47, 3, 2, 2, 2, 43, 41, 3, 2, 2, 2, 44, 46, 5, 14, 8, 2, 45, 44, 3, 2, 2, 2, 46, 49, 3, 2, 2, 2, 47, 45, 3, 2, 2, 2, 47, 48, 3, 2, 2, 2, 48, 3, 3, 2, 2, 2, 49, 47, 3, 2, 2, 2, 50, 51, 7, 6, 2, 2, 51, 54, 7, 20, 2, 2, 52, 53, 7, 7, 2, 2, 53, 55, 5, 26, 14, 2, 54, 52, 3, 2, 2, 2, 54, 55, 3, 2, 2, 2, 55, 5, 3, 2, 2, 2, 56, 57, 7, 9, 2, 2, 57, 58, 7, 20, 2, 2, 58, 7, 3, 2, 2, 2, 59, 60, 7, 10, 2, 2, 60, 66, 7, 20, 2, 2, 61, 63, 7, 7, 2, 2, 62, 64, 5, 10, 6, 2, 63, 62, 3, 2, 2, 2, 63, 64, 3, 2, 2, 2, 64, 65, 3, 2, 2, 2, 65, 
67, 5, 26, 14, 2, 66, 61, 3, 2, 2, 2, 66, 67, 3, 2, 2, 2, 67, 68, 3, 2, 2, 2, 68, 69, 7, 11, 2, 2, 69, 72, 5, 28, 15, 2, 70, 71, 7, 8, 2, 2, 71, 73, 5, 26, 14, 2, 72, 70, 3, 2, 2, 2, 72, 73, 3, 2, 2, 2, 73, 9, 3, 2, 2, 2, 74, 75, 9, 2, 2, 2, 75, 11, 3, 2, 2, 2, 76, 77, 8, 7, 1, 2, 77, 78, 5, 16, 9, 2, 78, 87, 3, 2, 2, 2, 79, 80, 12, 5, 2, 2, 80, 81, 7, 3, 2, 2, 81, 86, 5, 12, 7, 6, 82, 83, 12, 4, 2, 2, 83, 84, 7, 4, 2, 2, 84, 86, 5, 12, 7, 5, 85, 79, 3, 2, 2, 2, 85, 82, 3, 2, 2, 2, 86, 89, 3, 2, 2, 2, 87, 85, 3, 2, 2, 2, 87, 88, 3, 2, 2, 2, 88, 13, 3, 2, 2, 2, 89, 87, 3, 2, 2, 2, 90, 91, 7, 12, 2, 2, 91, 92, 5, 12, 7, 2, 92, 93, 7, 8, 2, 2, 93, 94, 5, 26, 14, 2, 94, 15, 3, 2, 2, 2, 95, 96, 7, 18, 2, 2, 96, 102, 5, 26, 14, 2, 97, 98, 5, 18, 10, 2, 98, 99, 7, 5, 2, 2, 99, 100, 5, 20, 11, 2, 100, 102, 3, 2, 2, 2, 101, 95, 3, 2, 2, 2, 101, 97, 3, 2, 2, 2, 102, 17, 3, 2, 2, 2, 103, 106, 5, 26, 14, 2, 104, 106, 7, 22, 2, 2, 105, 103, 3, 2, 2, 2, 105, 104, 3, 2, 2, 2, 106, 19, 3, 2, 2, 2, 107, 111, 5, 26, 14, 2, 108, 111, 5, 22, 12, 2, 109, 111, 7, 22, 2, 2, 110, 107, 3, 2, 2, 2, 110, 108, 3, 2, 2, 2, 110, 109, 3, 2, 2, 2, 111, 21, 3, 2, 2, 2, 112, 113, 9, 3, 2, 2, 113, 23, 3, 2, 2, 2, 114, 115, 9, 4, 2, 2, 115, 25, 3, 2, 2, 2, 116, 119, 5, 24, 13, 2, 117, 119, 7, 19, 2, 2, 118, 116, 3, 2, 2, 2, 118, 117, 3, 2, 2, 2, 119, 27, 3, 2, 2, 2, 120, 123, 5, 26, 14, 2, 121, 123, 7, 13, 2, 2, 122, 120, 3, 2, 2, 2, 122, 121, 3, 2, 2, 2, 123, 29, 3, 2, 2, 2, 17, 33, 36, 41, 47, 54, 63, 66, 72, 85, 87, 101, 105, 110, 118, 122] \ No newline at end of file diff --git a/pkg/policy/parser/Query.tokens b/pkg/policy/parser/Query.tokens new file mode 100644 index 00000000..7f5aee96 --- /dev/null +++ b/pkg/policy/parser/Query.tokens @@ -0,0 +1,37 @@ +AND_OP=1 +OR_OP=2 +SIMPLE_OP=3 +REP=4 +IN=5 +AS=6 +CBF=7 +SELECT=8 +FROM=9 +FILTER=10 +WILDCARD=11 +CLAUSE_SAME=12 +CLAUSE_DISTINCT=13 +L_PAREN=14 +R_PAREN=15 +AT=16 +IDENT=17 +NUMBER1=18 +ZERO=19 +STRING=20 +WS=21 +'AND'=1 +'OR'=2 +'REP'=4 +'IN'=5 +'AS'=6 +'CBF'=7 +'SELECT'=8 +'FROM'=9 +'FILTER'=10 +'*'=11 +'SAME'=12 +'DISTINCT'=13 +'('=14 +')'=15 +'@'=16 +'0'=19 diff --git a/pkg/policy/parser/QueryLexer.g4 b/pkg/policy/parser/QueryLexer.g4 new file mode 100644 index 00000000..6c245b69 --- /dev/null +++ b/pkg/policy/parser/QueryLexer.g4 @@ -0,0 +1,41 @@ +lexer grammar QueryLexer; + +AND_OP : 'AND'; +OR_OP : 'OR'; +SIMPLE_OP : 'EQ' | 'NE' | 'GE' | 'GT' | 'LT' | 'LE'; + +REP : 'REP'; +IN : 'IN'; +AS : 'AS'; +CBF : 'CBF'; +SELECT : 'SELECT'; +FROM : 'FROM'; +FILTER : 'FILTER'; +WILDCARD : '*'; + +CLAUSE_SAME : 'SAME'; +CLAUSE_DISTINCT : 'DISTINCT'; + +L_PAREN : '('; +R_PAREN : ')'; +AT : '@'; + +IDENT : Nondigit (Digit | Nondigit)* ; +fragment Digit : [0-9] ; +fragment Nondigit : [a-zA-Z_] ; + +NUMBER1 : [1-9] Digit* ; +ZERO : '0' ; + +// Taken from antlr4 json grammar with minor corrections. 
+// https://github.com/antlr/grammars-v4/blob/master/json/JSON.g4 +STRING : '"' (ESC | SAFECODEPOINTDOUBLE)* '"' + | '\'' (ESC | SAFECODEPOINTSINGLE)* '\'' ; + +fragment ESC : '\\' (['"\\/bfnrt] | UNICODE) ; +fragment UNICODE : 'u' HEX HEX HEX HEX ; +fragment HEX : [0-9a-fA-F] ; +fragment SAFECODEPOINTSINGLE : ~ ['\\\u0000-\u001F] ; +fragment SAFECODEPOINTDOUBLE : ~ ["\\\u0000-\u001F] ; + +WS : [ \t\n\r] + -> skip ; diff --git a/pkg/policy/parser/QueryLexer.interp b/pkg/policy/parser/QueryLexer.interp new file mode 100644 index 00000000..d453ff6e --- /dev/null +++ b/pkg/policy/parser/QueryLexer.interp @@ -0,0 +1,87 @@ +token literal names: +null +'AND' +'OR' +null +'REP' +'IN' +'AS' +'CBF' +'SELECT' +'FROM' +'FILTER' +'*' +'SAME' +'DISTINCT' +'(' +')' +'@' +null +null +'0' +null +null + +token symbolic names: +null +AND_OP +OR_OP +SIMPLE_OP +REP +IN +AS +CBF +SELECT +FROM +FILTER +WILDCARD +CLAUSE_SAME +CLAUSE_DISTINCT +L_PAREN +R_PAREN +AT +IDENT +NUMBER1 +ZERO +STRING +WS + +rule names: +AND_OP +OR_OP +SIMPLE_OP +REP +IN +AS +CBF +SELECT +FROM +FILTER +WILDCARD +CLAUSE_SAME +CLAUSE_DISTINCT +L_PAREN +R_PAREN +AT +IDENT +Digit +Nondigit +NUMBER1 +ZERO +STRING +ESC +UNICODE +HEX +SAFECODEPOINTSINGLE +SAFECODEPOINTDOUBLE +WS + +channel names: +DEFAULT_TOKEN_CHANNEL +HIDDEN + +mode names: +DEFAULT_MODE + +atn: +[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 23, 200, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, 28, 4, 29, 9, 29, 3, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 79, 10, 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 13, 3, 13, 3, 13, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 15, 3, 15, 3, 16, 3, 16, 3, 17, 3, 17, 3, 18, 3, 18, 3, 18, 7, 18, 139, 10, 18, 12, 18, 14, 18, 142, 11, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 7, 21, 150, 10, 21, 12, 21, 14, 21, 153, 11, 21, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 7, 23, 160, 10, 23, 12, 23, 14, 23, 163, 11, 23, 3, 23, 3, 23, 3, 23, 3, 23, 7, 23, 169, 10, 23, 12, 23, 14, 23, 172, 11, 23, 3, 23, 5, 23, 175, 10, 23, 3, 24, 3, 24, 3, 24, 5, 24, 180, 10, 24, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, 26, 3, 26, 3, 27, 3, 27, 3, 28, 3, 28, 3, 29, 6, 29, 195, 10, 29, 13, 29, 14, 29, 196, 3, 29, 3, 29, 2, 2, 30, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, 2, 39, 2, 41, 20, 43, 21, 45, 22, 47, 2, 49, 2, 51, 2, 53, 2, 55, 2, 57, 23, 3, 2, 10, 3, 2, 50, 59, 5, 2, 67, 92, 97, 97, 99, 124, 3, 2, 51, 59, 11, 2, 36, 36, 41, 41, 49, 49, 94, 94, 100, 100, 104, 104, 112, 112, 116, 116, 118, 118, 5, 2, 50, 59, 67, 72, 99, 104, 5, 2, 2, 33, 41, 41, 94, 94, 5, 2, 2, 33, 36, 36, 94, 94, 5, 2, 11, 12, 15, 15, 34, 34, 2, 207, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 
2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 57, 3, 2, 2, 2, 3, 59, 3, 2, 2, 2, 5, 63, 3, 2, 2, 2, 7, 78, 3, 2, 2, 2, 9, 80, 3, 2, 2, 2, 11, 84, 3, 2, 2, 2, 13, 87, 3, 2, 2, 2, 15, 90, 3, 2, 2, 2, 17, 94, 3, 2, 2, 2, 19, 101, 3, 2, 2, 2, 21, 106, 3, 2, 2, 2, 23, 113, 3, 2, 2, 2, 25, 115, 3, 2, 2, 2, 27, 120, 3, 2, 2, 2, 29, 129, 3, 2, 2, 2, 31, 131, 3, 2, 2, 2, 33, 133, 3, 2, 2, 2, 35, 135, 3, 2, 2, 2, 37, 143, 3, 2, 2, 2, 39, 145, 3, 2, 2, 2, 41, 147, 3, 2, 2, 2, 43, 154, 3, 2, 2, 2, 45, 174, 3, 2, 2, 2, 47, 176, 3, 2, 2, 2, 49, 181, 3, 2, 2, 2, 51, 187, 3, 2, 2, 2, 53, 189, 3, 2, 2, 2, 55, 191, 3, 2, 2, 2, 57, 194, 3, 2, 2, 2, 59, 60, 7, 67, 2, 2, 60, 61, 7, 80, 2, 2, 61, 62, 7, 70, 2, 2, 62, 4, 3, 2, 2, 2, 63, 64, 7, 81, 2, 2, 64, 65, 7, 84, 2, 2, 65, 6, 3, 2, 2, 2, 66, 67, 7, 71, 2, 2, 67, 79, 7, 83, 2, 2, 68, 69, 7, 80, 2, 2, 69, 79, 7, 71, 2, 2, 70, 71, 7, 73, 2, 2, 71, 79, 7, 71, 2, 2, 72, 73, 7, 73, 2, 2, 73, 79, 7, 86, 2, 2, 74, 75, 7, 78, 2, 2, 75, 79, 7, 86, 2, 2, 76, 77, 7, 78, 2, 2, 77, 79, 7, 71, 2, 2, 78, 66, 3, 2, 2, 2, 78, 68, 3, 2, 2, 2, 78, 70, 3, 2, 2, 2, 78, 72, 3, 2, 2, 2, 78, 74, 3, 2, 2, 2, 78, 76, 3, 2, 2, 2, 79, 8, 3, 2, 2, 2, 80, 81, 7, 84, 2, 2, 81, 82, 7, 71, 2, 2, 82, 83, 7, 82, 2, 2, 83, 10, 3, 2, 2, 2, 84, 85, 7, 75, 2, 2, 85, 86, 7, 80, 2, 2, 86, 12, 3, 2, 2, 2, 87, 88, 7, 67, 2, 2, 88, 89, 7, 85, 2, 2, 89, 14, 3, 2, 2, 2, 90, 91, 7, 69, 2, 2, 91, 92, 7, 68, 2, 2, 92, 93, 7, 72, 2, 2, 93, 16, 3, 2, 2, 2, 94, 95, 7, 85, 2, 2, 95, 96, 7, 71, 2, 2, 96, 97, 7, 78, 2, 2, 97, 98, 7, 71, 2, 2, 98, 99, 7, 69, 2, 2, 99, 100, 7, 86, 2, 2, 100, 18, 3, 2, 2, 2, 101, 102, 7, 72, 2, 2, 102, 103, 7, 84, 2, 2, 103, 104, 7, 81, 2, 2, 104, 105, 7, 79, 2, 2, 105, 20, 3, 2, 2, 2, 106, 107, 7, 72, 2, 2, 107, 108, 7, 75, 2, 2, 108, 109, 7, 78, 2, 2, 109, 110, 7, 86, 2, 2, 110, 111, 7, 71, 2, 2, 111, 112, 7, 84, 2, 2, 112, 22, 3, 2, 2, 2, 113, 114, 7, 44, 2, 2, 114, 24, 3, 2, 2, 2, 115, 116, 7, 85, 2, 2, 116, 117, 7, 67, 2, 2, 117, 118, 7, 79, 2, 2, 118, 119, 7, 71, 2, 2, 119, 26, 3, 2, 2, 2, 120, 121, 7, 70, 2, 2, 121, 122, 7, 75, 2, 2, 122, 123, 7, 85, 2, 2, 123, 124, 7, 86, 2, 2, 124, 125, 7, 75, 2, 2, 125, 126, 7, 80, 2, 2, 126, 127, 7, 69, 2, 2, 127, 128, 7, 86, 2, 2, 128, 28, 3, 2, 2, 2, 129, 130, 7, 42, 2, 2, 130, 30, 3, 2, 2, 2, 131, 132, 7, 43, 2, 2, 132, 32, 3, 2, 2, 2, 133, 134, 7, 66, 2, 2, 134, 34, 3, 2, 2, 2, 135, 140, 5, 39, 20, 2, 136, 139, 5, 37, 19, 2, 137, 139, 5, 39, 20, 2, 138, 136, 3, 2, 2, 2, 138, 137, 3, 2, 2, 2, 139, 142, 3, 2, 2, 2, 140, 138, 3, 2, 2, 2, 140, 141, 3, 2, 2, 2, 141, 36, 3, 2, 2, 2, 142, 140, 3, 2, 2, 2, 143, 144, 9, 2, 2, 2, 144, 38, 3, 2, 2, 2, 145, 146, 9, 3, 2, 2, 146, 40, 3, 2, 2, 2, 147, 151, 9, 4, 2, 2, 148, 150, 5, 37, 19, 2, 149, 148, 3, 2, 2, 2, 150, 153, 3, 2, 2, 2, 151, 149, 3, 2, 2, 2, 151, 152, 3, 2, 2, 2, 152, 42, 3, 2, 2, 2, 153, 151, 3, 2, 2, 2, 154, 155, 7, 50, 2, 2, 155, 44, 3, 2, 2, 2, 156, 161, 7, 36, 2, 2, 157, 160, 5, 47, 24, 2, 158, 160, 5, 55, 28, 2, 159, 157, 3, 2, 2, 2, 159, 158, 3, 2, 2, 2, 160, 163, 3, 2, 2, 2, 161, 159, 3, 2, 2, 2, 161, 162, 3, 2, 2, 2, 162, 164, 3, 2, 2, 2, 163, 161, 3, 2, 2, 2, 164, 175, 7, 36, 2, 2, 165, 170, 7, 41, 2, 2, 166, 169, 5, 47, 24, 2, 167, 169, 5, 53, 27, 2, 168, 166, 3, 2, 2, 2, 168, 167, 3, 2, 2, 2, 169, 172, 3, 2, 2, 2, 170, 168, 3, 2, 2, 2, 170, 171, 3, 2, 2, 2, 171, 173, 3, 2, 2, 2, 172, 170, 3, 2, 2, 2, 173, 175, 7, 41, 2, 2, 174, 156, 3, 2, 2, 2, 174, 165, 3, 2, 2, 2, 175, 46, 
3, 2, 2, 2, 176, 179, 7, 94, 2, 2, 177, 180, 9, 5, 2, 2, 178, 180, 5, 49, 25, 2, 179, 177, 3, 2, 2, 2, 179, 178, 3, 2, 2, 2, 180, 48, 3, 2, 2, 2, 181, 182, 7, 119, 2, 2, 182, 183, 5, 51, 26, 2, 183, 184, 5, 51, 26, 2, 184, 185, 5, 51, 26, 2, 185, 186, 5, 51, 26, 2, 186, 50, 3, 2, 2, 2, 187, 188, 9, 6, 2, 2, 188, 52, 3, 2, 2, 2, 189, 190, 10, 7, 2, 2, 190, 54, 3, 2, 2, 2, 191, 192, 10, 8, 2, 2, 192, 56, 3, 2, 2, 2, 193, 195, 9, 9, 2, 2, 194, 193, 3, 2, 2, 2, 195, 196, 3, 2, 2, 2, 196, 194, 3, 2, 2, 2, 196, 197, 3, 2, 2, 2, 197, 198, 3, 2, 2, 2, 198, 199, 8, 29, 2, 2, 199, 58, 3, 2, 2, 2, 14, 2, 78, 138, 140, 151, 159, 161, 168, 170, 174, 179, 196, 3, 8, 2, 2] \ No newline at end of file diff --git a/pkg/policy/parser/QueryLexer.tokens b/pkg/policy/parser/QueryLexer.tokens new file mode 100644 index 00000000..7f5aee96 --- /dev/null +++ b/pkg/policy/parser/QueryLexer.tokens @@ -0,0 +1,37 @@ +AND_OP=1 +OR_OP=2 +SIMPLE_OP=3 +REP=4 +IN=5 +AS=6 +CBF=7 +SELECT=8 +FROM=9 +FILTER=10 +WILDCARD=11 +CLAUSE_SAME=12 +CLAUSE_DISTINCT=13 +L_PAREN=14 +R_PAREN=15 +AT=16 +IDENT=17 +NUMBER1=18 +ZERO=19 +STRING=20 +WS=21 +'AND'=1 +'OR'=2 +'REP'=4 +'IN'=5 +'AS'=6 +'CBF'=7 +'SELECT'=8 +'FROM'=9 +'FILTER'=10 +'*'=11 +'SAME'=12 +'DISTINCT'=13 +'('=14 +')'=15 +'@'=16 +'0'=19 diff --git a/pkg/policy/parser/generate.go b/pkg/policy/parser/generate.go new file mode 100644 index 00000000..850f3621 --- /dev/null +++ b/pkg/policy/parser/generate.go @@ -0,0 +1,3 @@ +package parser + +//go:generate antlr4 -Dlanguage=Go -visitor QueryLexer.g4 Query.g4 diff --git a/pkg/policy/parser/query_base_listener.go b/pkg/policy/parser/query_base_listener.go new file mode 100644 index 00000000..d872ae1d --- /dev/null +++ b/pkg/policy/parser/query_base_listener.go @@ -0,0 +1,106 @@ +// Code generated from Query.g4 by ANTLR 4.9.2. DO NOT EDIT. + +package parser // Query + +import "github.com/antlr/antlr4/runtime/Go/antlr" + +// BaseQueryListener is a complete listener for a parse tree produced by Query. +type BaseQueryListener struct{} + +var _ QueryListener = &BaseQueryListener{} + +// VisitTerminal is called when a terminal node is visited. +func (s *BaseQueryListener) VisitTerminal(node antlr.TerminalNode) {} + +// VisitErrorNode is called when an error node is visited. +func (s *BaseQueryListener) VisitErrorNode(node antlr.ErrorNode) {} + +// EnterEveryRule is called when any rule is entered. +func (s *BaseQueryListener) EnterEveryRule(ctx antlr.ParserRuleContext) {} + +// ExitEveryRule is called when any rule is exited. +func (s *BaseQueryListener) ExitEveryRule(ctx antlr.ParserRuleContext) {} + +// EnterPolicy is called when production policy is entered. +func (s *BaseQueryListener) EnterPolicy(ctx *PolicyContext) {} + +// ExitPolicy is called when production policy is exited. +func (s *BaseQueryListener) ExitPolicy(ctx *PolicyContext) {} + +// EnterRepStmt is called when production repStmt is entered. +func (s *BaseQueryListener) EnterRepStmt(ctx *RepStmtContext) {} + +// ExitRepStmt is called when production repStmt is exited. +func (s *BaseQueryListener) ExitRepStmt(ctx *RepStmtContext) {} + +// EnterCbfStmt is called when production cbfStmt is entered. +func (s *BaseQueryListener) EnterCbfStmt(ctx *CbfStmtContext) {} + +// ExitCbfStmt is called when production cbfStmt is exited. +func (s *BaseQueryListener) ExitCbfStmt(ctx *CbfStmtContext) {} + +// EnterSelectStmt is called when production selectStmt is entered. 
+func (s *BaseQueryListener) EnterSelectStmt(ctx *SelectStmtContext) {} + +// ExitSelectStmt is called when production selectStmt is exited. +func (s *BaseQueryListener) ExitSelectStmt(ctx *SelectStmtContext) {} + +// EnterClause is called when production clause is entered. +func (s *BaseQueryListener) EnterClause(ctx *ClauseContext) {} + +// ExitClause is called when production clause is exited. +func (s *BaseQueryListener) ExitClause(ctx *ClauseContext) {} + +// EnterFilterExpr is called when production filterExpr is entered. +func (s *BaseQueryListener) EnterFilterExpr(ctx *FilterExprContext) {} + +// ExitFilterExpr is called when production filterExpr is exited. +func (s *BaseQueryListener) ExitFilterExpr(ctx *FilterExprContext) {} + +// EnterFilterStmt is called when production filterStmt is entered. +func (s *BaseQueryListener) EnterFilterStmt(ctx *FilterStmtContext) {} + +// ExitFilterStmt is called when production filterStmt is exited. +func (s *BaseQueryListener) ExitFilterStmt(ctx *FilterStmtContext) {} + +// EnterExpr is called when production expr is entered. +func (s *BaseQueryListener) EnterExpr(ctx *ExprContext) {} + +// ExitExpr is called when production expr is exited. +func (s *BaseQueryListener) ExitExpr(ctx *ExprContext) {} + +// EnterFilterKey is called when production filterKey is entered. +func (s *BaseQueryListener) EnterFilterKey(ctx *FilterKeyContext) {} + +// ExitFilterKey is called when production filterKey is exited. +func (s *BaseQueryListener) ExitFilterKey(ctx *FilterKeyContext) {} + +// EnterFilterValue is called when production filterValue is entered. +func (s *BaseQueryListener) EnterFilterValue(ctx *FilterValueContext) {} + +// ExitFilterValue is called when production filterValue is exited. +func (s *BaseQueryListener) ExitFilterValue(ctx *FilterValueContext) {} + +// EnterNumber is called when production number is entered. +func (s *BaseQueryListener) EnterNumber(ctx *NumberContext) {} + +// ExitNumber is called when production number is exited. +func (s *BaseQueryListener) ExitNumber(ctx *NumberContext) {} + +// EnterKeyword is called when production keyword is entered. +func (s *BaseQueryListener) EnterKeyword(ctx *KeywordContext) {} + +// ExitKeyword is called when production keyword is exited. +func (s *BaseQueryListener) ExitKeyword(ctx *KeywordContext) {} + +// EnterIdent is called when production ident is entered. +func (s *BaseQueryListener) EnterIdent(ctx *IdentContext) {} + +// ExitIdent is called when production ident is exited. +func (s *BaseQueryListener) ExitIdent(ctx *IdentContext) {} + +// EnterIdentWC is called when production identWC is entered. +func (s *BaseQueryListener) EnterIdentWC(ctx *IdentWCContext) {} + +// ExitIdentWC is called when production identWC is exited. +func (s *BaseQueryListener) ExitIdentWC(ctx *IdentWCContext) {} diff --git a/pkg/policy/parser/query_base_visitor.go b/pkg/policy/parser/query_base_visitor.go new file mode 100644 index 00000000..69c59f7d --- /dev/null +++ b/pkg/policy/parser/query_base_visitor.go @@ -0,0 +1,65 @@ +// Code generated from Query.g4 by ANTLR 4.9.2. DO NOT EDIT. 
+ +package parser // Query + +import "github.com/antlr/antlr4/runtime/Go/antlr" + +type BaseQueryVisitor struct { + *antlr.BaseParseTreeVisitor +} + +func (v *BaseQueryVisitor) VisitPolicy(ctx *PolicyContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitRepStmt(ctx *RepStmtContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitCbfStmt(ctx *CbfStmtContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitSelectStmt(ctx *SelectStmtContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitClause(ctx *ClauseContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitFilterExpr(ctx *FilterExprContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitFilterStmt(ctx *FilterStmtContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitExpr(ctx *ExprContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitFilterKey(ctx *FilterKeyContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitFilterValue(ctx *FilterValueContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitNumber(ctx *NumberContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitKeyword(ctx *KeywordContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitIdent(ctx *IdentContext) interface{} { + return v.VisitChildren(ctx) +} + +func (v *BaseQueryVisitor) VisitIdentWC(ctx *IdentWCContext) interface{} { + return v.VisitChildren(ctx) +} diff --git a/pkg/policy/parser/query_lexer.go b/pkg/policy/parser/query_lexer.go new file mode 100644 index 00000000..05d8f212 --- /dev/null +++ b/pkg/policy/parser/query_lexer.go @@ -0,0 +1,193 @@ +// Code generated from QueryLexer.g4 by ANTLR 4.9.2. DO NOT EDIT. 
+ +package parser + +import ( + "fmt" + "unicode" + + "github.com/antlr/antlr4/runtime/Go/antlr" +) + +// Suppress unused import error +var _ = fmt.Printf +var _ = unicode.IsLetter + +var serializedLexerAtn = []uint16{ + 3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 23, 200, + 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, + 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, + 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, + 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, + 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, + 28, 4, 29, 9, 29, 3, 2, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, + 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 79, 10, + 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 7, 3, 8, 3, + 8, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, + 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, + 12, 3, 12, 3, 13, 3, 13, 3, 13, 3, 13, 3, 13, 3, 14, 3, 14, 3, 14, 3, 14, + 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 15, 3, 15, 3, 16, 3, 16, 3, 17, 3, + 17, 3, 18, 3, 18, 3, 18, 7, 18, 139, 10, 18, 12, 18, 14, 18, 142, 11, 18, + 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 7, 21, 150, 10, 21, 12, 21, 14, + 21, 153, 11, 21, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 7, 23, 160, 10, 23, + 12, 23, 14, 23, 163, 11, 23, 3, 23, 3, 23, 3, 23, 3, 23, 7, 23, 169, 10, + 23, 12, 23, 14, 23, 172, 11, 23, 3, 23, 5, 23, 175, 10, 23, 3, 24, 3, 24, + 3, 24, 5, 24, 180, 10, 24, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, 25, 3, + 26, 3, 26, 3, 27, 3, 27, 3, 28, 3, 28, 3, 29, 6, 29, 195, 10, 29, 13, 29, + 14, 29, 196, 3, 29, 3, 29, 2, 2, 30, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, + 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, + 33, 18, 35, 19, 37, 2, 39, 2, 41, 20, 43, 21, 45, 22, 47, 2, 49, 2, 51, + 2, 53, 2, 55, 2, 57, 23, 3, 2, 10, 3, 2, 50, 59, 5, 2, 67, 92, 97, 97, + 99, 124, 3, 2, 51, 59, 11, 2, 36, 36, 41, 41, 49, 49, 94, 94, 100, 100, + 104, 104, 112, 112, 116, 116, 118, 118, 5, 2, 50, 59, 67, 72, 99, 104, + 5, 2, 2, 33, 41, 41, 94, 94, 5, 2, 2, 33, 36, 36, 94, 94, 5, 2, 11, 12, + 15, 15, 34, 34, 2, 207, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, + 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, + 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, + 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, + 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, + 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 57, 3, 2, 2, 2, 3, 59, 3, 2, 2, + 2, 5, 63, 3, 2, 2, 2, 7, 78, 3, 2, 2, 2, 9, 80, 3, 2, 2, 2, 11, 84, 3, + 2, 2, 2, 13, 87, 3, 2, 2, 2, 15, 90, 3, 2, 2, 2, 17, 94, 3, 2, 2, 2, 19, + 101, 3, 2, 2, 2, 21, 106, 3, 2, 2, 2, 23, 113, 3, 2, 2, 2, 25, 115, 3, + 2, 2, 2, 27, 120, 3, 2, 2, 2, 29, 129, 3, 2, 2, 2, 31, 131, 3, 2, 2, 2, + 33, 133, 3, 2, 2, 2, 35, 135, 3, 2, 2, 2, 37, 143, 3, 2, 2, 2, 39, 145, + 3, 2, 2, 2, 41, 147, 3, 2, 2, 2, 43, 154, 3, 2, 2, 2, 45, 174, 3, 2, 2, + 2, 47, 176, 3, 2, 2, 2, 49, 181, 3, 2, 2, 2, 51, 187, 3, 2, 2, 2, 53, 189, + 3, 2, 2, 2, 55, 191, 3, 2, 2, 2, 57, 194, 3, 2, 2, 2, 59, 60, 7, 67, 2, + 2, 60, 61, 7, 80, 2, 2, 61, 62, 7, 70, 2, 2, 62, 4, 3, 2, 2, 2, 63, 64, + 7, 81, 2, 2, 64, 65, 7, 84, 2, 2, 65, 6, 3, 2, 2, 2, 66, 67, 7, 71, 2, + 2, 67, 79, 7, 83, 2, 2, 68, 69, 7, 80, 2, 2, 69, 79, 7, 71, 2, 2, 70, 71, + 7, 73, 2, 2, 71, 79, 7, 71, 2, 2, 72, 73, 7, 73, 2, 2, 
73, 79, 7, 86, 2, + 2, 74, 75, 7, 78, 2, 2, 75, 79, 7, 86, 2, 2, 76, 77, 7, 78, 2, 2, 77, 79, + 7, 71, 2, 2, 78, 66, 3, 2, 2, 2, 78, 68, 3, 2, 2, 2, 78, 70, 3, 2, 2, 2, + 78, 72, 3, 2, 2, 2, 78, 74, 3, 2, 2, 2, 78, 76, 3, 2, 2, 2, 79, 8, 3, 2, + 2, 2, 80, 81, 7, 84, 2, 2, 81, 82, 7, 71, 2, 2, 82, 83, 7, 82, 2, 2, 83, + 10, 3, 2, 2, 2, 84, 85, 7, 75, 2, 2, 85, 86, 7, 80, 2, 2, 86, 12, 3, 2, + 2, 2, 87, 88, 7, 67, 2, 2, 88, 89, 7, 85, 2, 2, 89, 14, 3, 2, 2, 2, 90, + 91, 7, 69, 2, 2, 91, 92, 7, 68, 2, 2, 92, 93, 7, 72, 2, 2, 93, 16, 3, 2, + 2, 2, 94, 95, 7, 85, 2, 2, 95, 96, 7, 71, 2, 2, 96, 97, 7, 78, 2, 2, 97, + 98, 7, 71, 2, 2, 98, 99, 7, 69, 2, 2, 99, 100, 7, 86, 2, 2, 100, 18, 3, + 2, 2, 2, 101, 102, 7, 72, 2, 2, 102, 103, 7, 84, 2, 2, 103, 104, 7, 81, + 2, 2, 104, 105, 7, 79, 2, 2, 105, 20, 3, 2, 2, 2, 106, 107, 7, 72, 2, 2, + 107, 108, 7, 75, 2, 2, 108, 109, 7, 78, 2, 2, 109, 110, 7, 86, 2, 2, 110, + 111, 7, 71, 2, 2, 111, 112, 7, 84, 2, 2, 112, 22, 3, 2, 2, 2, 113, 114, + 7, 44, 2, 2, 114, 24, 3, 2, 2, 2, 115, 116, 7, 85, 2, 2, 116, 117, 7, 67, + 2, 2, 117, 118, 7, 79, 2, 2, 118, 119, 7, 71, 2, 2, 119, 26, 3, 2, 2, 2, + 120, 121, 7, 70, 2, 2, 121, 122, 7, 75, 2, 2, 122, 123, 7, 85, 2, 2, 123, + 124, 7, 86, 2, 2, 124, 125, 7, 75, 2, 2, 125, 126, 7, 80, 2, 2, 126, 127, + 7, 69, 2, 2, 127, 128, 7, 86, 2, 2, 128, 28, 3, 2, 2, 2, 129, 130, 7, 42, + 2, 2, 130, 30, 3, 2, 2, 2, 131, 132, 7, 43, 2, 2, 132, 32, 3, 2, 2, 2, + 133, 134, 7, 66, 2, 2, 134, 34, 3, 2, 2, 2, 135, 140, 5, 39, 20, 2, 136, + 139, 5, 37, 19, 2, 137, 139, 5, 39, 20, 2, 138, 136, 3, 2, 2, 2, 138, 137, + 3, 2, 2, 2, 139, 142, 3, 2, 2, 2, 140, 138, 3, 2, 2, 2, 140, 141, 3, 2, + 2, 2, 141, 36, 3, 2, 2, 2, 142, 140, 3, 2, 2, 2, 143, 144, 9, 2, 2, 2, + 144, 38, 3, 2, 2, 2, 145, 146, 9, 3, 2, 2, 146, 40, 3, 2, 2, 2, 147, 151, + 9, 4, 2, 2, 148, 150, 5, 37, 19, 2, 149, 148, 3, 2, 2, 2, 150, 153, 3, + 2, 2, 2, 151, 149, 3, 2, 2, 2, 151, 152, 3, 2, 2, 2, 152, 42, 3, 2, 2, + 2, 153, 151, 3, 2, 2, 2, 154, 155, 7, 50, 2, 2, 155, 44, 3, 2, 2, 2, 156, + 161, 7, 36, 2, 2, 157, 160, 5, 47, 24, 2, 158, 160, 5, 55, 28, 2, 159, + 157, 3, 2, 2, 2, 159, 158, 3, 2, 2, 2, 160, 163, 3, 2, 2, 2, 161, 159, + 3, 2, 2, 2, 161, 162, 3, 2, 2, 2, 162, 164, 3, 2, 2, 2, 163, 161, 3, 2, + 2, 2, 164, 175, 7, 36, 2, 2, 165, 170, 7, 41, 2, 2, 166, 169, 5, 47, 24, + 2, 167, 169, 5, 53, 27, 2, 168, 166, 3, 2, 2, 2, 168, 167, 3, 2, 2, 2, + 169, 172, 3, 2, 2, 2, 170, 168, 3, 2, 2, 2, 170, 171, 3, 2, 2, 2, 171, + 173, 3, 2, 2, 2, 172, 170, 3, 2, 2, 2, 173, 175, 7, 41, 2, 2, 174, 156, + 3, 2, 2, 2, 174, 165, 3, 2, 2, 2, 175, 46, 3, 2, 2, 2, 176, 179, 7, 94, + 2, 2, 177, 180, 9, 5, 2, 2, 178, 180, 5, 49, 25, 2, 179, 177, 3, 2, 2, + 2, 179, 178, 3, 2, 2, 2, 180, 48, 3, 2, 2, 2, 181, 182, 7, 119, 2, 2, 182, + 183, 5, 51, 26, 2, 183, 184, 5, 51, 26, 2, 184, 185, 5, 51, 26, 2, 185, + 186, 5, 51, 26, 2, 186, 50, 3, 2, 2, 2, 187, 188, 9, 6, 2, 2, 188, 52, + 3, 2, 2, 2, 189, 190, 10, 7, 2, 2, 190, 54, 3, 2, 2, 2, 191, 192, 10, 8, + 2, 2, 192, 56, 3, 2, 2, 2, 193, 195, 9, 9, 2, 2, 194, 193, 3, 2, 2, 2, + 195, 196, 3, 2, 2, 2, 196, 194, 3, 2, 2, 2, 196, 197, 3, 2, 2, 2, 197, + 198, 3, 2, 2, 2, 198, 199, 8, 29, 2, 2, 199, 58, 3, 2, 2, 2, 14, 2, 78, + 138, 140, 151, 159, 161, 168, 170, 174, 179, 196, 3, 8, 2, 2, +} + +var lexerChannelNames = []string{ + "DEFAULT_TOKEN_CHANNEL", "HIDDEN", +} + +var lexerModeNames = []string{ + "DEFAULT_MODE", +} + +var lexerLiteralNames = []string{ + "", "'AND'", "'OR'", "", "'REP'", "'IN'", "'AS'", "'CBF'", "'SELECT'", + "'FROM'", "'FILTER'", 
"'*'", "'SAME'", "'DISTINCT'", "'('", "')'", "'@'", + "", "", "'0'", +} + +var lexerSymbolicNames = []string{ + "", "AND_OP", "OR_OP", "SIMPLE_OP", "REP", "IN", "AS", "CBF", "SELECT", + "FROM", "FILTER", "WILDCARD", "CLAUSE_SAME", "CLAUSE_DISTINCT", "L_PAREN", + "R_PAREN", "AT", "IDENT", "NUMBER1", "ZERO", "STRING", "WS", +} + +var lexerRuleNames = []string{ + "AND_OP", "OR_OP", "SIMPLE_OP", "REP", "IN", "AS", "CBF", "SELECT", "FROM", + "FILTER", "WILDCARD", "CLAUSE_SAME", "CLAUSE_DISTINCT", "L_PAREN", "R_PAREN", + "AT", "IDENT", "Digit", "Nondigit", "NUMBER1", "ZERO", "STRING", "ESC", + "UNICODE", "HEX", "SAFECODEPOINTSINGLE", "SAFECODEPOINTDOUBLE", "WS", +} + +type QueryLexer struct { + *antlr.BaseLexer + channelNames []string + modeNames []string + // TODO: EOF string +} + +// NewQueryLexer produces a new lexer instance for the optional input antlr.CharStream. +// +// The *QueryLexer instance produced may be reused by calling the SetInputStream method. +// The initial lexer configuration is expensive to construct, and the object is not thread-safe; +// however, if used within a Golang sync.Pool, the construction cost amortizes well and the +// objects can be used in a thread-safe manner. +func NewQueryLexer(input antlr.CharStream) *QueryLexer { + l := new(QueryLexer) + lexerDeserializer := antlr.NewATNDeserializer(nil) + lexerAtn := lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn) + lexerDecisionToDFA := make([]*antlr.DFA, len(lexerAtn.DecisionToState)) + for index, ds := range lexerAtn.DecisionToState { + lexerDecisionToDFA[index] = antlr.NewDFA(ds, index) + } + l.BaseLexer = antlr.NewBaseLexer(input) + l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache()) + + l.channelNames = lexerChannelNames + l.modeNames = lexerModeNames + l.RuleNames = lexerRuleNames + l.LiteralNames = lexerLiteralNames + l.SymbolicNames = lexerSymbolicNames + l.GrammarFileName = "QueryLexer.g4" + // TODO: l.EOF = antlr.TokenEOF + + return l +} + +// QueryLexer tokens. +const ( + QueryLexerAND_OP = 1 + QueryLexerOR_OP = 2 + QueryLexerSIMPLE_OP = 3 + QueryLexerREP = 4 + QueryLexerIN = 5 + QueryLexerAS = 6 + QueryLexerCBF = 7 + QueryLexerSELECT = 8 + QueryLexerFROM = 9 + QueryLexerFILTER = 10 + QueryLexerWILDCARD = 11 + QueryLexerCLAUSE_SAME = 12 + QueryLexerCLAUSE_DISTINCT = 13 + QueryLexerL_PAREN = 14 + QueryLexerR_PAREN = 15 + QueryLexerAT = 16 + QueryLexerIDENT = 17 + QueryLexerNUMBER1 = 18 + QueryLexerZERO = 19 + QueryLexerSTRING = 20 + QueryLexerWS = 21 +) diff --git a/pkg/policy/parser/query_listener.go b/pkg/policy/parser/query_listener.go new file mode 100644 index 00000000..3e77af59 --- /dev/null +++ b/pkg/policy/parser/query_listener.go @@ -0,0 +1,94 @@ +// Code generated from Query.g4 by ANTLR 4.9.2. DO NOT EDIT. + +package parser // Query + +import "github.com/antlr/antlr4/runtime/Go/antlr" + +// QueryListener is a complete listener for a parse tree produced by Query. +type QueryListener interface { + antlr.ParseTreeListener + + // EnterPolicy is called when entering the policy production. + EnterPolicy(c *PolicyContext) + + // EnterRepStmt is called when entering the repStmt production. + EnterRepStmt(c *RepStmtContext) + + // EnterCbfStmt is called when entering the cbfStmt production. + EnterCbfStmt(c *CbfStmtContext) + + // EnterSelectStmt is called when entering the selectStmt production. + EnterSelectStmt(c *SelectStmtContext) + + // EnterClause is called when entering the clause production. 
+ EnterClause(c *ClauseContext) + + // EnterFilterExpr is called when entering the filterExpr production. + EnterFilterExpr(c *FilterExprContext) + + // EnterFilterStmt is called when entering the filterStmt production. + EnterFilterStmt(c *FilterStmtContext) + + // EnterExpr is called when entering the expr production. + EnterExpr(c *ExprContext) + + // EnterFilterKey is called when entering the filterKey production. + EnterFilterKey(c *FilterKeyContext) + + // EnterFilterValue is called when entering the filterValue production. + EnterFilterValue(c *FilterValueContext) + + // EnterNumber is called when entering the number production. + EnterNumber(c *NumberContext) + + // EnterKeyword is called when entering the keyword production. + EnterKeyword(c *KeywordContext) + + // EnterIdent is called when entering the ident production. + EnterIdent(c *IdentContext) + + // EnterIdentWC is called when entering the identWC production. + EnterIdentWC(c *IdentWCContext) + + // ExitPolicy is called when exiting the policy production. + ExitPolicy(c *PolicyContext) + + // ExitRepStmt is called when exiting the repStmt production. + ExitRepStmt(c *RepStmtContext) + + // ExitCbfStmt is called when exiting the cbfStmt production. + ExitCbfStmt(c *CbfStmtContext) + + // ExitSelectStmt is called when exiting the selectStmt production. + ExitSelectStmt(c *SelectStmtContext) + + // ExitClause is called when exiting the clause production. + ExitClause(c *ClauseContext) + + // ExitFilterExpr is called when exiting the filterExpr production. + ExitFilterExpr(c *FilterExprContext) + + // ExitFilterStmt is called when exiting the filterStmt production. + ExitFilterStmt(c *FilterStmtContext) + + // ExitExpr is called when exiting the expr production. + ExitExpr(c *ExprContext) + + // ExitFilterKey is called when exiting the filterKey production. + ExitFilterKey(c *FilterKeyContext) + + // ExitFilterValue is called when exiting the filterValue production. + ExitFilterValue(c *FilterValueContext) + + // ExitNumber is called when exiting the number production. + ExitNumber(c *NumberContext) + + // ExitKeyword is called when exiting the keyword production. + ExitKeyword(c *KeywordContext) + + // ExitIdent is called when exiting the ident production. + ExitIdent(c *IdentContext) + + // ExitIdentWC is called when exiting the identWC production. + ExitIdentWC(c *IdentWCContext) +} diff --git a/pkg/policy/parser/query_parser.go b/pkg/policy/parser/query_parser.go new file mode 100644 index 00000000..fb94f3a2 --- /dev/null +++ b/pkg/policy/parser/query_parser.go @@ -0,0 +1,2474 @@ +// Code generated from Query.g4 by ANTLR 4.9.2. DO NOT EDIT. 
+ +package parser // Query + +import ( + "fmt" + "reflect" + "strconv" + + "github.com/antlr/antlr4/runtime/Go/antlr" +) + +// Suppress unused import errors +var _ = fmt.Printf +var _ = reflect.Copy +var _ = strconv.Itoa + +var parserATN = []uint16{ + 3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 23, 125, + 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, + 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, + 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 3, 2, 6, 2, 32, 10, 2, 13, 2, 14, 2, + 33, 3, 2, 5, 2, 37, 10, 2, 3, 2, 7, 2, 40, 10, 2, 12, 2, 14, 2, 43, 11, + 2, 3, 2, 7, 2, 46, 10, 2, 12, 2, 14, 2, 49, 11, 2, 3, 3, 3, 3, 3, 3, 3, + 3, 5, 3, 55, 10, 3, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, 5, 5, 5, 64, + 10, 5, 3, 5, 5, 5, 67, 10, 5, 3, 5, 3, 5, 3, 5, 3, 5, 5, 5, 73, 10, 5, + 3, 6, 3, 6, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 7, 7, + 86, 10, 7, 12, 7, 14, 7, 89, 11, 7, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 9, + 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 5, 9, 102, 10, 9, 3, 10, 3, 10, 5, 10, 106, + 10, 10, 3, 11, 3, 11, 3, 11, 5, 11, 111, 10, 11, 3, 12, 3, 12, 3, 13, 3, + 13, 3, 14, 3, 14, 5, 14, 119, 10, 14, 3, 15, 3, 15, 5, 15, 123, 10, 15, + 3, 15, 2, 3, 12, 16, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, + 2, 5, 3, 2, 14, 15, 3, 2, 20, 21, 4, 2, 6, 8, 10, 12, 2, 126, 2, 31, 3, + 2, 2, 2, 4, 50, 3, 2, 2, 2, 6, 56, 3, 2, 2, 2, 8, 59, 3, 2, 2, 2, 10, 74, + 3, 2, 2, 2, 12, 76, 3, 2, 2, 2, 14, 90, 3, 2, 2, 2, 16, 101, 3, 2, 2, 2, + 18, 105, 3, 2, 2, 2, 20, 110, 3, 2, 2, 2, 22, 112, 3, 2, 2, 2, 24, 114, + 3, 2, 2, 2, 26, 118, 3, 2, 2, 2, 28, 122, 3, 2, 2, 2, 30, 32, 5, 4, 3, + 2, 31, 30, 3, 2, 2, 2, 32, 33, 3, 2, 2, 2, 33, 31, 3, 2, 2, 2, 33, 34, + 3, 2, 2, 2, 34, 36, 3, 2, 2, 2, 35, 37, 5, 6, 4, 2, 36, 35, 3, 2, 2, 2, + 36, 37, 3, 2, 2, 2, 37, 41, 3, 2, 2, 2, 38, 40, 5, 8, 5, 2, 39, 38, 3, + 2, 2, 2, 40, 43, 3, 2, 2, 2, 41, 39, 3, 2, 2, 2, 41, 42, 3, 2, 2, 2, 42, + 47, 3, 2, 2, 2, 43, 41, 3, 2, 2, 2, 44, 46, 5, 14, 8, 2, 45, 44, 3, 2, + 2, 2, 46, 49, 3, 2, 2, 2, 47, 45, 3, 2, 2, 2, 47, 48, 3, 2, 2, 2, 48, 3, + 3, 2, 2, 2, 49, 47, 3, 2, 2, 2, 50, 51, 7, 6, 2, 2, 51, 54, 7, 20, 2, 2, + 52, 53, 7, 7, 2, 2, 53, 55, 5, 26, 14, 2, 54, 52, 3, 2, 2, 2, 54, 55, 3, + 2, 2, 2, 55, 5, 3, 2, 2, 2, 56, 57, 7, 9, 2, 2, 57, 58, 7, 20, 2, 2, 58, + 7, 3, 2, 2, 2, 59, 60, 7, 10, 2, 2, 60, 66, 7, 20, 2, 2, 61, 63, 7, 7, + 2, 2, 62, 64, 5, 10, 6, 2, 63, 62, 3, 2, 2, 2, 63, 64, 3, 2, 2, 2, 64, + 65, 3, 2, 2, 2, 65, 67, 5, 26, 14, 2, 66, 61, 3, 2, 2, 2, 66, 67, 3, 2, + 2, 2, 67, 68, 3, 2, 2, 2, 68, 69, 7, 11, 2, 2, 69, 72, 5, 28, 15, 2, 70, + 71, 7, 8, 2, 2, 71, 73, 5, 26, 14, 2, 72, 70, 3, 2, 2, 2, 72, 73, 3, 2, + 2, 2, 73, 9, 3, 2, 2, 2, 74, 75, 9, 2, 2, 2, 75, 11, 3, 2, 2, 2, 76, 77, + 8, 7, 1, 2, 77, 78, 5, 16, 9, 2, 78, 87, 3, 2, 2, 2, 79, 80, 12, 5, 2, + 2, 80, 81, 7, 3, 2, 2, 81, 86, 5, 12, 7, 6, 82, 83, 12, 4, 2, 2, 83, 84, + 7, 4, 2, 2, 84, 86, 5, 12, 7, 5, 85, 79, 3, 2, 2, 2, 85, 82, 3, 2, 2, 2, + 86, 89, 3, 2, 2, 2, 87, 85, 3, 2, 2, 2, 87, 88, 3, 2, 2, 2, 88, 13, 3, + 2, 2, 2, 89, 87, 3, 2, 2, 2, 90, 91, 7, 12, 2, 2, 91, 92, 5, 12, 7, 2, + 92, 93, 7, 8, 2, 2, 93, 94, 5, 26, 14, 2, 94, 15, 3, 2, 2, 2, 95, 96, 7, + 18, 2, 2, 96, 102, 5, 26, 14, 2, 97, 98, 5, 18, 10, 2, 98, 99, 7, 5, 2, + 2, 99, 100, 5, 20, 11, 2, 100, 102, 3, 2, 2, 2, 101, 95, 3, 2, 2, 2, 101, + 97, 3, 2, 2, 2, 102, 17, 3, 2, 2, 2, 103, 106, 5, 26, 14, 2, 104, 106, + 7, 22, 2, 2, 105, 103, 3, 2, 2, 2, 105, 104, 3, 2, 2, 2, 106, 19, 3, 2, + 2, 2, 107, 111, 5, 26, 14, 2, 108, 
111, 5, 22, 12, 2, 109, 111, 7, 22, + 2, 2, 110, 107, 3, 2, 2, 2, 110, 108, 3, 2, 2, 2, 110, 109, 3, 2, 2, 2, + 111, 21, 3, 2, 2, 2, 112, 113, 9, 3, 2, 2, 113, 23, 3, 2, 2, 2, 114, 115, + 9, 4, 2, 2, 115, 25, 3, 2, 2, 2, 116, 119, 5, 24, 13, 2, 117, 119, 7, 19, + 2, 2, 118, 116, 3, 2, 2, 2, 118, 117, 3, 2, 2, 2, 119, 27, 3, 2, 2, 2, + 120, 123, 5, 26, 14, 2, 121, 123, 7, 13, 2, 2, 122, 120, 3, 2, 2, 2, 122, + 121, 3, 2, 2, 2, 123, 29, 3, 2, 2, 2, 17, 33, 36, 41, 47, 54, 63, 66, 72, + 85, 87, 101, 105, 110, 118, 122, +} +var literalNames = []string{ + "", "'AND'", "'OR'", "", "'REP'", "'IN'", "'AS'", "'CBF'", "'SELECT'", + "'FROM'", "'FILTER'", "'*'", "'SAME'", "'DISTINCT'", "'('", "')'", "'@'", + "", "", "'0'", +} +var symbolicNames = []string{ + "", "AND_OP", "OR_OP", "SIMPLE_OP", "REP", "IN", "AS", "CBF", "SELECT", + "FROM", "FILTER", "WILDCARD", "CLAUSE_SAME", "CLAUSE_DISTINCT", "L_PAREN", + "R_PAREN", "AT", "IDENT", "NUMBER1", "ZERO", "STRING", "WS", +} + +var ruleNames = []string{ + "policy", "repStmt", "cbfStmt", "selectStmt", "clause", "filterExpr", "filterStmt", + "expr", "filterKey", "filterValue", "number", "keyword", "ident", "identWC", +} + +type Query struct { + *antlr.BaseParser +} + +// NewQuery produces a new parser instance for the optional input antlr.TokenStream. +// +// The *Query instance produced may be reused by calling the SetInputStream method. +// The initial parser configuration is expensive to construct, and the object is not thread-safe; +// however, if used within a Golang sync.Pool, the construction cost amortizes well and the +// objects can be used in a thread-safe manner. +func NewQuery(input antlr.TokenStream) *Query { + this := new(Query) + deserializer := antlr.NewATNDeserializer(nil) + deserializedATN := deserializer.DeserializeFromUInt16(parserATN) + decisionToDFA := make([]*antlr.DFA, len(deserializedATN.DecisionToState)) + for index, ds := range deserializedATN.DecisionToState { + decisionToDFA[index] = antlr.NewDFA(ds, index) + } + this.BaseParser = antlr.NewBaseParser(input) + + this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, antlr.NewPredictionContextCache()) + this.RuleNames = ruleNames + this.LiteralNames = literalNames + this.SymbolicNames = symbolicNames + this.GrammarFileName = "Query.g4" + + return this +} + +// Query tokens. +const ( + QueryEOF = antlr.TokenEOF + QueryAND_OP = 1 + QueryOR_OP = 2 + QuerySIMPLE_OP = 3 + QueryREP = 4 + QueryIN = 5 + QueryAS = 6 + QueryCBF = 7 + QuerySELECT = 8 + QueryFROM = 9 + QueryFILTER = 10 + QueryWILDCARD = 11 + QueryCLAUSE_SAME = 12 + QueryCLAUSE_DISTINCT = 13 + QueryL_PAREN = 14 + QueryR_PAREN = 15 + QueryAT = 16 + QueryIDENT = 17 + QueryNUMBER1 = 18 + QueryZERO = 19 + QuerySTRING = 20 + QueryWS = 21 +) + +// Query rules. +const ( + QueryRULE_policy = 0 + QueryRULE_repStmt = 1 + QueryRULE_cbfStmt = 2 + QueryRULE_selectStmt = 3 + QueryRULE_clause = 4 + QueryRULE_filterExpr = 5 + QueryRULE_filterStmt = 6 + QueryRULE_expr = 7 + QueryRULE_filterKey = 8 + QueryRULE_filterValue = 9 + QueryRULE_number = 10 + QueryRULE_keyword = 11 + QueryRULE_ident = 12 + QueryRULE_identWC = 13 +) + +// IPolicyContext is an interface to support dynamic dispatch. +type IPolicyContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsPolicyContext differentiates from other interfaces. 
+ IsPolicyContext() +} + +type PolicyContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyPolicyContext() *PolicyContext { + var p = new(PolicyContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_policy + return p +} + +func (*PolicyContext) IsPolicyContext() {} + +func NewPolicyContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *PolicyContext { + var p = new(PolicyContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_policy + + return p +} + +func (s *PolicyContext) GetParser() antlr.Parser { return s.parser } + +func (s *PolicyContext) AllRepStmt() []IRepStmtContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IRepStmtContext)(nil)).Elem()) + var tst = make([]IRepStmtContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IRepStmtContext) + } + } + + return tst +} + +func (s *PolicyContext) RepStmt(i int) IRepStmtContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IRepStmtContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IRepStmtContext) +} + +func (s *PolicyContext) CbfStmt() ICbfStmtContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*ICbfStmtContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(ICbfStmtContext) +} + +func (s *PolicyContext) AllSelectStmt() []ISelectStmtContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*ISelectStmtContext)(nil)).Elem()) + var tst = make([]ISelectStmtContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(ISelectStmtContext) + } + } + + return tst +} + +func (s *PolicyContext) SelectStmt(i int) ISelectStmtContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*ISelectStmtContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(ISelectStmtContext) +} + +func (s *PolicyContext) AllFilterStmt() []IFilterStmtContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IFilterStmtContext)(nil)).Elem()) + var tst = make([]IFilterStmtContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IFilterStmtContext) + } + } + + return tst +} + +func (s *PolicyContext) FilterStmt(i int) IFilterStmtContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IFilterStmtContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IFilterStmtContext) +} + +func (s *PolicyContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *PolicyContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *PolicyContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterPolicy(s) + } +} + +func (s *PolicyContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitPolicy(s) + } +} + +func (s *PolicyContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitPolicy(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) Policy() (localctx IPolicyContext) { + localctx = NewPolicyContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 0, QueryRULE_policy) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := 
err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + p.SetState(29) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + for ok := true; ok; ok = _la == QueryREP { + { + p.SetState(28) + p.RepStmt() + } + + p.SetState(31) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + } + p.SetState(34) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + if _la == QueryCBF { + { + p.SetState(33) + p.CbfStmt() + } + + } + p.SetState(39) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + for _la == QuerySELECT { + { + p.SetState(36) + p.SelectStmt() + } + + p.SetState(41) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + } + p.SetState(45) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + for _la == QueryFILTER { + { + p.SetState(42) + p.FilterStmt() + } + + p.SetState(47) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + } + + return localctx +} + +// IRepStmtContext is an interface to support dynamic dispatch. +type IRepStmtContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetCount returns the Count token. + GetCount() antlr.Token + + // SetCount sets the Count token. + SetCount(antlr.Token) + + // GetSelector returns the Selector rule contexts. + GetSelector() IIdentContext + + // SetSelector sets the Selector rule contexts. + SetSelector(IIdentContext) + + // IsRepStmtContext differentiates from other interfaces. + IsRepStmtContext() +} + +type RepStmtContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + Count antlr.Token + Selector IIdentContext +} + +func NewEmptyRepStmtContext() *RepStmtContext { + var p = new(RepStmtContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_repStmt + return p +} + +func (*RepStmtContext) IsRepStmtContext() {} + +func NewRepStmtContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *RepStmtContext { + var p = new(RepStmtContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_repStmt + + return p +} + +func (s *RepStmtContext) GetParser() antlr.Parser { return s.parser } + +func (s *RepStmtContext) GetCount() antlr.Token { return s.Count } + +func (s *RepStmtContext) SetCount(v antlr.Token) { s.Count = v } + +func (s *RepStmtContext) GetSelector() IIdentContext { return s.Selector } + +func (s *RepStmtContext) SetSelector(v IIdentContext) { s.Selector = v } + +func (s *RepStmtContext) REP() antlr.TerminalNode { + return s.GetToken(QueryREP, 0) +} + +func (s *RepStmtContext) NUMBER1() antlr.TerminalNode { + return s.GetToken(QueryNUMBER1, 0) +} + +func (s *RepStmtContext) IN() antlr.TerminalNode { + return s.GetToken(QueryIN, 0) +} + +func (s *RepStmtContext) Ident() IIdentContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IIdentContext) +} + +func (s *RepStmtContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *RepStmtContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *RepStmtContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := 
listener.(QueryListener); ok { + listenerT.EnterRepStmt(s) + } +} + +func (s *RepStmtContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitRepStmt(s) + } +} + +func (s *RepStmtContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitRepStmt(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) RepStmt() (localctx IRepStmtContext) { + localctx = NewRepStmtContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 2, QueryRULE_repStmt) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(48) + p.Match(QueryREP) + } + { + p.SetState(49) + + var _m = p.Match(QueryNUMBER1) + + localctx.(*RepStmtContext).Count = _m + } + p.SetState(52) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + if _la == QueryIN { + { + p.SetState(50) + p.Match(QueryIN) + } + { + p.SetState(51) + + var _x = p.Ident() + + localctx.(*RepStmtContext).Selector = _x + } + + } + + return localctx +} + +// ICbfStmtContext is an interface to support dynamic dispatch. +type ICbfStmtContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetBackupFactor returns the BackupFactor token. + GetBackupFactor() antlr.Token + + // SetBackupFactor sets the BackupFactor token. + SetBackupFactor(antlr.Token) + + // IsCbfStmtContext differentiates from other interfaces. 
+ IsCbfStmtContext() +} + +type CbfStmtContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + BackupFactor antlr.Token +} + +func NewEmptyCbfStmtContext() *CbfStmtContext { + var p = new(CbfStmtContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_cbfStmt + return p +} + +func (*CbfStmtContext) IsCbfStmtContext() {} + +func NewCbfStmtContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *CbfStmtContext { + var p = new(CbfStmtContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_cbfStmt + + return p +} + +func (s *CbfStmtContext) GetParser() antlr.Parser { return s.parser } + +func (s *CbfStmtContext) GetBackupFactor() antlr.Token { return s.BackupFactor } + +func (s *CbfStmtContext) SetBackupFactor(v antlr.Token) { s.BackupFactor = v } + +func (s *CbfStmtContext) CBF() antlr.TerminalNode { + return s.GetToken(QueryCBF, 0) +} + +func (s *CbfStmtContext) NUMBER1() antlr.TerminalNode { + return s.GetToken(QueryNUMBER1, 0) +} + +func (s *CbfStmtContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *CbfStmtContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *CbfStmtContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterCbfStmt(s) + } +} + +func (s *CbfStmtContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitCbfStmt(s) + } +} + +func (s *CbfStmtContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitCbfStmt(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) CbfStmt() (localctx ICbfStmtContext) { + localctx = NewCbfStmtContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 4, QueryRULE_cbfStmt) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(54) + p.Match(QueryCBF) + } + { + p.SetState(55) + + var _m = p.Match(QueryNUMBER1) + + localctx.(*CbfStmtContext).BackupFactor = _m + } + + return localctx +} + +// ISelectStmtContext is an interface to support dynamic dispatch. +type ISelectStmtContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetCount returns the Count token. + GetCount() antlr.Token + + // SetCount sets the Count token. + SetCount(antlr.Token) + + // GetBucket returns the Bucket rule contexts. + GetBucket() IIdentContext + + // GetFilter returns the Filter rule contexts. + GetFilter() IIdentWCContext + + // GetName returns the Name rule contexts. + GetName() IIdentContext + + // SetBucket sets the Bucket rule contexts. + SetBucket(IIdentContext) + + // SetFilter sets the Filter rule contexts. + SetFilter(IIdentWCContext) + + // SetName sets the Name rule contexts. + SetName(IIdentContext) + + // IsSelectStmtContext differentiates from other interfaces. 
+ IsSelectStmtContext() +} + +type SelectStmtContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + Count antlr.Token + Bucket IIdentContext + Filter IIdentWCContext + Name IIdentContext +} + +func NewEmptySelectStmtContext() *SelectStmtContext { + var p = new(SelectStmtContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_selectStmt + return p +} + +func (*SelectStmtContext) IsSelectStmtContext() {} + +func NewSelectStmtContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *SelectStmtContext { + var p = new(SelectStmtContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_selectStmt + + return p +} + +func (s *SelectStmtContext) GetParser() antlr.Parser { return s.parser } + +func (s *SelectStmtContext) GetCount() antlr.Token { return s.Count } + +func (s *SelectStmtContext) SetCount(v antlr.Token) { s.Count = v } + +func (s *SelectStmtContext) GetBucket() IIdentContext { return s.Bucket } + +func (s *SelectStmtContext) GetFilter() IIdentWCContext { return s.Filter } + +func (s *SelectStmtContext) GetName() IIdentContext { return s.Name } + +func (s *SelectStmtContext) SetBucket(v IIdentContext) { s.Bucket = v } + +func (s *SelectStmtContext) SetFilter(v IIdentWCContext) { s.Filter = v } + +func (s *SelectStmtContext) SetName(v IIdentContext) { s.Name = v } + +func (s *SelectStmtContext) SELECT() antlr.TerminalNode { + return s.GetToken(QuerySELECT, 0) +} + +func (s *SelectStmtContext) FROM() antlr.TerminalNode { + return s.GetToken(QueryFROM, 0) +} + +func (s *SelectStmtContext) NUMBER1() antlr.TerminalNode { + return s.GetToken(QueryNUMBER1, 0) +} + +func (s *SelectStmtContext) IdentWC() IIdentWCContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentWCContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IIdentWCContext) +} + +func (s *SelectStmtContext) IN() antlr.TerminalNode { + return s.GetToken(QueryIN, 0) +} + +func (s *SelectStmtContext) AS() antlr.TerminalNode { + return s.GetToken(QueryAS, 0) +} + +func (s *SelectStmtContext) AllIdent() []IIdentContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IIdentContext)(nil)).Elem()) + var tst = make([]IIdentContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IIdentContext) + } + } + + return tst +} + +func (s *SelectStmtContext) Ident(i int) IIdentContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IIdentContext) +} + +func (s *SelectStmtContext) Clause() IClauseContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IClauseContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IClauseContext) +} + +func (s *SelectStmtContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *SelectStmtContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *SelectStmtContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterSelectStmt(s) + } +} + +func (s *SelectStmtContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitSelectStmt(s) + } +} + +func (s *SelectStmtContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + 
return t.VisitSelectStmt(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) SelectStmt() (localctx ISelectStmtContext) { + localctx = NewSelectStmtContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 6, QueryRULE_selectStmt) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(57) + p.Match(QuerySELECT) + } + { + p.SetState(58) + + var _m = p.Match(QueryNUMBER1) + + localctx.(*SelectStmtContext).Count = _m + } + p.SetState(64) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + if _la == QueryIN { + { + p.SetState(59) + p.Match(QueryIN) + } + p.SetState(61) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + if _la == QueryCLAUSE_SAME || _la == QueryCLAUSE_DISTINCT { + { + p.SetState(60) + p.Clause() + } + + } + { + p.SetState(63) + + var _x = p.Ident() + + localctx.(*SelectStmtContext).Bucket = _x + } + + } + { + p.SetState(66) + p.Match(QueryFROM) + } + { + p.SetState(67) + + var _x = p.IdentWC() + + localctx.(*SelectStmtContext).Filter = _x + } + p.SetState(70) + p.GetErrorHandler().Sync(p) + _la = p.GetTokenStream().LA(1) + + if _la == QueryAS { + { + p.SetState(68) + p.Match(QueryAS) + } + { + p.SetState(69) + + var _x = p.Ident() + + localctx.(*SelectStmtContext).Name = _x + } + + } + + return localctx +} + +// IClauseContext is an interface to support dynamic dispatch. +type IClauseContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsClauseContext differentiates from other interfaces. 
+ IsClauseContext() +} + +type ClauseContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyClauseContext() *ClauseContext { + var p = new(ClauseContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_clause + return p +} + +func (*ClauseContext) IsClauseContext() {} + +func NewClauseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ClauseContext { + var p = new(ClauseContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_clause + + return p +} + +func (s *ClauseContext) GetParser() antlr.Parser { return s.parser } + +func (s *ClauseContext) CLAUSE_SAME() antlr.TerminalNode { + return s.GetToken(QueryCLAUSE_SAME, 0) +} + +func (s *ClauseContext) CLAUSE_DISTINCT() antlr.TerminalNode { + return s.GetToken(QueryCLAUSE_DISTINCT, 0) +} + +func (s *ClauseContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ClauseContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *ClauseContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterClause(s) + } +} + +func (s *ClauseContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitClause(s) + } +} + +func (s *ClauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitClause(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) Clause() (localctx IClauseContext) { + localctx = NewClauseContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 8, QueryRULE_clause) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(72) + _la = p.GetTokenStream().LA(1) + + if !(_la == QueryCLAUSE_SAME || _la == QueryCLAUSE_DISTINCT) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + + return localctx +} + +// IFilterExprContext is an interface to support dynamic dispatch. +type IFilterExprContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetOp returns the Op token. + GetOp() antlr.Token + + // SetOp sets the Op token. + SetOp(antlr.Token) + + // GetF1 returns the F1 rule contexts. + GetF1() IFilterExprContext + + // GetF2 returns the F2 rule contexts. + GetF2() IFilterExprContext + + // SetF1 sets the F1 rule contexts. + SetF1(IFilterExprContext) + + // SetF2 sets the F2 rule contexts. + SetF2(IFilterExprContext) + + // IsFilterExprContext differentiates from other interfaces. 
+ IsFilterExprContext() +} + +type FilterExprContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + F1 IFilterExprContext + Op antlr.Token + F2 IFilterExprContext +} + +func NewEmptyFilterExprContext() *FilterExprContext { + var p = new(FilterExprContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_filterExpr + return p +} + +func (*FilterExprContext) IsFilterExprContext() {} + +func NewFilterExprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FilterExprContext { + var p = new(FilterExprContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_filterExpr + + return p +} + +func (s *FilterExprContext) GetParser() antlr.Parser { return s.parser } + +func (s *FilterExprContext) GetOp() antlr.Token { return s.Op } + +func (s *FilterExprContext) SetOp(v antlr.Token) { s.Op = v } + +func (s *FilterExprContext) GetF1() IFilterExprContext { return s.F1 } + +func (s *FilterExprContext) GetF2() IFilterExprContext { return s.F2 } + +func (s *FilterExprContext) SetF1(v IFilterExprContext) { s.F1 = v } + +func (s *FilterExprContext) SetF2(v IFilterExprContext) { s.F2 = v } + +func (s *FilterExprContext) Expr() IExprContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IExprContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IExprContext) +} + +func (s *FilterExprContext) AllFilterExpr() []IFilterExprContext { + var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IFilterExprContext)(nil)).Elem()) + var tst = make([]IFilterExprContext, len(ts)) + + for i, t := range ts { + if t != nil { + tst[i] = t.(IFilterExprContext) + } + } + + return tst +} + +func (s *FilterExprContext) FilterExpr(i int) IFilterExprContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IFilterExprContext)(nil)).Elem(), i) + + if t == nil { + return nil + } + + return t.(IFilterExprContext) +} + +func (s *FilterExprContext) AND_OP() antlr.TerminalNode { + return s.GetToken(QueryAND_OP, 0) +} + +func (s *FilterExprContext) OR_OP() antlr.TerminalNode { + return s.GetToken(QueryOR_OP, 0) +} + +func (s *FilterExprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *FilterExprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *FilterExprContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterFilterExpr(s) + } +} + +func (s *FilterExprContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitFilterExpr(s) + } +} + +func (s *FilterExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitFilterExpr(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) FilterExpr() (localctx IFilterExprContext) { + return p.filterExpr(0) +} + +func (p *Query) filterExpr(_p int) (localctx IFilterExprContext) { + var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() + _parentState := p.GetState() + localctx = NewFilterExprContext(p, p.GetParserRuleContext(), _parentState) + var _prevctx IFilterExprContext = localctx + var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning. 
+ _startState := 10 + p.EnterRecursionRule(localctx, 10, QueryRULE_filterExpr, _p) + + defer func() { + p.UnrollRecursionContexts(_parentctx) + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + var _alt int + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(75) + p.Expr() + } + + p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) + p.SetState(85) + p.GetErrorHandler().Sync(p) + _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 9, p.GetParserRuleContext()) + + for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { + if _alt == 1 { + if p.GetParseListeners() != nil { + p.TriggerExitRuleEvent() + } + _prevctx = localctx + p.SetState(83) + p.GetErrorHandler().Sync(p) + switch p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 8, p.GetParserRuleContext()) { + case 1: + localctx = NewFilterExprContext(p, _parentctx, _parentState) + localctx.(*FilterExprContext).F1 = _prevctx + p.PushNewRecursionContext(localctx, _startState, QueryRULE_filterExpr) + p.SetState(77) + + if !(p.Precpred(p.GetParserRuleContext(), 3)) { + panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 3)", "")) + } + { + p.SetState(78) + + var _m = p.Match(QueryAND_OP) + + localctx.(*FilterExprContext).Op = _m + } + { + p.SetState(79) + + var _x = p.filterExpr(4) + + localctx.(*FilterExprContext).F2 = _x + } + + case 2: + localctx = NewFilterExprContext(p, _parentctx, _parentState) + localctx.(*FilterExprContext).F1 = _prevctx + p.PushNewRecursionContext(localctx, _startState, QueryRULE_filterExpr) + p.SetState(80) + + if !(p.Precpred(p.GetParserRuleContext(), 2)) { + panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) + } + { + p.SetState(81) + + var _m = p.Match(QueryOR_OP) + + localctx.(*FilterExprContext).Op = _m + } + { + p.SetState(82) + + var _x = p.filterExpr(3) + + localctx.(*FilterExprContext).F2 = _x + } + + } + + } + p.SetState(87) + p.GetErrorHandler().Sync(p) + _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 9, p.GetParserRuleContext()) + } + + return localctx +} + +// IFilterStmtContext is an interface to support dynamic dispatch. +type IFilterStmtContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // GetExpr returns the Expr rule contexts. + GetExpr() IFilterExprContext + + // GetName returns the Name rule contexts. + GetName() IIdentContext + + // SetExpr sets the Expr rule contexts. + SetExpr(IFilterExprContext) + + // SetName sets the Name rule contexts. + SetName(IIdentContext) + + // IsFilterStmtContext differentiates from other interfaces. 
+ IsFilterStmtContext() +} + +type FilterStmtContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + Expr IFilterExprContext + Name IIdentContext +} + +func NewEmptyFilterStmtContext() *FilterStmtContext { + var p = new(FilterStmtContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_filterStmt + return p +} + +func (*FilterStmtContext) IsFilterStmtContext() {} + +func NewFilterStmtContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FilterStmtContext { + var p = new(FilterStmtContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_filterStmt + + return p +} + +func (s *FilterStmtContext) GetParser() antlr.Parser { return s.parser } + +func (s *FilterStmtContext) GetExpr() IFilterExprContext { return s.Expr } + +func (s *FilterStmtContext) GetName() IIdentContext { return s.Name } + +func (s *FilterStmtContext) SetExpr(v IFilterExprContext) { s.Expr = v } + +func (s *FilterStmtContext) SetName(v IIdentContext) { s.Name = v } + +func (s *FilterStmtContext) FILTER() antlr.TerminalNode { + return s.GetToken(QueryFILTER, 0) +} + +func (s *FilterStmtContext) AS() antlr.TerminalNode { + return s.GetToken(QueryAS, 0) +} + +func (s *FilterStmtContext) FilterExpr() IFilterExprContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IFilterExprContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IFilterExprContext) +} + +func (s *FilterStmtContext) Ident() IIdentContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IIdentContext) +} + +func (s *FilterStmtContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *FilterStmtContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *FilterStmtContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterFilterStmt(s) + } +} + +func (s *FilterStmtContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitFilterStmt(s) + } +} + +func (s *FilterStmtContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitFilterStmt(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) FilterStmt() (localctx IFilterStmtContext) { + localctx = NewFilterStmtContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 12, QueryRULE_filterStmt) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(88) + p.Match(QueryFILTER) + } + { + p.SetState(89) + + var _x = p.filterExpr(0) + + localctx.(*FilterStmtContext).Expr = _x + } + { + p.SetState(90) + p.Match(QueryAS) + } + { + p.SetState(91) + + var _x = p.Ident() + + localctx.(*FilterStmtContext).Name = _x + } + + return localctx +} + +// IExprContext is an interface to support dynamic dispatch. +type IExprContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. 
+ GetParser() antlr.Parser + + // GetFilter returns the Filter rule contexts. + GetFilter() IIdentContext + + // GetKey returns the Key rule contexts. + GetKey() IFilterKeyContext + + // GetValue returns the Value rule contexts. + GetValue() IFilterValueContext + + // SetFilter sets the Filter rule contexts. + SetFilter(IIdentContext) + + // SetKey sets the Key rule contexts. + SetKey(IFilterKeyContext) + + // SetValue sets the Value rule contexts. + SetValue(IFilterValueContext) + + // IsExprContext differentiates from other interfaces. + IsExprContext() +} + +type ExprContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser + Filter IIdentContext + Key IFilterKeyContext + Value IFilterValueContext +} + +func NewEmptyExprContext() *ExprContext { + var p = new(ExprContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_expr + return p +} + +func (*ExprContext) IsExprContext() {} + +func NewExprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExprContext { + var p = new(ExprContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_expr + + return p +} + +func (s *ExprContext) GetParser() antlr.Parser { return s.parser } + +func (s *ExprContext) GetFilter() IIdentContext { return s.Filter } + +func (s *ExprContext) GetKey() IFilterKeyContext { return s.Key } + +func (s *ExprContext) GetValue() IFilterValueContext { return s.Value } + +func (s *ExprContext) SetFilter(v IIdentContext) { s.Filter = v } + +func (s *ExprContext) SetKey(v IFilterKeyContext) { s.Key = v } + +func (s *ExprContext) SetValue(v IFilterValueContext) { s.Value = v } + +func (s *ExprContext) AT() antlr.TerminalNode { + return s.GetToken(QueryAT, 0) +} + +func (s *ExprContext) Ident() IIdentContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IIdentContext) +} + +func (s *ExprContext) SIMPLE_OP() antlr.TerminalNode { + return s.GetToken(QuerySIMPLE_OP, 0) +} + +func (s *ExprContext) FilterKey() IFilterKeyContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IFilterKeyContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IFilterKeyContext) +} + +func (s *ExprContext) FilterValue() IFilterValueContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IFilterValueContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IFilterValueContext) +} + +func (s *ExprContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *ExprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *ExprContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterExpr(s) + } +} + +func (s *ExprContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitExpr(s) + } +} + +func (s *ExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitExpr(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) Expr() (localctx IExprContext) { + localctx = NewExprContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 14, QueryRULE_expr) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if 
v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.SetState(99) + p.GetErrorHandler().Sync(p) + + switch p.GetTokenStream().LA(1) { + case QueryAT: + p.EnterOuterAlt(localctx, 1) + { + p.SetState(93) + p.Match(QueryAT) + } + { + p.SetState(94) + + var _x = p.Ident() + + localctx.(*ExprContext).Filter = _x + } + + case QueryREP, QueryIN, QueryAS, QuerySELECT, QueryFROM, QueryFILTER, QueryIDENT, QuerySTRING: + p.EnterOuterAlt(localctx, 2) + { + p.SetState(95) + + var _x = p.FilterKey() + + localctx.(*ExprContext).Key = _x + } + { + p.SetState(96) + p.Match(QuerySIMPLE_OP) + } + { + p.SetState(97) + + var _x = p.FilterValue() + + localctx.(*ExprContext).Value = _x + } + + default: + panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + } + + return localctx +} + +// IFilterKeyContext is an interface to support dynamic dispatch. +type IFilterKeyContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsFilterKeyContext differentiates from other interfaces. + IsFilterKeyContext() +} + +type FilterKeyContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyFilterKeyContext() *FilterKeyContext { + var p = new(FilterKeyContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_filterKey + return p +} + +func (*FilterKeyContext) IsFilterKeyContext() {} + +func NewFilterKeyContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FilterKeyContext { + var p = new(FilterKeyContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_filterKey + + return p +} + +func (s *FilterKeyContext) GetParser() antlr.Parser { return s.parser } + +func (s *FilterKeyContext) Ident() IIdentContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IIdentContext) +} + +func (s *FilterKeyContext) STRING() antlr.TerminalNode { + return s.GetToken(QuerySTRING, 0) +} + +func (s *FilterKeyContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *FilterKeyContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *FilterKeyContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterFilterKey(s) + } +} + +func (s *FilterKeyContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitFilterKey(s) + } +} + +func (s *FilterKeyContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitFilterKey(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) FilterKey() (localctx IFilterKeyContext) { + localctx = NewFilterKeyContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 16, QueryRULE_filterKey) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.SetState(103) + p.GetErrorHandler().Sync(p) + + switch 
p.GetTokenStream().LA(1) { + case QueryREP, QueryIN, QueryAS, QuerySELECT, QueryFROM, QueryFILTER, QueryIDENT: + p.EnterOuterAlt(localctx, 1) + { + p.SetState(101) + p.Ident() + } + + case QuerySTRING: + p.EnterOuterAlt(localctx, 2) + { + p.SetState(102) + p.Match(QuerySTRING) + } + + default: + panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + } + + return localctx +} + +// IFilterValueContext is an interface to support dynamic dispatch. +type IFilterValueContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsFilterValueContext differentiates from other interfaces. + IsFilterValueContext() +} + +type FilterValueContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyFilterValueContext() *FilterValueContext { + var p = new(FilterValueContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_filterValue + return p +} + +func (*FilterValueContext) IsFilterValueContext() {} + +func NewFilterValueContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FilterValueContext { + var p = new(FilterValueContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_filterValue + + return p +} + +func (s *FilterValueContext) GetParser() antlr.Parser { return s.parser } + +func (s *FilterValueContext) Ident() IIdentContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(IIdentContext) +} + +func (s *FilterValueContext) Number() INumberContext { + var t = s.GetTypedRuleContext(reflect.TypeOf((*INumberContext)(nil)).Elem(), 0) + + if t == nil { + return nil + } + + return t.(INumberContext) +} + +func (s *FilterValueContext) STRING() antlr.TerminalNode { + return s.GetToken(QuerySTRING, 0) +} + +func (s *FilterValueContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *FilterValueContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *FilterValueContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterFilterValue(s) + } +} + +func (s *FilterValueContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitFilterValue(s) + } +} + +func (s *FilterValueContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitFilterValue(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) FilterValue() (localctx IFilterValueContext) { + localctx = NewFilterValueContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 18, QueryRULE_filterValue) + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.SetState(108) + p.GetErrorHandler().Sync(p) + + switch p.GetTokenStream().LA(1) { + case QueryREP, QueryIN, QueryAS, QuerySELECT, QueryFROM, QueryFILTER, QueryIDENT: + p.EnterOuterAlt(localctx, 1) + { + p.SetState(105) + p.Ident() + } + + case QueryNUMBER1, QueryZERO: + p.EnterOuterAlt(localctx, 2) + { + p.SetState(106) + 
p.Number() + } + + case QuerySTRING: + p.EnterOuterAlt(localctx, 3) + { + p.SetState(107) + p.Match(QuerySTRING) + } + + default: + panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + } + + return localctx +} + +// INumberContext is an interface to support dynamic dispatch. +type INumberContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsNumberContext differentiates from other interfaces. + IsNumberContext() +} + +type NumberContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyNumberContext() *NumberContext { + var p = new(NumberContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_number + return p +} + +func (*NumberContext) IsNumberContext() {} + +func NewNumberContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *NumberContext { + var p = new(NumberContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_number + + return p +} + +func (s *NumberContext) GetParser() antlr.Parser { return s.parser } + +func (s *NumberContext) ZERO() antlr.TerminalNode { + return s.GetToken(QueryZERO, 0) +} + +func (s *NumberContext) NUMBER1() antlr.TerminalNode { + return s.GetToken(QueryNUMBER1, 0) +} + +func (s *NumberContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *NumberContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *NumberContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterNumber(s) + } +} + +func (s *NumberContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitNumber(s) + } +} + +func (s *NumberContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitNumber(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) Number() (localctx INumberContext) { + localctx = NewNumberContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 20, QueryRULE_number) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(110) + _la = p.GetTokenStream().LA(1) + + if !(_la == QueryNUMBER1 || _la == QueryZERO) { + p.GetErrorHandler().RecoverInline(p) + } else { + p.GetErrorHandler().ReportMatch(p) + p.Consume() + } + } + + return localctx +} + +// IKeywordContext is an interface to support dynamic dispatch. +type IKeywordContext interface { + antlr.ParserRuleContext + + // GetParser returns the parser. + GetParser() antlr.Parser + + // IsKeywordContext differentiates from other interfaces. 
+ IsKeywordContext() +} + +type KeywordContext struct { + *antlr.BaseParserRuleContext + parser antlr.Parser +} + +func NewEmptyKeywordContext() *KeywordContext { + var p = new(KeywordContext) + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + p.RuleIndex = QueryRULE_keyword + return p +} + +func (*KeywordContext) IsKeywordContext() {} + +func NewKeywordContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *KeywordContext { + var p = new(KeywordContext) + + p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + + p.parser = parser + p.RuleIndex = QueryRULE_keyword + + return p +} + +func (s *KeywordContext) GetParser() antlr.Parser { return s.parser } + +func (s *KeywordContext) REP() antlr.TerminalNode { + return s.GetToken(QueryREP, 0) +} + +func (s *KeywordContext) IN() antlr.TerminalNode { + return s.GetToken(QueryIN, 0) +} + +func (s *KeywordContext) AS() antlr.TerminalNode { + return s.GetToken(QueryAS, 0) +} + +func (s *KeywordContext) SELECT() antlr.TerminalNode { + return s.GetToken(QuerySELECT, 0) +} + +func (s *KeywordContext) FROM() antlr.TerminalNode { + return s.GetToken(QueryFROM, 0) +} + +func (s *KeywordContext) FILTER() antlr.TerminalNode { + return s.GetToken(QueryFILTER, 0) +} + +func (s *KeywordContext) GetRuleContext() antlr.RuleContext { + return s +} + +func (s *KeywordContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string { + return antlr.TreesStringTree(s, ruleNames, recog) +} + +func (s *KeywordContext) EnterRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.EnterKeyword(s) + } +} + +func (s *KeywordContext) ExitRule(listener antlr.ParseTreeListener) { + if listenerT, ok := listener.(QueryListener); ok { + listenerT.ExitKeyword(s) + } +} + +func (s *KeywordContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { + switch t := visitor.(type) { + case QueryVisitor: + return t.VisitKeyword(s) + + default: + return t.VisitChildren(s) + } +} + +func (p *Query) Keyword() (localctx IKeywordContext) { + localctx = NewKeywordContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 22, QueryRULE_keyword) + var _la int + + defer func() { + p.ExitRule() + }() + + defer func() { + if err := recover(); err != nil { + if v, ok := err.(antlr.RecognitionException); ok { + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + } else { + panic(err) + } + } + }() + + p.EnterOuterAlt(localctx, 1) + { + p.SetState(112) + _la = p.GetTokenStream().LA(1) + + if !(((_la)&-(0x1f+1)) == 0 && ((1<