Add draft EQL grammar and expression tree

parent e22f501018
commit 10a16d15d1

@@ -10,6 +10,7 @@
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessLexer\.java" checks="." />
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessParser(|BaseVisitor|Visitor)\.java" checks="." />
<suppress files="plugin[/\\]sql[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]sql[/\\]parser[/\\]SqlBase(Base(Listener|Visitor)|Lexer|Listener|Parser|Visitor).java" checks="." />
<suppress files="plugin[/\\]eql[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]eql[/\\]parser[/\\]EqlBase(Base(Listener|Visitor)|Lexer|Listener|Parser|Visitor).java" checks="." />

<!-- JNA requires the no-argument constructor on JNAKernel32Library.SizeT to be public-->
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNAKernel32Library.java" checks="RedundantModifier" />
@@ -0,0 +1,102 @@
evaluationDependsOn(xpackModule('core'))

apply plugin: 'elasticsearch.esplugin'
esplugin {
  name 'x-pack-eql'
  description 'The Elasticsearch plugin that powers EQL for Elasticsearch'
  classname 'org.elasticsearch.xpack.eql.plugin.EqlPlugin'
  extendedPlugins = ['x-pack-core', 'lang-painless']
}

ext {
  // EQL dependency versions
  antlrVersion = "4.5.3"
}

archivesBaseName = 'x-pack-eql'

dependencies {
  compileOnly project(path: xpackModule('core'), configuration: 'default')
  compileOnly(project(':modules:lang-painless')) {
    exclude group: "org.ow2.asm"
  }
  compile "org.antlr:antlr4-runtime:4.5.3"
  testCompile project(':test:framework')
  testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
  testCompile project(path: xpackModule('security'), configuration: 'testArtifacts')
  testCompile project(path: ':modules:reindex', configuration: 'runtime')
  testCompile project(path: ':modules:parent-join', configuration: 'runtime')
  testCompile project(path: ':modules:analysis-common', configuration: 'runtime')
}

// disable integration tests for now
integTest.enabled = false

/**********************************************
 *          EQL Parser regeneration           *
 **********************************************/

configurations {
  regenerate
}

dependencies {
  regenerate "org.antlr:antlr4:${antlrVersion}"
}

String grammarPath = 'src/main/antlr'
String outputPath = 'src/main/java/org/elasticsearch/xpack/eql/parser'

task cleanGenerated(type: Delete) {
  delete fileTree(grammarPath) {
    include '*.tokens'
  }
  delete fileTree(outputPath) {
    include 'EqlBase*.java'
  }
}

task regenParser(type: JavaExec) {
  dependsOn cleanGenerated
  main = 'org.antlr.v4.Tool'
  classpath = configurations.regenerate
  systemProperty 'file.encoding', 'UTF-8'
  systemProperty 'user.language', 'en'
  systemProperty 'user.country', 'US'
  systemProperty 'user.variant', ''
  args '-Werror',
       '-package', 'org.elasticsearch.xpack.eql.parser',
       '-listener',
       '-visitor',
       '-o', outputPath,
       "${file(grammarPath)}/EqlBase.g4"
}

task regen {
  dependsOn regenParser
  doLast {
    // moves token files to grammar directory for use with IDE's
    ant.move(file: "${outputPath}/EqlBase.tokens", toDir: grammarPath)
    ant.move(file: "${outputPath}/EqlBaseLexer.tokens", toDir: grammarPath)
    // make the generated classes package private
    ant.replaceregexp(match: 'public ((interface|class) \\QEqlBase\\E\\w+)',
                      replace: '\\1',
                      encoding: 'UTF-8') {
      fileset(dir: outputPath, includes: 'EqlBase*.java')
    }
    // nuke timestamps/filenames in generated files
    ant.replaceregexp(match: '\\Q// Generated from \\E.*',
                      replace: '\\/\\/ ANTLR GENERATED CODE: DO NOT EDIT',
                      encoding: 'UTF-8') {
      fileset(dir: outputPath, includes: 'EqlBase*.java')
    }
    // remove tabs in antlr generated files
    ant.replaceregexp(match: '\t', flags: 'g', replace: ' ', encoding: 'UTF-8') {
      fileset(dir: outputPath, includes: 'EqlBase*.java')
    }
    // fix line endings
    ant.fixcrlf(srcdir: outputPath, eol: 'lf') {
      patternset(includes: 'EqlBase*.java')
    }
  }
}

@@ -0,0 +1,254 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

grammar EqlBase;

tokens {
    DELIMITER
}

singleStatement
    : statement EOF
    ;

singleExpression
    : expression EOF
    ;

statement
    : query (PIPE pipe)*
    ;

query
    : sequence
    | join
    | condition
    ;

sequence
    : SEQUENCE (by=joinKeys)? (span)?
      match+
      (UNTIL match)?
    ;

join
    : JOIN (by=joinKeys)?
      match+
      (UNTIL match)?
    ;

pipe
    : kind=IDENTIFIER (booleanExpression (COMMA booleanExpression)*)?
    ;

joinKeys
    : BY qualifiedNames
    ;

span
    : WITH MAXSPAN EQ DIGIT_IDENTIFIER
    ;

match
    : LB condition RB (by=joinKeys)?
    ;

condition
    : event=qualifiedName WHERE expression
    ;

expression
    : booleanExpression
    ;

booleanExpression
    : NOT booleanExpression                                        #logicalNot
    | predicated                                                   #booleanDefault
    | left=booleanExpression operator=AND right=booleanExpression  #logicalBinary
    | left=booleanExpression operator=OR right=booleanExpression   #logicalBinary
    ;

// workaround for:
// https://github.com/antlr/antlr4/issues/780
// https://github.com/antlr/antlr4/issues/781
predicated
    : valueExpression predicate?
    ;

// dedicated calls for each branch are not used to reuse the NOT handling across them
// instead the property kind is used for differentiation
predicate
    : NOT? kind=BETWEEN lower=valueExpression AND upper=valueExpression
    | NOT? kind=IN LP valueExpression (COMMA valueExpression)* RP
    | NOT? kind=IN LP query RP
    ;

valueExpression
    : primaryExpression                                                                 #valueExpressionDefault
    | operator=(MINUS | PLUS) valueExpression                                           #arithmeticUnary
    | left=valueExpression operator=(ASTERISK | SLASH | PERCENT) right=valueExpression  #arithmeticBinary
    | left=valueExpression operator=(PLUS | MINUS) right=valueExpression                #arithmeticBinary
    | left=valueExpression comparisonOperator right=valueExpression                     #comparison
    ;

primaryExpression
    : constant                 #constantDefault
    | functionExpression       #function
    | qualifiedName            #dereference
    | LP expression RP         #parenthesizedExpression
    ;

functionExpression
    : identifier LP (expression (COMMA expression)*)? RP
    ;

constant
    : NULL          #nullLiteral
    | number        #numericLiteral
    | booleanValue  #booleanLiteral
    | STRING+       #stringLiteral
    ;

comparisonOperator
    : EQ | NEQ | LT | LTE | GT | GTE
    ;

booleanValue
    : TRUE | FALSE
    ;

qualifiedNames
    : qualifiedName (COMMA qualifiedName)*
    ;

qualifiedName
    : (identifier DOT)* identifier
    ;

identifier
    : quoteIdentifier
    | unquoteIdentifier
    ;

quoteIdentifier
    : QUOTED_IDENTIFIER  #quotedIdentifier
    ;

unquoteIdentifier
    : IDENTIFIER         #unquotedIdentifier
    | DIGIT_IDENTIFIER   #digitIdentifier
    ;

number
    : DECIMAL_VALUE  #decimalLiteral
    | INTEGER_VALUE  #integerLiteral
    ;

string
    : STRING
    ;

AND: 'AND';
ANY: 'ANY';
ASC: 'ASC';
BETWEEN: 'BETWEEN';
BY: 'BY';
CHILD: 'CHILD';
DESCENDANT: 'DESCENDANT';
EVENT: 'EVENT';
FALSE: 'FALSE';
IN: 'IN';
JOIN: 'JOIN';
MAXSPAN: 'MAXSPAN';
NOT: 'NOT';
NULL: 'NULL';
OF: 'OF';
OR: 'OR';
SEQUENCE: 'SEQUENCE';
TRUE: 'TRUE';
UNTIL: 'UNTIL';
WHERE: 'WHERE';
WITH: 'WITH';

// Operators
EQ  : '=' | '==';
NEQ : '<>' | '!=';
LT  : '<';
LTE : '<=';
GT  : '>';
GTE : '>=';

PLUS: '+';
MINUS: '-';
ASTERISK: '*';
SLASH: '/';
PERCENT: '%';
DOT: '.';
COMMA: ',';
LB: '[';
RB: ']';
LP: '(';
RP: ')';
PIPE: '|';

STRING
    : '\'' ( ~'\'' )* '\''
    | '"' ( ~'"' )* '"'
    ;

INTEGER_VALUE
    : DIGIT+
    ;

DECIMAL_VALUE
    : DIGIT+ DOT DIGIT*
    | DOT DIGIT+
    | DIGIT+ (DOT DIGIT*)? EXPONENT
    | DOT DIGIT+ EXPONENT
    ;

IDENTIFIER
    : (LETTER | '_') (LETTER | DIGIT | '_' | '@')*
    ;

DIGIT_IDENTIFIER
    : DIGIT (LETTER | DIGIT | '_' | '@')+
    ;

QUOTED_IDENTIFIER
    : '"' ( ~'"' | '""' )* '"'
    ;

fragment EXPONENT
    : 'E' [+-]? DIGIT+
    ;

fragment DIGIT
    : [0-9]
    ;

fragment LETTER
    : [A-Z]
    ;

SIMPLE_COMMENT
    : '//' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN)
    ;

BRACKETED_COMMENT
    : '/*' (BRACKETED_COMMENT|.)*? '*/' -> channel(HIDDEN)
    ;

WS
    : [ \r\n\t]+ -> channel(HIDDEN)
    ;

// Catch-all for anything we can't recognize.
// We use this to be able to ignore and recover all the text
// when splitting statements with DelimiterLexer
UNRECOGNIZED
    : .
    ;

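To make the draft grammar concrete, the following sketch (not part of the commit) lists a few statements the rules above appear to accept, written as Java string constants so the exercised rules can be annotated alongside them. Note that the keyword and LETTER rules are upper-case only; lower-case keywords and field names only lex through the CaseInsensitiveStream added later in this commit.

// Illustrative assumptions derived from the grammar above, not test cases shipped with it.
class EqlGrammarExamples {
    // condition : event=qualifiedName WHERE expression, with a #comparison value expression
    static final String CONDITION = "process where process_name == 'cmd.exe'";

    // sequence : SEQUENCE (by=joinKeys)? (span)? match+ ; "10m" lexes as DIGIT_IDENTIFIER
    static final String SEQUENCE =
        "sequence by user_name with maxspan=10m [process where true] [network where true]";

    // statement : query (PIPE pipe)* ; "count" is the pipe's kind=IDENTIFIER
    static final String JOIN_WITH_PIPE = "join [process where true] [file where true] | count";
}
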
@@ -0,0 +1,87 @@
AND=1
ANY=2
ASC=3
BETWEEN=4
BY=5
CHILD=6
DESCENDANT=7
EVENT=8
FALSE=9
IN=10
JOIN=11
MAXSPAN=12
NOT=13
NULL=14
OF=15
OR=16
SEQUENCE=17
TRUE=18
UNTIL=19
WHERE=20
WITH=21
EQ=22
NEQ=23
LT=24
LTE=25
GT=26
GTE=27
PLUS=28
MINUS=29
ASTERISK=30
SLASH=31
PERCENT=32
DOT=33
COMMA=34
LB=35
RB=36
LP=37
RP=38
PIPE=39
STRING=40
INTEGER_VALUE=41
DECIMAL_VALUE=42
IDENTIFIER=43
DIGIT_IDENTIFIER=44
QUOTED_IDENTIFIER=45
SIMPLE_COMMENT=46
BRACKETED_COMMENT=47
WS=48
UNRECOGNIZED=49
DELIMITER=50
'AND'=1
'ANY'=2
'ASC'=3
'BETWEEN'=4
'BY'=5
'CHILD'=6
'DESCENDANT'=7
'EVENT'=8
'FALSE'=9
'IN'=10
'JOIN'=11
'MAXSPAN'=12
'NOT'=13
'NULL'=14
'OF'=15
'OR'=16
'SEQUENCE'=17
'TRUE'=18
'UNTIL'=19
'WHERE'=20
'WITH'=21
'<'=24
'<='=25
'>'=26
'>='=27
'+'=28
'-'=29
'*'=30
'/'=31
'%'=32
'.'=33
','=34
'['=35
']'=36
'('=37
')'=38
'|'=39

@@ -0,0 +1,86 @@
AND=1
ANY=2
ASC=3
BETWEEN=4
BY=5
CHILD=6
DESCENDANT=7
EVENT=8
FALSE=9
IN=10
JOIN=11
MAXSPAN=12
NOT=13
NULL=14
OF=15
OR=16
SEQUENCE=17
TRUE=18
UNTIL=19
WHERE=20
WITH=21
EQ=22
NEQ=23
LT=24
LTE=25
GT=26
GTE=27
PLUS=28
MINUS=29
ASTERISK=30
SLASH=31
PERCENT=32
DOT=33
COMMA=34
LB=35
RB=36
LP=37
RP=38
PIPE=39
STRING=40
INTEGER_VALUE=41
DECIMAL_VALUE=42
IDENTIFIER=43
DIGIT_IDENTIFIER=44
QUOTED_IDENTIFIER=45
SIMPLE_COMMENT=46
BRACKETED_COMMENT=47
WS=48
UNRECOGNIZED=49
'AND'=1
'ANY'=2
'ASC'=3
'BETWEEN'=4
'BY'=5
'CHILD'=6
'DESCENDANT'=7
'EVENT'=8
'FALSE'=9
'IN'=10
'JOIN'=11
'MAXSPAN'=12
'NOT'=13
'NULL'=14
'OF'=15
'OR'=16
'SEQUENCE'=17
'TRUE'=18
'UNTIL'=19
'WHERE'=20
'WITH'=21
'<'=24
'<='=25
'>'=26
'>='=27
'+'=28
'-'=29
'*'=30
'/'=31
'%'=32
'.'=33
','=34
'['=35
']'=36
'('=37
')'=38
'|'=39

@@ -0,0 +1,29 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.eql;

public abstract class EqlClientException extends EqlException {

    protected EqlClientException(String message, Object... args) {
        super(message, args);
    }

    protected EqlClientException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }

    protected EqlClientException(String message, Throwable cause) {
        super(message, cause);
    }

    protected EqlClientException(Throwable cause, String message, Object... args) {
        super(cause, message, args);
    }

    protected EqlClientException(Throwable cause) {
        super(cause);
    }
}

@@ -0,0 +1,30 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.eql;

import org.elasticsearch.ElasticsearchException;

public abstract class EqlException extends ElasticsearchException {
    public EqlException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }

    public EqlException(String message, Throwable cause) {
        super(message, cause);
    }

    public EqlException(String message, Object... args) {
        super(message, args);
    }

    public EqlException(Throwable cause, String message, Object... args) {
        super(message, cause, args);
    }

    public EqlException(Throwable cause) {
        super(cause);
    }
}

@@ -0,0 +1,32 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.eql;

public class EqlIllegalArgumentException extends EqlServerException {
    public EqlIllegalArgumentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }

    public EqlIllegalArgumentException(String message, Throwable cause) {
        super(message, cause);
    }

    public EqlIllegalArgumentException(String message, Object... args) {
        super(message, args);
    }

    public EqlIllegalArgumentException(Throwable cause, String message, Object... args) {
        super(cause, message, args);
    }

    public EqlIllegalArgumentException(String message) {
        super(message);
    }

    public EqlIllegalArgumentException(Throwable cause) {
        super(cause);
    }
}

@@ -0,0 +1,29 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.eql;

public abstract class EqlServerException extends EqlException {

    protected EqlServerException(String message, Object... args) {
        super(message, args);
    }

    protected EqlServerException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }

    protected EqlServerException(String message, Throwable cause) {
        super(message, cause);
    }

    protected EqlServerException(Throwable cause, String message, Object... args) {
        super(cause, message, args);
    }

    protected EqlServerException(Throwable cause) {
        super(cause);
    }
}

@@ -0,0 +1,32 @@
package org.elasticsearch.xpack.eql;
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

public class SqlIllegalArgumentException extends EqlServerException {
    public SqlIllegalArgumentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }

    public SqlIllegalArgumentException(String message, Throwable cause) {
        super(message, cause);
    }

    public SqlIllegalArgumentException(String message, Object... args) {
        super(message, args);
    }

    public SqlIllegalArgumentException(Throwable cause, String message, Object... args) {
        super(cause, message, args);
    }

    public SqlIllegalArgumentException(String message) {
        super(message);
    }

    public SqlIllegalArgumentException(Throwable cause) {
        super(cause);
    }
}

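As a hedged illustration (not part of the commit) of how the two abstract branches are meant to be used: EqlClientException subclasses signal problems in the user's query, EqlServerException subclasses signal internal engine failures, and messages use the {} placeholder formatting inherited from ElasticsearchException, just as Expression.fold() further down in this diff does.

// Hypothetical subclasses for illustration only; this commit itself only adds
// EqlIllegalArgumentException and SqlIllegalArgumentException as concrete types.
// (sketch assumes the org.elasticsearch.xpack.eql package)
class InvalidEqlQueryException extends EqlClientException {   // the caller sent a bad query
    InvalidEqlQueryException(String message, Object... args) {
        super(message, args);
    }
}

class EqlExecutionException extends EqlServerException {      // the engine itself failed
    EqlExecutionException(Throwable cause, String message, Object... args) {
        super(cause, message, args);
    }
}

// Usage: placeholders are filled by ElasticsearchException's logger-style formatting, e.g.
// throw new InvalidEqlQueryException("unknown pipe [{}]", pipeName);
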
@@ -0,0 +1,69 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.eql.expression;

import org.elasticsearch.xpack.eql.EqlIllegalArgumentException;
import org.elasticsearch.xpack.eql.tree.Node;
import org.elasticsearch.xpack.eql.tree.Source;
import org.elasticsearch.xpack.eql.type.DataType;

import java.util.List;

public abstract class Expression extends Node<Expression> {

    /**
     * Order is important in the enum; any values should be added at the end.
     */
    public enum Stage {
        PARSED, PRE_ANALYZED, ANALYZED, OPTIMIZED;
    }

    private Stage stage = Stage.PARSED;

    public Expression(Source source, List<Expression> children) {
        super(source, children);
    }

    public boolean preAnalyzed() {
        return stage.ordinal() >= Stage.PRE_ANALYZED.ordinal();
    }

    public void setPreAnalyzed() {
        stage = Stage.PRE_ANALYZED;
    }

    public boolean analyzed() {
        return stage.ordinal() >= Stage.ANALYZED.ordinal();
    }

    public void setAnalyzed() {
        stage = Stage.ANALYZED;
    }

    public boolean optimized() {
        return stage.ordinal() >= Stage.OPTIMIZED.ordinal();
    }

    public void setOptimized() {
        stage = Stage.OPTIMIZED;
    }

    public boolean foldable() {
        return false;
    }

    public Object fold() {
        throw new EqlIllegalArgumentException("{} is not foldable", toString());
    }

    public abstract DataType dataType();

    @Override
    public String toString() {
        return sourceText();
    }
}

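The ordering note on the Stage enum matters because the predicates above compare ordinals rather than testing equality, so marking a later stage implies every earlier one. A minimal sketch of that behaviour (illustrative, not shipped with the commit; markAnalyzed is a hypothetical helper):

// Works for any concrete Expression, e.g. the Literal leaf added later in this diff.
static void markAnalyzed(Expression e) {
    e.setAnalyzed();
    assert e.preAnalyzed();        // true: PRE_ANALYZED.ordinal() < ANALYZED.ordinal()
    assert e.analyzed();           // true: the stage that was just set
    assert e.optimized() == false; // still false until setOptimized() is called
}
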
@@ -0,0 +1,26 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.eql.expression;

import org.elasticsearch.xpack.eql.EqlIllegalArgumentException;
import org.elasticsearch.xpack.eql.tree.Source;

import java.util.List;

import static java.util.Collections.emptyList;

public abstract class LeafExpression extends Expression {

    public LeafExpression(Source source) {
        super(source, emptyList());
    }

    @Override
    public Expression replaceChildren(List<Expression> newChildren) {
        throw new EqlIllegalArgumentException("{} doesn't have any children to replace", getClass().getSimpleName());
    }
}

@@ -0,0 +1,66 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.eql.expression;

import org.elasticsearch.xpack.eql.tree.NodeInfo;
import org.elasticsearch.xpack.eql.tree.Source;
import org.elasticsearch.xpack.eql.type.DataType;

import java.util.Objects;

public class Literal extends LeafExpression {

    private final Object value;

    public Literal(Source source, Object value) {
        super(source);
        this.value = value;
    }

    @Override
    public DataType dataType() {
        return DataType.SCALAR;
    }

    @Override
    protected NodeInfo<? extends Expression> info() {
        return NodeInfo.create(this, Literal::new, value);
    }

    @Override
    public boolean foldable() {
        return true;
    }

    @Override
    public Object fold() {
        return value;
    }

    @Override
    public int hashCode() {
        return Objects.hash(value);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        Literal other = (Literal) obj;
        return Objects.equals(value, other.value);
    }

    @Override
    public String toString() {
        return String.valueOf(value);
    }
}

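A short usage sketch of the folding contract (illustrative only; Source.EMPTY is the constant the parser utilities later in this diff fall back to when no parse-tree position is available):

// A literal is the simplest foldable expression: fold() just returns the wrapped value.
static void literalFoldingExample() {
    Literal answer = new Literal(Source.EMPTY, 42);
    assert answer.foldable();      // literals always fold
    Object value = answer.fold();  // 42
    assert Integer.valueOf(42).equals(value);
    // Non-foldable expressions keep Expression's default fold(), which throws
    // EqlIllegalArgumentException("{} is not foldable", ...).
}
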
@@ -0,0 +1,131 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.eql.parser;

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.elasticsearch.xpack.eql.expression.Expression;
import org.elasticsearch.xpack.eql.tree.Location;
import org.elasticsearch.xpack.eql.tree.Source;
import org.elasticsearch.xpack.eql.util.Check;

import java.util.ArrayList;
import java.util.List;

/**
 * Base parsing visitor class offering utility methods.
 */
abstract class AbstractBuilder extends EqlBaseBaseVisitor<Object> {

    @Override
    public Object visit(ParseTree tree) {
        Object result = super.visit(tree);
        Check.notNull(result, "Don't know how to handle context [{}] with value [{}]", tree.getClass(), tree.getText());
        return result;
    }

    @SuppressWarnings("unchecked")
    protected <T> T typedParsing(ParseTree ctx, Class<T> type) {
        Object result = ctx.accept(this);
        if (type.isInstance(result)) {
            return (T) result;
        }

        throw new ParsingException(source(ctx), "Invalid query '{}'[{}] given; expected {} but found {}",
                ctx.getText(), ctx.getClass().getSimpleName(),
                type.getSimpleName(), (result != null ? result.getClass().getSimpleName() : "null"));
    }

    protected Expression expression(ParseTree ctx) {
        return typedParsing(ctx, Expression.class);
    }

    protected List<Expression> expressions(List<? extends ParserRuleContext> ctxs) {
        return visitList(ctxs, Expression.class);
    }

    protected <T> List<T> visitList(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
        List<T> results = new ArrayList<>(contexts.size());
        for (ParserRuleContext context : contexts) {
            results.add(clazz.cast(visit(context)));
        }
        return results;
    }

    static Source source(ParseTree ctx) {
        if (ctx instanceof ParserRuleContext) {
            return source((ParserRuleContext) ctx);
        }
        return Source.EMPTY;
    }

    static Source source(TerminalNode terminalNode) {
        Check.notNull(terminalNode, "terminalNode is null");
        return source(terminalNode.getSymbol());
    }

    static Source source(ParserRuleContext parserRuleContext) {
        Check.notNull(parserRuleContext, "parserRuleContext is null");
        Token start = parserRuleContext.start;
        Token stop = parserRuleContext.stop != null ? parserRuleContext.stop : start;
        Interval interval = new Interval(start.getStartIndex(), stop.getStopIndex());
        String text = start.getInputStream().getText(interval);
        return new Source(new Location(start.getLine(), start.getCharPositionInLine()), text);
    }

    static Source source(Token token) {
        Check.notNull(token, "token is null");
        String text = token.getInputStream().getText(new Interval(token.getStartIndex(), token.getStopIndex()));
        return new Source(new Location(token.getLine(), token.getCharPositionInLine()), text);
    }

    Source source(ParserRuleContext begin, ParserRuleContext end) {
        Check.notNull(begin, "begin is null");
        Check.notNull(end, "end is null");
        Token start = begin.start;
        Token stop = end.stop != null ? end.stop : begin.stop;
        Interval interval = new Interval(start.getStartIndex(), stop.getStopIndex());
        String text = start.getInputStream().getText(interval);
        return new Source(new Location(start.getLine(), start.getCharPositionInLine()), text);
    }

    static Source source(TerminalNode begin, ParserRuleContext end) {
        Check.notNull(begin, "begin is null");
        Check.notNull(end, "end is null");
        Token start = begin.getSymbol();
        Token stop = end.stop != null ? end.stop : start;
        String text = start.getInputStream().getText(new Interval(start.getStartIndex(), stop.getStopIndex()));
        return new Source(new Location(start.getLine(), start.getCharPositionInLine()), text);
    }

    /**
     * Retrieves the raw text of the node (without interpreting it as a string literal).
     */
    static String text(ParseTree node) {
        return node == null ? null : node.getText();
    }

    /**
     * Extracts the actual unescaped string (literal) value of a terminal node.
     */
    static String string(TerminalNode node) {
        return node == null ? null : unquoteString(node.getText());
    }

    static String unquoteString(String text) {
        // remove leading and trailing ' for strings and also eliminate escaped single quotes
        return text == null ? null : text.substring(1, text.length() - 1).replace("''", "'");
    }

    @Override
    public Object visitTerminal(TerminalNode node) {
        Source source = source(node);
        throw new ParsingException(source, "Does not know how to handle {}", source.text());
    }
}

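As a hedged sketch (not in this diff) of how these utilities are meant to be consumed, a concrete builder can override one of the generated visit methods and combine source(ctx) with the unquoting helpers. EqlBaseParser.StringLiteralContext is the context class the #stringLiteral label in the grammar is expected to generate, and ExampleLiteralBuilder is a hypothetical name.

// Hypothetical visitor built on AbstractBuilder, for illustration only.
// (assumes imports for org.elasticsearch.xpack.eql.expression.Literal)
class ExampleLiteralBuilder extends AbstractBuilder {
    @Override
    public Object visitStringLiteral(EqlBaseParser.StringLiteralContext ctx) {
        // source(ctx) records line/column plus the matched text for error reporting;
        // string(...) strips the surrounding quotes via unquoteString.
        return new Literal(source(ctx), string(ctx.STRING(0)));
    }
}
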
@@ -0,0 +1,17 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.eql.parser;

import org.elasticsearch.xpack.eql.parser.EqlBaseParser.SingleStatementContext;

public class AstBuilder extends ExpressionBuilder {

    @Override
    public Object visitSingleStatement(SingleStatementContext ctx) {
        return expression(ctx.statement());
    }
}

@@ -0,0 +1,45 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.eql.parser;

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.IntStream;

import java.util.Locale;

// extension of ANTLR that does the upper-casing once for the whole stream
// the ugly part is that it has to duplicate LA method

// This approach is the official solution from the ANTLR authors
// in that it's both faster and easier than having a dedicated lexer
// see https://github.com/antlr/antlr4/issues/1002
class CaseInsensitiveStream extends ANTLRInputStream {
    protected char[] uppedChars;

    CaseInsensitiveStream(String input) {
        super(input);
        this.uppedChars = input.toUpperCase(Locale.ROOT).toCharArray();
    }

    // this part is copied from ANTLRInputStream
    @Override
    public int LA(int i) {
        if (i == 0) {
            return 0; // undefined
        }
        if (i < 0) {
            i++;
            if ((p + i - 1) < 0) {
                return IntStream.EOF;
            }
        }

        if ((p + i - 1) >= n) {
            return IntStream.EOF;
        }
        return uppedChars[p + i - 1];
    }
}

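A hedged end-to-end sketch of how this stream is intended to sit in front of the generated lexer. EqlBaseLexer and EqlBaseParser are the ANTLR outputs of the grammar above, produced by the regen task rather than shown in this diff; CommonTokenStream comes from the ANTLR runtime. Because only the look-ahead is upper-cased, lower-case keywords and field names match the grammar's upper-case literals while getText() and error messages still report the original input.

// Illustrative wiring, assuming the generated EqlBaseLexer/EqlBaseParser classes exist.
static EqlBaseParser.SingleStatementContext parse(String eql) {
    CaseInsensitiveStream stream = new CaseInsensitiveStream(eql); // look-ahead is upper-cased
    EqlBaseLexer lexer = new EqlBaseLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    EqlBaseParser parser = new EqlBaseParser(tokens);
    return parser.singleStatement();                               // entry rule: statement EOF
}
// e.g. parse("process where serial_event_id <= 4") works with lower-case input because the
// lexer only ever sees the upper-cased characters through LA().
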
@ -0,0 +1,531 @@
|
|||
// ANTLR GENERATED CODE: DO NOT EDIT
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
|
||||
import org.antlr.v4.runtime.ParserRuleContext;
|
||||
import org.antlr.v4.runtime.tree.ErrorNode;
|
||||
import org.antlr.v4.runtime.tree.TerminalNode;
|
||||
|
||||
/**
|
||||
* This class provides an empty implementation of {@link EqlBaseListener},
|
||||
* which can be extended to create a listener which only needs to handle a subset
|
||||
* of the available methods.
|
||||
*/
|
||||
class EqlBaseBaseListener implements EqlBaseListener {
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterSingleStatement(EqlBaseParser.SingleStatementContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitSingleStatement(EqlBaseParser.SingleStatementContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterSingleExpression(EqlBaseParser.SingleExpressionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitSingleExpression(EqlBaseParser.SingleExpressionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterStatement(EqlBaseParser.StatementContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitStatement(EqlBaseParser.StatementContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterQuery(EqlBaseParser.QueryContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitQuery(EqlBaseParser.QueryContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterSequence(EqlBaseParser.SequenceContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitSequence(EqlBaseParser.SequenceContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterJoin(EqlBaseParser.JoinContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitJoin(EqlBaseParser.JoinContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterPipe(EqlBaseParser.PipeContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitPipe(EqlBaseParser.PipeContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterJoinKeys(EqlBaseParser.JoinKeysContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitJoinKeys(EqlBaseParser.JoinKeysContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterSpan(EqlBaseParser.SpanContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitSpan(EqlBaseParser.SpanContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterMatch(EqlBaseParser.MatchContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitMatch(EqlBaseParser.MatchContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterCondition(EqlBaseParser.ConditionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitCondition(EqlBaseParser.ConditionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterExpression(EqlBaseParser.ExpressionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitExpression(EqlBaseParser.ExpressionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterLogicalNot(EqlBaseParser.LogicalNotContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitLogicalNot(EqlBaseParser.LogicalNotContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterPredicated(EqlBaseParser.PredicatedContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitPredicated(EqlBaseParser.PredicatedContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterPredicate(EqlBaseParser.PredicateContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitPredicate(EqlBaseParser.PredicateContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterValueExpressionDefault(EqlBaseParser.ValueExpressionDefaultContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitValueExpressionDefault(EqlBaseParser.ValueExpressionDefaultContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterComparison(EqlBaseParser.ComparisonContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitComparison(EqlBaseParser.ComparisonContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterArithmeticBinary(EqlBaseParser.ArithmeticBinaryContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitArithmeticBinary(EqlBaseParser.ArithmeticBinaryContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterConstantDefault(EqlBaseParser.ConstantDefaultContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitConstantDefault(EqlBaseParser.ConstantDefaultContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterFunction(EqlBaseParser.FunctionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitFunction(EqlBaseParser.FunctionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterDereference(EqlBaseParser.DereferenceContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitDereference(EqlBaseParser.DereferenceContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterParenthesizedExpression(EqlBaseParser.ParenthesizedExpressionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitParenthesizedExpression(EqlBaseParser.ParenthesizedExpressionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterFunctionExpression(EqlBaseParser.FunctionExpressionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitFunctionExpression(EqlBaseParser.FunctionExpressionContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterNullLiteral(EqlBaseParser.NullLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitNullLiteral(EqlBaseParser.NullLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterNumericLiteral(EqlBaseParser.NumericLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitNumericLiteral(EqlBaseParser.NumericLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterBooleanLiteral(EqlBaseParser.BooleanLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitBooleanLiteral(EqlBaseParser.BooleanLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterStringLiteral(EqlBaseParser.StringLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitStringLiteral(EqlBaseParser.StringLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterComparisonOperator(EqlBaseParser.ComparisonOperatorContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitComparisonOperator(EqlBaseParser.ComparisonOperatorContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterBooleanValue(EqlBaseParser.BooleanValueContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitBooleanValue(EqlBaseParser.BooleanValueContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterQualifiedName(EqlBaseParser.QualifiedNameContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitQualifiedName(EqlBaseParser.QualifiedNameContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterIdentifier(EqlBaseParser.IdentifierContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitIdentifier(EqlBaseParser.IdentifierContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterString(EqlBaseParser.StringContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitString(EqlBaseParser.StringContext ctx) { }
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void enterEveryRule(ParserRuleContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void exitEveryRule(ParserRuleContext ctx) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void visitTerminal(TerminalNode node) { }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation does nothing.</p>
|
||||
*/
|
||||
@Override public void visitErrorNode(ErrorNode node) { }
|
||||
}
|
|
@ -0,0 +1,301 @@
|
|||
// ANTLR GENERATED CODE: DO NOT EDIT
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
|
||||
|
||||
/**
|
||||
* This class provides an empty implementation of {@link EqlBaseVisitor},
|
||||
* which can be extended to create a visitor which only needs to handle a subset
|
||||
* of the available methods.
|
||||
*
|
||||
* @param <T> The return type of the visit operation. Use {@link Void} for
|
||||
* operations with no return type.
|
||||
*/
|
||||
class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBaseVisitor<T> {
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitSingleStatement(EqlBaseParser.SingleStatementContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitSingleExpression(EqlBaseParser.SingleExpressionContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitStatement(EqlBaseParser.StatementContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitQuery(EqlBaseParser.QueryContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitSequence(EqlBaseParser.SequenceContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitJoin(EqlBaseParser.JoinContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitPipe(EqlBaseParser.PipeContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitJoinKeys(EqlBaseParser.JoinKeysContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitSpan(EqlBaseParser.SpanContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitMatch(EqlBaseParser.MatchContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitCondition(EqlBaseParser.ConditionContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitExpression(EqlBaseParser.ExpressionContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitLogicalNot(EqlBaseParser.LogicalNotContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitPredicated(EqlBaseParser.PredicatedContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitPredicate(EqlBaseParser.PredicateContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitValueExpressionDefault(EqlBaseParser.ValueExpressionDefaultContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitComparison(EqlBaseParser.ComparisonContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitArithmeticBinary(EqlBaseParser.ArithmeticBinaryContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitConstantDefault(EqlBaseParser.ConstantDefaultContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitFunction(EqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitDereference(EqlBaseParser.DereferenceContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitParenthesizedExpression(EqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitFunctionExpression(EqlBaseParser.FunctionExpressionContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitNullLiteral(EqlBaseParser.NullLiteralContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitNumericLiteral(EqlBaseParser.NumericLiteralContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitBooleanLiteral(EqlBaseParser.BooleanLiteralContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitStringLiteral(EqlBaseParser.StringLiteralContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitComparisonOperator(EqlBaseParser.ComparisonOperatorContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitBooleanValue(EqlBaseParser.BooleanValueContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitQualifiedName(EqlBaseParser.QualifiedNameContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitIdentifier(EqlBaseParser.IdentifierContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx) { return visitChildren(ctx); }
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The default implementation returns the result of calling
|
||||
* {@link #visitChildren} on {@code ctx}.</p>
|
||||
*/
|
||||
@Override public T visitString(EqlBaseParser.StringContext ctx) { return visitChildren(ctx); }
|
||||
}
|
|
@@ -0,0 +1,265 @@
|
|||
// ANTLR GENERATED CODE: DO NOT EDIT
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
import org.antlr.v4.runtime.Lexer;
|
||||
import org.antlr.v4.runtime.CharStream;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenStream;
|
||||
import org.antlr.v4.runtime.*;
|
||||
import org.antlr.v4.runtime.atn.*;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
import org.antlr.v4.runtime.misc.*;
|
||||
|
||||
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
|
||||
class EqlBaseLexer extends Lexer {
|
||||
static { RuntimeMetaData.checkVersion("4.5.3", RuntimeMetaData.VERSION); }
|
||||
|
||||
protected static final DFA[] _decisionToDFA;
|
||||
protected static final PredictionContextCache _sharedContextCache =
|
||||
new PredictionContextCache();
|
||||
public static final int
|
||||
AND=1, ANY=2, ASC=3, BETWEEN=4, BY=5, CHILD=6, DESCENDANT=7, EVENT=8,
|
||||
FALSE=9, IN=10, JOIN=11, MAXSPAN=12, NOT=13, NULL=14, OF=15, OR=16, SEQUENCE=17,
|
||||
TRUE=18, UNTIL=19, WHERE=20, WITH=21, EQ=22, NEQ=23, LT=24, LTE=25, GT=26,
|
||||
GTE=27, PLUS=28, MINUS=29, ASTERISK=30, SLASH=31, PERCENT=32, DOT=33,
|
||||
COMMA=34, LB=35, RB=36, LP=37, RP=38, PIPE=39, STRING=40, INTEGER_VALUE=41,
|
||||
DECIMAL_VALUE=42, IDENTIFIER=43, DIGIT_IDENTIFIER=44, QUOTED_IDENTIFIER=45,
|
||||
SIMPLE_COMMENT=46, BRACKETED_COMMENT=47, WS=48, UNRECOGNIZED=49;
|
||||
public static String[] modeNames = {
|
||||
"DEFAULT_MODE"
|
||||
};
|
||||
|
||||
public static final String[] ruleNames = {
|
||||
"AND", "ANY", "ASC", "BETWEEN", "BY", "CHILD", "DESCENDANT", "EVENT",
|
||||
"FALSE", "IN", "JOIN", "MAXSPAN", "NOT", "NULL", "OF", "OR", "SEQUENCE",
|
||||
"TRUE", "UNTIL", "WHERE", "WITH", "EQ", "NEQ", "LT", "LTE", "GT", "GTE",
|
||||
"PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "DOT", "COMMA", "LB",
|
||||
"RB", "LP", "RP", "PIPE", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE",
|
||||
"IDENTIFIER", "DIGIT_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPONENT", "DIGIT",
|
||||
"LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED"
|
||||
};
|
||||
|
||||
private static final String[] _LITERAL_NAMES = {
|
||||
null, "'AND'", "'ANY'", "'ASC'", "'BETWEEN'", "'BY'", "'CHILD'", "'DESCENDANT'",
|
||||
"'EVENT'", "'FALSE'", "'IN'", "'JOIN'", "'MAXSPAN'", "'NOT'", "'NULL'",
|
||||
"'OF'", "'OR'", "'SEQUENCE'", "'TRUE'", "'UNTIL'", "'WHERE'", "'WITH'",
|
||||
null, null, "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'",
|
||||
"'%'", "'.'", "','", "'['", "']'", "'('", "')'", "'|'"
|
||||
};
|
||||
private static final String[] _SYMBOLIC_NAMES = {
|
||||
null, "AND", "ANY", "ASC", "BETWEEN", "BY", "CHILD", "DESCENDANT", "EVENT",
|
||||
"FALSE", "IN", "JOIN", "MAXSPAN", "NOT", "NULL", "OF", "OR", "SEQUENCE",
|
||||
"TRUE", "UNTIL", "WHERE", "WITH", "EQ", "NEQ", "LT", "LTE", "GT", "GTE",
|
||||
"PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "DOT", "COMMA", "LB",
|
||||
"RB", "LP", "RP", "PIPE", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE",
|
||||
"IDENTIFIER", "DIGIT_IDENTIFIER", "QUOTED_IDENTIFIER", "SIMPLE_COMMENT",
|
||||
"BRACKETED_COMMENT", "WS", "UNRECOGNIZED"
|
||||
};
|
||||
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #VOCABULARY} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public static final String[] tokenNames;
|
||||
static {
|
||||
tokenNames = new String[_SYMBOLIC_NAMES.length];
|
||||
for (int i = 0; i < tokenNames.length; i++) {
|
||||
tokenNames[i] = VOCABULARY.getLiteralName(i);
|
||||
if (tokenNames[i] == null) {
|
||||
tokenNames[i] = VOCABULARY.getSymbolicName(i);
|
||||
}
|
||||
|
||||
if (tokenNames[i] == null) {
|
||||
tokenNames[i] = "<INVALID>";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@Deprecated
|
||||
public String[] getTokenNames() {
|
||||
return tokenNames;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
public Vocabulary getVocabulary() {
|
||||
return VOCABULARY;
|
||||
}
|
||||
|
||||
|
||||
public EqlBaseLexer(CharStream input) {
|
||||
super(input);
|
||||
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getGrammarFileName() { return "EqlBase.g4"; }
|
||||
|
||||
@Override
|
||||
public String[] getRuleNames() { return ruleNames; }
|
||||
|
||||
@Override
|
||||
public String getSerializedATN() { return _serializedATN; }
|
||||
|
||||
@Override
|
||||
public String[] getModeNames() { return modeNames; }
|
||||
|
||||
@Override
|
||||
public ATN getATN() { return _ATN; }
|
||||
|
||||
public static final String _serializedATN =
|
||||
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\63\u01a2\b\1\4\2"+
|
||||
"\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+
|
||||
"\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
|
||||
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
|
||||
"\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t"+
|
||||
" \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t"+
|
||||
"+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+
|
||||
"\t\64\4\65\t\65\3\2\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\5\3"+
|
||||
"\5\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b"+
|
||||
"\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3"+
|
||||
"\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r"+
|
||||
"\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3"+
|
||||
"\20\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3"+
|
||||
"\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3"+
|
||||
"\25\3\25\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\5\27\u00e1\n\27\3\30"+
|
||||
"\3\30\3\30\3\30\5\30\u00e7\n\30\3\31\3\31\3\32\3\32\3\32\3\33\3\33\3\34"+
|
||||
"\3\34\3\34\3\35\3\35\3\36\3\36\3\37\3\37\3 \3 \3!\3!\3\"\3\"\3#\3#\3$"+
|
||||
"\3$\3%\3%\3&\3&\3\'\3\'\3(\3(\3)\3)\7)\u010d\n)\f)\16)\u0110\13)\3)\3"+
|
||||
")\3)\7)\u0115\n)\f)\16)\u0118\13)\3)\5)\u011b\n)\3*\6*\u011e\n*\r*\16"+
|
||||
"*\u011f\3+\6+\u0123\n+\r+\16+\u0124\3+\3+\7+\u0129\n+\f+\16+\u012c\13"+
|
||||
"+\3+\3+\6+\u0130\n+\r+\16+\u0131\3+\6+\u0135\n+\r+\16+\u0136\3+\3+\7+"+
|
||||
"\u013b\n+\f+\16+\u013e\13+\5+\u0140\n+\3+\3+\3+\3+\6+\u0146\n+\r+\16+"+
|
||||
"\u0147\3+\3+\5+\u014c\n+\3,\3,\5,\u0150\n,\3,\3,\3,\7,\u0155\n,\f,\16"+
|
||||
",\u0158\13,\3-\3-\3-\3-\6-\u015e\n-\r-\16-\u015f\3.\3.\3.\3.\7.\u0166"+
|
||||
"\n.\f.\16.\u0169\13.\3.\3.\3/\3/\5/\u016f\n/\3/\6/\u0172\n/\r/\16/\u0173"+
|
||||
"\3\60\3\60\3\61\3\61\3\62\3\62\3\62\3\62\7\62\u017e\n\62\f\62\16\62\u0181"+
|
||||
"\13\62\3\62\5\62\u0184\n\62\3\62\5\62\u0187\n\62\3\62\3\62\3\63\3\63\3"+
|
||||
"\63\3\63\3\63\7\63\u0190\n\63\f\63\16\63\u0193\13\63\3\63\3\63\3\63\3"+
|
||||
"\63\3\63\3\64\6\64\u019b\n\64\r\64\16\64\u019c\3\64\3\64\3\65\3\65\3\u0191"+
|
||||
"\2\66\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35"+
|
||||
"\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36"+
|
||||
";\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\2_\2a\2c\60e\61g\62i\63\3\2\n"+
|
||||
"\3\2))\3\2$$\4\2BBaa\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17"+
|
||||
"\17\"\"\u01bf\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2"+
|
||||
"\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2"+
|
||||
"\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2"+
|
||||
"\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2"+
|
||||
"\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3"+
|
||||
"\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2"+
|
||||
"\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2"+
|
||||
"S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2c\3\2\2\2\2e\3"+
|
||||
"\2\2\2\2g\3\2\2\2\2i\3\2\2\2\3k\3\2\2\2\5o\3\2\2\2\7s\3\2\2\2\tw\3\2\2"+
|
||||
"\2\13\177\3\2\2\2\r\u0082\3\2\2\2\17\u0088\3\2\2\2\21\u0093\3\2\2\2\23"+
|
||||
"\u0099\3\2\2\2\25\u009f\3\2\2\2\27\u00a2\3\2\2\2\31\u00a7\3\2\2\2\33\u00af"+
|
||||
"\3\2\2\2\35\u00b3\3\2\2\2\37\u00b8\3\2\2\2!\u00bb\3\2\2\2#\u00be\3\2\2"+
|
||||
"\2%\u00c7\3\2\2\2\'\u00cc\3\2\2\2)\u00d2\3\2\2\2+\u00d8\3\2\2\2-\u00e0"+
|
||||
"\3\2\2\2/\u00e6\3\2\2\2\61\u00e8\3\2\2\2\63\u00ea\3\2\2\2\65\u00ed\3\2"+
|
||||
"\2\2\67\u00ef\3\2\2\29\u00f2\3\2\2\2;\u00f4\3\2\2\2=\u00f6\3\2\2\2?\u00f8"+
|
||||
"\3\2\2\2A\u00fa\3\2\2\2C\u00fc\3\2\2\2E\u00fe\3\2\2\2G\u0100\3\2\2\2I"+
|
||||
"\u0102\3\2\2\2K\u0104\3\2\2\2M\u0106\3\2\2\2O\u0108\3\2\2\2Q\u011a\3\2"+
|
||||
"\2\2S\u011d\3\2\2\2U\u014b\3\2\2\2W\u014f\3\2\2\2Y\u0159\3\2\2\2[\u0161"+
|
||||
"\3\2\2\2]\u016c\3\2\2\2_\u0175\3\2\2\2a\u0177\3\2\2\2c\u0179\3\2\2\2e"+
|
||||
"\u018a\3\2\2\2g\u019a\3\2\2\2i\u01a0\3\2\2\2kl\7C\2\2lm\7P\2\2mn\7F\2"+
|
||||
"\2n\4\3\2\2\2op\7C\2\2pq\7P\2\2qr\7[\2\2r\6\3\2\2\2st\7C\2\2tu\7U\2\2"+
|
||||
"uv\7E\2\2v\b\3\2\2\2wx\7D\2\2xy\7G\2\2yz\7V\2\2z{\7Y\2\2{|\7G\2\2|}\7"+
|
||||
"G\2\2}~\7P\2\2~\n\3\2\2\2\177\u0080\7D\2\2\u0080\u0081\7[\2\2\u0081\f"+
|
||||
"\3\2\2\2\u0082\u0083\7E\2\2\u0083\u0084\7J\2\2\u0084\u0085\7K\2\2\u0085"+
|
||||
"\u0086\7N\2\2\u0086\u0087\7F\2\2\u0087\16\3\2\2\2\u0088\u0089\7F\2\2\u0089"+
|
||||
"\u008a\7G\2\2\u008a\u008b\7U\2\2\u008b\u008c\7E\2\2\u008c\u008d\7G\2\2"+
|
||||
"\u008d\u008e\7P\2\2\u008e\u008f\7F\2\2\u008f\u0090\7C\2\2\u0090\u0091"+
|
||||
"\7P\2\2\u0091\u0092\7V\2\2\u0092\20\3\2\2\2\u0093\u0094\7G\2\2\u0094\u0095"+
|
||||
"\7X\2\2\u0095\u0096\7G\2\2\u0096\u0097\7P\2\2\u0097\u0098\7V\2\2\u0098"+
|
||||
"\22\3\2\2\2\u0099\u009a\7H\2\2\u009a\u009b\7C\2\2\u009b\u009c\7N\2\2\u009c"+
|
||||
"\u009d\7U\2\2\u009d\u009e\7G\2\2\u009e\24\3\2\2\2\u009f\u00a0\7K\2\2\u00a0"+
|
||||
"\u00a1\7P\2\2\u00a1\26\3\2\2\2\u00a2\u00a3\7L\2\2\u00a3\u00a4\7Q\2\2\u00a4"+
|
||||
"\u00a5\7K\2\2\u00a5\u00a6\7P\2\2\u00a6\30\3\2\2\2\u00a7\u00a8\7O\2\2\u00a8"+
|
||||
"\u00a9\7C\2\2\u00a9\u00aa\7Z\2\2\u00aa\u00ab\7U\2\2\u00ab\u00ac\7R\2\2"+
|
||||
"\u00ac\u00ad\7C\2\2\u00ad\u00ae\7P\2\2\u00ae\32\3\2\2\2\u00af\u00b0\7"+
|
||||
"P\2\2\u00b0\u00b1\7Q\2\2\u00b1\u00b2\7V\2\2\u00b2\34\3\2\2\2\u00b3\u00b4"+
|
||||
"\7P\2\2\u00b4\u00b5\7W\2\2\u00b5\u00b6\7N\2\2\u00b6\u00b7\7N\2\2\u00b7"+
|
||||
"\36\3\2\2\2\u00b8\u00b9\7Q\2\2\u00b9\u00ba\7H\2\2\u00ba \3\2\2\2\u00bb"+
|
||||
"\u00bc\7Q\2\2\u00bc\u00bd\7T\2\2\u00bd\"\3\2\2\2\u00be\u00bf\7U\2\2\u00bf"+
|
||||
"\u00c0\7G\2\2\u00c0\u00c1\7S\2\2\u00c1\u00c2\7W\2\2\u00c2\u00c3\7G\2\2"+
|
||||
"\u00c3\u00c4\7P\2\2\u00c4\u00c5\7E\2\2\u00c5\u00c6\7G\2\2\u00c6$\3\2\2"+
|
||||
"\2\u00c7\u00c8\7V\2\2\u00c8\u00c9\7T\2\2\u00c9\u00ca\7W\2\2\u00ca\u00cb"+
|
||||
"\7G\2\2\u00cb&\3\2\2\2\u00cc\u00cd\7W\2\2\u00cd\u00ce\7P\2\2\u00ce\u00cf"+
|
||||
"\7V\2\2\u00cf\u00d0\7K\2\2\u00d0\u00d1\7N\2\2\u00d1(\3\2\2\2\u00d2\u00d3"+
|
||||
"\7Y\2\2\u00d3\u00d4\7J\2\2\u00d4\u00d5\7G\2\2\u00d5\u00d6\7T\2\2\u00d6"+
|
||||
"\u00d7\7G\2\2\u00d7*\3\2\2\2\u00d8\u00d9\7Y\2\2\u00d9\u00da\7K\2\2\u00da"+
|
||||
"\u00db\7V\2\2\u00db\u00dc\7J\2\2\u00dc,\3\2\2\2\u00dd\u00e1\7?\2\2\u00de"+
|
||||
"\u00df\7?\2\2\u00df\u00e1\7?\2\2\u00e0\u00dd\3\2\2\2\u00e0\u00de\3\2\2"+
|
||||
"\2\u00e1.\3\2\2\2\u00e2\u00e3\7>\2\2\u00e3\u00e7\7@\2\2\u00e4\u00e5\7"+
|
||||
"#\2\2\u00e5\u00e7\7?\2\2\u00e6\u00e2\3\2\2\2\u00e6\u00e4\3\2\2\2\u00e7"+
|
||||
"\60\3\2\2\2\u00e8\u00e9\7>\2\2\u00e9\62\3\2\2\2\u00ea\u00eb\7>\2\2\u00eb"+
|
||||
"\u00ec\7?\2\2\u00ec\64\3\2\2\2\u00ed\u00ee\7@\2\2\u00ee\66\3\2\2\2\u00ef"+
|
||||
"\u00f0\7@\2\2\u00f0\u00f1\7?\2\2\u00f18\3\2\2\2\u00f2\u00f3\7-\2\2\u00f3"+
|
||||
":\3\2\2\2\u00f4\u00f5\7/\2\2\u00f5<\3\2\2\2\u00f6\u00f7\7,\2\2\u00f7>"+
|
||||
"\3\2\2\2\u00f8\u00f9\7\61\2\2\u00f9@\3\2\2\2\u00fa\u00fb\7\'\2\2\u00fb"+
|
||||
"B\3\2\2\2\u00fc\u00fd\7\60\2\2\u00fdD\3\2\2\2\u00fe\u00ff\7.\2\2\u00ff"+
|
||||
"F\3\2\2\2\u0100\u0101\7]\2\2\u0101H\3\2\2\2\u0102\u0103\7_\2\2\u0103J"+
|
||||
"\3\2\2\2\u0104\u0105\7*\2\2\u0105L\3\2\2\2\u0106\u0107\7+\2\2\u0107N\3"+
|
||||
"\2\2\2\u0108\u0109\7~\2\2\u0109P\3\2\2\2\u010a\u010e\7)\2\2\u010b\u010d"+
|
||||
"\n\2\2\2\u010c\u010b\3\2\2\2\u010d\u0110\3\2\2\2\u010e\u010c\3\2\2\2\u010e"+
|
||||
"\u010f\3\2\2\2\u010f\u0111\3\2\2\2\u0110\u010e\3\2\2\2\u0111\u011b\7)"+
|
||||
"\2\2\u0112\u0116\7$\2\2\u0113\u0115\n\3\2\2\u0114\u0113\3\2\2\2\u0115"+
|
||||
"\u0118\3\2\2\2\u0116\u0114\3\2\2\2\u0116\u0117\3\2\2\2\u0117\u0119\3\2"+
|
||||
"\2\2\u0118\u0116\3\2\2\2\u0119\u011b\7$\2\2\u011a\u010a\3\2\2\2\u011a"+
|
||||
"\u0112\3\2\2\2\u011bR\3\2\2\2\u011c\u011e\5_\60\2\u011d\u011c\3\2\2\2"+
|
||||
"\u011e\u011f\3\2\2\2\u011f\u011d\3\2\2\2\u011f\u0120\3\2\2\2\u0120T\3"+
|
||||
"\2\2\2\u0121\u0123\5_\60\2\u0122\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124"+
|
||||
"\u0122\3\2\2\2\u0124\u0125\3\2\2\2\u0125\u0126\3\2\2\2\u0126\u012a\5C"+
|
||||
"\"\2\u0127\u0129\5_\60\2\u0128\u0127\3\2\2\2\u0129\u012c\3\2\2\2\u012a"+
|
||||
"\u0128\3\2\2\2\u012a\u012b\3\2\2\2\u012b\u014c\3\2\2\2\u012c\u012a\3\2"+
|
||||
"\2\2\u012d\u012f\5C\"\2\u012e\u0130\5_\60\2\u012f\u012e\3\2\2\2\u0130"+
|
||||
"\u0131\3\2\2\2\u0131\u012f\3\2\2\2\u0131\u0132\3\2\2\2\u0132\u014c\3\2"+
|
||||
"\2\2\u0133\u0135\5_\60\2\u0134\u0133\3\2\2\2\u0135\u0136\3\2\2\2\u0136"+
|
||||
"\u0134\3\2\2\2\u0136\u0137\3\2\2\2\u0137\u013f\3\2\2\2\u0138\u013c\5C"+
|
||||
"\"\2\u0139\u013b\5_\60\2\u013a\u0139\3\2\2\2\u013b\u013e\3\2\2\2\u013c"+
|
||||
"\u013a\3\2\2\2\u013c\u013d\3\2\2\2\u013d\u0140\3\2\2\2\u013e\u013c\3\2"+
|
||||
"\2\2\u013f\u0138\3\2\2\2\u013f\u0140\3\2\2\2\u0140\u0141\3\2\2\2\u0141"+
|
||||
"\u0142\5]/\2\u0142\u014c\3\2\2\2\u0143\u0145\5C\"\2\u0144\u0146\5_\60"+
|
||||
"\2\u0145\u0144\3\2\2\2\u0146\u0147\3\2\2\2\u0147\u0145\3\2\2\2\u0147\u0148"+
|
||||
"\3\2\2\2\u0148\u0149\3\2\2\2\u0149\u014a\5]/\2\u014a\u014c\3\2\2\2\u014b"+
|
||||
"\u0122\3\2\2\2\u014b\u012d\3\2\2\2\u014b\u0134\3\2\2\2\u014b\u0143\3\2"+
|
||||
"\2\2\u014cV\3\2\2\2\u014d\u0150\5a\61\2\u014e\u0150\7a\2\2\u014f\u014d"+
|
||||
"\3\2\2\2\u014f\u014e\3\2\2\2\u0150\u0156\3\2\2\2\u0151\u0155\5a\61\2\u0152"+
|
||||
"\u0155\5_\60\2\u0153\u0155\t\4\2\2\u0154\u0151\3\2\2\2\u0154\u0152\3\2"+
|
||||
"\2\2\u0154\u0153\3\2\2\2\u0155\u0158\3\2\2\2\u0156\u0154\3\2\2\2\u0156"+
|
||||
"\u0157\3\2\2\2\u0157X\3\2\2\2\u0158\u0156\3\2\2\2\u0159\u015d\5_\60\2"+
|
||||
"\u015a\u015e\5a\61\2\u015b\u015e\5_\60\2\u015c\u015e\t\4\2\2\u015d\u015a"+
|
||||
"\3\2\2\2\u015d\u015b\3\2\2\2\u015d\u015c\3\2\2\2\u015e\u015f\3\2\2\2\u015f"+
|
||||
"\u015d\3\2\2\2\u015f\u0160\3\2\2\2\u0160Z\3\2\2\2\u0161\u0167\7$\2\2\u0162"+
|
||||
"\u0166\n\3\2\2\u0163\u0164\7$\2\2\u0164\u0166\7$\2\2\u0165\u0162\3\2\2"+
|
||||
"\2\u0165\u0163\3\2\2\2\u0166\u0169\3\2\2\2\u0167\u0165\3\2\2\2\u0167\u0168"+
|
||||
"\3\2\2\2\u0168\u016a\3\2\2\2\u0169\u0167\3\2\2\2\u016a\u016b\7$\2\2\u016b"+
|
||||
"\\\3\2\2\2\u016c\u016e\7G\2\2\u016d\u016f\t\5\2\2\u016e\u016d\3\2\2\2"+
|
||||
"\u016e\u016f\3\2\2\2\u016f\u0171\3\2\2\2\u0170\u0172\5_\60\2\u0171\u0170"+
|
||||
"\3\2\2\2\u0172\u0173\3\2\2\2\u0173\u0171\3\2\2\2\u0173\u0174\3\2\2\2\u0174"+
|
||||
"^\3\2\2\2\u0175\u0176\t\6\2\2\u0176`\3\2\2\2\u0177\u0178\t\7\2\2\u0178"+
|
||||
"b\3\2\2\2\u0179\u017a\7\61\2\2\u017a\u017b\7\61\2\2\u017b\u017f\3\2\2"+
|
||||
"\2\u017c\u017e\n\b\2\2\u017d\u017c\3\2\2\2\u017e\u0181\3\2\2\2\u017f\u017d"+
|
||||
"\3\2\2\2\u017f\u0180\3\2\2\2\u0180\u0183\3\2\2\2\u0181\u017f\3\2\2\2\u0182"+
|
||||
"\u0184\7\17\2\2\u0183\u0182\3\2\2\2\u0183\u0184\3\2\2\2\u0184\u0186\3"+
|
||||
"\2\2\2\u0185\u0187\7\f\2\2\u0186\u0185\3\2\2\2\u0186\u0187\3\2\2\2\u0187"+
|
||||
"\u0188\3\2\2\2\u0188\u0189\b\62\2\2\u0189d\3\2\2\2\u018a\u018b\7\61\2"+
|
||||
"\2\u018b\u018c\7,\2\2\u018c\u0191\3\2\2\2\u018d\u0190\5e\63\2\u018e\u0190"+
|
||||
"\13\2\2\2\u018f\u018d\3\2\2\2\u018f\u018e\3\2\2\2\u0190\u0193\3\2\2\2"+
|
||||
"\u0191\u0192\3\2\2\2\u0191\u018f\3\2\2\2\u0192\u0194\3\2\2\2\u0193\u0191"+
|
||||
"\3\2\2\2\u0194\u0195\7,\2\2\u0195\u0196\7\61\2\2\u0196\u0197\3\2\2\2\u0197"+
|
||||
"\u0198\b\63\2\2\u0198f\3\2\2\2\u0199\u019b\t\t\2\2\u019a\u0199\3\2\2\2"+
|
||||
"\u019b\u019c\3\2\2\2\u019c\u019a\3\2\2\2\u019c\u019d\3\2\2\2\u019d\u019e"+
|
||||
"\3\2\2\2\u019e\u019f\b\64\2\2\u019fh\3\2\2\2\u01a0\u01a1\13\2\2\2\u01a1"+
|
||||
"j\3\2\2\2 \2\u00e0\u00e6\u010e\u0116\u011a\u011f\u0124\u012a\u0131\u0136"+
|
||||
"\u013c\u013f\u0147\u014b\u014f\u0154\u0156\u015d\u015f\u0165\u0167\u016e"+
|
||||
"\u0173\u017f\u0183\u0186\u018f\u0191\u019c\3\2\3\2";
|
||||
public static final ATN _ATN =
|
||||
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
|
||||
static {
|
||||
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
|
||||
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
|
||||
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
|
||||
}
|
||||
}
|
||||
}
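A minimal sketch of driving this generated lexer by hand, assuming it is run from the same org.elasticsearch.xpack.eql.parser package (the class is package-private) and the ANTLR 4.5.3 runtime is on the classpath; the demo class name and the sample query are purely illustrative. Note that the keyword literals above are upper-case, which is why EqlParser further down wraps its input in a CaseInsensitiveStream.

package org.elasticsearch.xpack.eql.parser;

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Token;

public class EqlLexerDemo {
    public static void main(String[] args) {
        // Illustrative input only; with a plain ANTLRInputStream the keywords must be typed upper-case.
        EqlBaseLexer lexer = new EqlBaseLexer(new ANTLRInputStream("pid == 4 AND command != \"cmd.exe\""));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.fill(); // run the lexer over the whole input
        for (Token t : tokens.getTokens()) {
            // Print each token's symbolic name (IDENTIFIER, EQ, INTEGER_VALUE, ...) and its text.
            System.out.println(EqlBaseLexer.VOCABULARY.getSymbolicName(t.getType()) + " -> " + t.getText());
        }
    }
}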
|
|
@@ -0,0 +1,460 @@
|
|||
// ANTLR GENERATED CODE: DO NOT EDIT
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
import org.antlr.v4.runtime.tree.ParseTreeListener;
|
||||
|
||||
/**
|
||||
* This interface defines a complete listener for a parse tree produced by
|
||||
* {@link EqlBaseParser}.
|
||||
*/
|
||||
interface EqlBaseListener extends ParseTreeListener {
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#singleStatement}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterSingleStatement(EqlBaseParser.SingleStatementContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#singleStatement}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitSingleStatement(EqlBaseParser.SingleStatementContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#singleExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterSingleExpression(EqlBaseParser.SingleExpressionContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#singleExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitSingleExpression(EqlBaseParser.SingleExpressionContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#statement}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterStatement(EqlBaseParser.StatementContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#statement}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitStatement(EqlBaseParser.StatementContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#query}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterQuery(EqlBaseParser.QueryContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#query}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitQuery(EqlBaseParser.QueryContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#sequence}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterSequence(EqlBaseParser.SequenceContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#sequence}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitSequence(EqlBaseParser.SequenceContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#join}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterJoin(EqlBaseParser.JoinContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#join}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitJoin(EqlBaseParser.JoinContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#pipe}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterPipe(EqlBaseParser.PipeContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#pipe}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitPipe(EqlBaseParser.PipeContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#joinKeys}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterJoinKeys(EqlBaseParser.JoinKeysContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#joinKeys}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitJoinKeys(EqlBaseParser.JoinKeysContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#span}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterSpan(EqlBaseParser.SpanContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#span}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitSpan(EqlBaseParser.SpanContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#match}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterMatch(EqlBaseParser.MatchContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#match}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitMatch(EqlBaseParser.MatchContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#condition}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterCondition(EqlBaseParser.ConditionContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#condition}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitCondition(EqlBaseParser.ConditionContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#expression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterExpression(EqlBaseParser.ExpressionContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#expression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitExpression(EqlBaseParser.ExpressionContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code logicalNot}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterLogicalNot(EqlBaseParser.LogicalNotContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code logicalNot}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitLogicalNot(EqlBaseParser.LogicalNotContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code booleanDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code booleanDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code logicalBinary}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code logicalBinary}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#predicated}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterPredicated(EqlBaseParser.PredicatedContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#predicated}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitPredicated(EqlBaseParser.PredicatedContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#predicate}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterPredicate(EqlBaseParser.PredicateContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#predicate}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitPredicate(EqlBaseParser.PredicateContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code valueExpressionDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterValueExpressionDefault(EqlBaseParser.ValueExpressionDefaultContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code valueExpressionDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitValueExpressionDefault(EqlBaseParser.ValueExpressionDefaultContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code comparison}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterComparison(EqlBaseParser.ComparisonContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code comparison}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitComparison(EqlBaseParser.ComparisonContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code arithmeticBinary}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterArithmeticBinary(EqlBaseParser.ArithmeticBinaryContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code arithmeticBinary}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitArithmeticBinary(EqlBaseParser.ArithmeticBinaryContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code arithmeticUnary}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code arithmeticUnary}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code constantDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterConstantDefault(EqlBaseParser.ConstantDefaultContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code constantDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitConstantDefault(EqlBaseParser.ConstantDefaultContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code function}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterFunction(EqlBaseParser.FunctionContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code function}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitFunction(EqlBaseParser.FunctionContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code dereference}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterDereference(EqlBaseParser.DereferenceContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code dereference}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitDereference(EqlBaseParser.DereferenceContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code parenthesizedExpression}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterParenthesizedExpression(EqlBaseParser.ParenthesizedExpressionContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code parenthesizedExpression}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitParenthesizedExpression(EqlBaseParser.ParenthesizedExpressionContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#functionExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterFunctionExpression(EqlBaseParser.FunctionExpressionContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#functionExpression}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitFunctionExpression(EqlBaseParser.FunctionExpressionContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code nullLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterNullLiteral(EqlBaseParser.NullLiteralContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code nullLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitNullLiteral(EqlBaseParser.NullLiteralContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code numericLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterNumericLiteral(EqlBaseParser.NumericLiteralContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code numericLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitNumericLiteral(EqlBaseParser.NumericLiteralContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code booleanLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterBooleanLiteral(EqlBaseParser.BooleanLiteralContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code booleanLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitBooleanLiteral(EqlBaseParser.BooleanLiteralContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code stringLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterStringLiteral(EqlBaseParser.StringLiteralContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code stringLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitStringLiteral(EqlBaseParser.StringLiteralContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#comparisonOperator}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterComparisonOperator(EqlBaseParser.ComparisonOperatorContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#comparisonOperator}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitComparisonOperator(EqlBaseParser.ComparisonOperatorContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#booleanValue}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterBooleanValue(EqlBaseParser.BooleanValueContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#booleanValue}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitBooleanValue(EqlBaseParser.BooleanValueContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#qualifiedNames}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#qualifiedNames}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#qualifiedName}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterQualifiedName(EqlBaseParser.QualifiedNameContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#qualifiedName}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitQualifiedName(EqlBaseParser.QualifiedNameContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#identifier}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterIdentifier(EqlBaseParser.IdentifierContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#identifier}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitIdentifier(EqlBaseParser.IdentifierContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code quotedIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#quoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code quotedIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#quoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code unquotedIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code unquotedIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code digitIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code digitIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code decimalLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#number}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code decimalLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#number}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by the {@code integerLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#number}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by the {@code integerLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#number}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx);
|
||||
/**
|
||||
* Enter a parse tree produced by {@link EqlBaseParser#string}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void enterString(EqlBaseParser.StringContext ctx);
|
||||
/**
|
||||
* Exit a parse tree produced by {@link EqlBaseParser#string}.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
void exitString(EqlBaseParser.StringContext ctx);
|
||||
}
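As a rough illustration of how this listener interface is meant to be used, a sketch that extends the generated EqlBaseBaseListener (the empty default implementation referenced by EqlParser below) and overrides a single callback; the class names ComparisonCounter and EqlListenerDemo and the query string are hypothetical.

package org.elasticsearch.xpack.eql.parser;

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTreeWalker;

public class EqlListenerDemo {

    // Only the callback of interest is overridden; EqlBaseBaseListener supplies no-op defaults for the rest.
    static class ComparisonCounter extends EqlBaseBaseListener {
        int comparisons;

        @Override
        public void enterComparison(EqlBaseParser.ComparisonContext ctx) {
            comparisons++;
        }
    }

    public static void main(String[] args) {
        EqlBaseLexer lexer = new EqlBaseLexer(new ANTLRInputStream("pid == 4 AND name == \"cmd\""));
        EqlBaseParser parser = new EqlBaseParser(new CommonTokenStream(lexer));
        ComparisonCounter counter = new ComparisonCounter();
        // Walk the tree produced by the singleExpression start rule, firing enter/exit callbacks.
        ParseTreeWalker.DEFAULT.walk(counter, parser.singleExpression());
        System.out.println("comparisons seen: " + counter.comparisons);
    }
}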
|
File diff suppressed because it is too large
|
@@ -0,0 +1,279 @@
|
|||
// ANTLR GENERATED CODE: DO NOT EDIT
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
|
||||
|
||||
/**
|
||||
* This interface defines a complete generic visitor for a parse tree produced
|
||||
* by {@link EqlBaseParser}.
|
||||
*
|
||||
* @param <T> The return type of the visit operation. Use {@link Void} for
|
||||
* operations with no return type.
|
||||
*/
|
||||
interface EqlBaseVisitor<T> extends ParseTreeVisitor<T> {
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#singleStatement}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitSingleStatement(EqlBaseParser.SingleStatementContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#singleExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitSingleExpression(EqlBaseParser.SingleExpressionContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#statement}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitStatement(EqlBaseParser.StatementContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#query}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitQuery(EqlBaseParser.QueryContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#sequence}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitSequence(EqlBaseParser.SequenceContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#join}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitJoin(EqlBaseParser.JoinContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#pipe}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitPipe(EqlBaseParser.PipeContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#joinKeys}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitJoinKeys(EqlBaseParser.JoinKeysContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#span}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitSpan(EqlBaseParser.SpanContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#match}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitMatch(EqlBaseParser.MatchContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#condition}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitCondition(EqlBaseParser.ConditionContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#expression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitExpression(EqlBaseParser.ExpressionContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code logicalNot}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitLogicalNot(EqlBaseParser.LogicalNotContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code booleanDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code logicalBinary}
|
||||
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#predicated}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitPredicated(EqlBaseParser.PredicatedContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#predicate}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitPredicate(EqlBaseParser.PredicateContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code valueExpressionDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitValueExpressionDefault(EqlBaseParser.ValueExpressionDefaultContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code comparison}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitComparison(EqlBaseParser.ComparisonContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code arithmeticBinary}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitArithmeticBinary(EqlBaseParser.ArithmeticBinaryContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code arithmeticUnary}
|
||||
* labeled alternative in {@link EqlBaseParser#valueExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code constantDefault}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitConstantDefault(EqlBaseParser.ConstantDefaultContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code function}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitFunction(EqlBaseParser.FunctionContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code dereference}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitDereference(EqlBaseParser.DereferenceContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code parenthesizedExpression}
|
||||
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitParenthesizedExpression(EqlBaseParser.ParenthesizedExpressionContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#functionExpression}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitFunctionExpression(EqlBaseParser.FunctionExpressionContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code nullLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitNullLiteral(EqlBaseParser.NullLiteralContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code numericLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitNumericLiteral(EqlBaseParser.NumericLiteralContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code booleanLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitBooleanLiteral(EqlBaseParser.BooleanLiteralContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code stringLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#constant}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitStringLiteral(EqlBaseParser.StringLiteralContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#comparisonOperator}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitComparisonOperator(EqlBaseParser.ComparisonOperatorContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#booleanValue}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitBooleanValue(EqlBaseParser.BooleanValueContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#qualifiedNames}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#qualifiedName}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitQualifiedName(EqlBaseParser.QualifiedNameContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#identifier}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitIdentifier(EqlBaseParser.IdentifierContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code quotedIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#quoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code unquotedIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code digitIdentifier}
|
||||
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code decimalLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#number}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by the {@code integerLiteral}
|
||||
* labeled alternative in {@link EqlBaseParser#number}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx);
|
||||
/**
|
||||
* Visit a parse tree produced by {@link EqlBaseParser#string}.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
T visitString(EqlBaseParser.StringContext ctx);
|
||||
}
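For the visitor side, a sketch that builds on the EqlBaseBaseVisitor default implementation shown at the top of this change, overriding one method and leaving everything else to visitChildren; collecting qualified names is only an example use, and the class name is hypothetical.

package org.elasticsearch.xpack.eql.parser;

import java.util.ArrayList;
import java.util.List;

// Collects every qualified name (field reference) encountered in a parse tree.
class QualifiedNameCollector extends EqlBaseBaseVisitor<Void> {

    final List<String> names = new ArrayList<>();

    @Override
    public Void visitQualifiedName(EqlBaseParser.QualifiedNameContext ctx) {
        names.add(ctx.getText());
        return visitChildren(ctx); // continue with the default traversal
    }
}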
|
|
@@ -0,0 +1,161 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
|
||||
import org.antlr.v4.runtime.BaseErrorListener;
|
||||
import org.antlr.v4.runtime.CommonToken;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.DiagnosticErrorListener;
|
||||
import org.antlr.v4.runtime.Parser;
|
||||
import org.antlr.v4.runtime.ParserRuleContext;
|
||||
import org.antlr.v4.runtime.RecognitionException;
|
||||
import org.antlr.v4.runtime.Recognizer;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.atn.ATNConfigSet;
|
||||
import org.antlr.v4.runtime.atn.PredictionMode;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
import org.antlr.v4.runtime.misc.Pair;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.xpack.eql.expression.Expression;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.BitSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Function;
|
||||
|
||||
import static java.lang.String.format;
|
||||
|
||||
public class EqlParser {
|
||||
|
||||
private static final Logger log = LogManager.getLogger();
|
||||
|
||||
private final boolean DEBUG = true;
|
||||
|
||||
/**
|
||||
* Parses an EQL statement into an expression tree
|
||||
* @param eql - the EQL statement
|
||||
*/
|
||||
public Expression createStatement(String eql) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Parsing as statement: {}", eql);
|
||||
}
|
||||
return invokeParser(eql, EqlBaseParser::singleStatement, AstBuilder::expression);
|
||||
}
|
||||
|
||||
public Expression createExpression(String expression) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Parsing as expression: {}", expression);
|
||||
}
|
||||
|
||||
return invokeParser(expression, EqlBaseParser::singleExpression, AstBuilder::expression);
|
||||
}
|
||||
|
||||
private <T> T invokeParser(String eql,
|
||||
Function<EqlBaseParser, ParserRuleContext> parseFunction,
|
||||
BiFunction<AstBuilder, ParserRuleContext, T> visitor) {
|
||||
try {
|
||||
EqlBaseLexer lexer = new EqlBaseLexer(new CaseInsensitiveStream(eql));
|
||||
|
||||
lexer.removeErrorListeners();
|
||||
lexer.addErrorListener(ERROR_LISTENER);
|
||||
|
||||
CommonTokenStream tokenStream = new CommonTokenStream(lexer);
|
||||
EqlBaseParser parser = new EqlBaseParser(tokenStream);
|
||||
|
||||
parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
|
||||
|
||||
parser.removeErrorListeners();
|
||||
parser.addErrorListener(ERROR_LISTENER);
|
||||
|
||||
parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
|
||||
|
||||
if (DEBUG) {
|
||||
debug(parser);
|
||||
tokenStream.fill();
|
||||
|
||||
for (Token t : tokenStream.getTokens()) {
|
||||
String symbolicName = EqlBaseLexer.VOCABULARY.getSymbolicName(t.getType());
|
||||
String literalName = EqlBaseLexer.VOCABULARY.getLiteralName(t.getType());
|
||||
log.info(format(Locale.ROOT, " %-15s '%s'",
|
||||
symbolicName == null ? literalName : symbolicName,
|
||||
t.getText()));
|
||||
}
|
||||
}
|
||||
|
||||
ParserRuleContext tree = parseFunction.apply(parser);
|
||||
|
||||
if (DEBUG) {
|
||||
log.info("Parse tree {} " + tree.toStringTree());
|
||||
}
|
||||
|
||||
return visitor.apply(new AstBuilder(), tree);
|
||||
} catch (StackOverflowError e) {
|
||||
throw new ParsingException("EQL statement is too large, " +
|
||||
"causing stack overflow when generating the parsing tree: [{}]", sql);
|
||||
}
|
||||
}
|
||||
|
||||
private static void debug(EqlBaseParser parser) {
|
||||
|
||||
// when debugging, use the exact prediction mode (needed for diagnostics as well)
|
||||
parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
|
||||
|
||||
parser.addParseListener(parser.new TraceListener());
|
||||
|
||||
parser.addErrorListener(new DiagnosticErrorListener(false) {
|
||||
@Override
|
||||
public void reportAttemptingFullContext(Parser recognizer, DFA dfa,
|
||||
int startIndex, int stopIndex, BitSet conflictingAlts, ATNConfigSet configs) {}
|
||||
|
||||
@Override
|
||||
public void reportContextSensitivity(Parser recognizer, DFA dfa,
|
||||
int startIndex, int stopIndex, int prediction, ATNConfigSet configs) {}
|
||||
});
|
||||
}
|
||||
|
||||
private class PostProcessor extends EqlBaseBaseListener {
|
||||
private final List<String> ruleNames;
|
||||
|
||||
PostProcessor(List<String> ruleNames) {
|
||||
this.ruleNames = ruleNames;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void exitDigitIdentifier(EqlBaseParser.DigitIdentifierContext context) {
|
||||
Token token = context.DIGIT_IDENTIFIER().getSymbol();
|
||||
throw new ParsingException(
|
||||
"identifiers must not start with a digit; please use double quotes",
|
||||
null,
|
||||
token.getLine(),
|
||||
token.getCharPositionInLine());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void exitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext context) {
|
||||
// Remove quotes
|
||||
context.getParent().removeLastChild();
|
||||
|
||||
Token token = (Token) context.getChild(0).getPayload();
|
||||
context.getParent().addChild(new CommonToken(
|
||||
new Pair<>(token.getTokenSource(), token.getInputStream()),
|
||||
EqlBaseLexer.IDENTIFIER,
|
||||
token.getChannel(),
|
||||
token.getStartIndex() + 1,
|
||||
token.getStopIndex() - 1));
|
||||
}
|
||||
}
|
||||
|
||||
private static final BaseErrorListener ERROR_LISTENER = new BaseErrorListener() {
|
||||
@Override
|
||||
public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
|
||||
int charPositionInLine, String message, RecognitionException e) {
|
||||
throw new ParsingException(message, e, line, charPositionInLine);
|
||||
}
|
||||
};
|
||||
}
|
|
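For illustration only (not part of the diff): a minimal sketch of how the draft parser above might be driven once the AstBuilder it references is in place. The query string is made up, and the fragment assumes the parser package classes are imported.

EqlParser parser = new EqlParser();
try {
    // singleStatement rule + AstBuilder, exactly as createStatement() wires them up above
    Expression statement = parser.createStatement("process where process_name == \"svchost.exe\"");
    System.out.println(statement);
} catch (ParsingException pe) {
    // the error listener converts ANTLR syntax errors into ParsingException ("line x:y: ...")
    System.err.println(pe.getMessage());
}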
@@ -0,0 +1,11 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
|
||||
public class ExpressionBuilder extends IdentifierBuilder {
|
||||
|
||||
}
|
|
@@ -0,0 +1,20 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
|
||||
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.IdentifierContext;
|
||||
|
||||
abstract class IdentifierBuilder extends AbstractBuilder {
|
||||
|
||||
@Override
|
||||
public String visitIdentifier(IdentifierContext ctx) {
|
||||
return ctx == null ? null : unquoteIdentifier(ctx.getText());
|
||||
}
|
||||
|
||||
private static String unquoteIdentifier(String identifier) {
|
||||
return identifier.replace("\"\"", "\"");
|
||||
}
|
||||
}
|
|
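Worked example (the values are invented): quoted identifiers are unescaped in two steps. EqlParser.PostProcessor, earlier in this diff, drops the surrounding quotes by narrowing the token's start/stop indices; visitIdentifier above then collapses the doubled inner quotes.

String tokenText = "my\"\"field";                   // token text after the outer quotes are stripped
String unquoted = tokenText.replace("\"\"", "\""); // -> my"field (same logic as unquoteIdentifier)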
@@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.xpack.eql.EqlClientException;
|
||||
import org.elasticsearch.xpack.eql.tree.Source;
|
||||
|
||||
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
|
||||
|
||||
public class ParsingException extends EqlClientException {
|
||||
private final int line;
|
||||
private final int charPositionInLine;
|
||||
|
||||
public ParsingException(String message, Exception cause, int line, int charPositionInLine) {
|
||||
super(message, cause);
|
||||
this.line = line;
|
||||
this.charPositionInLine = charPositionInLine;
|
||||
}
|
||||
|
||||
ParsingException(String message, Object... args) {
|
||||
this(Source.EMPTY, message, args);
|
||||
}
|
||||
|
||||
public ParsingException(Source source, String message, Object... args) {
|
||||
super(message, args);
|
||||
this.line = source.source().getLineNumber();
|
||||
this.charPositionInLine = source.source().getColumnNumber();
|
||||
}
|
||||
|
||||
public ParsingException(Exception cause, Source source, String message, Object... args) {
|
||||
super(cause, message, args);
|
||||
this.line = source.source().getLineNumber();
|
||||
this.charPositionInLine = source.source().getColumnNumber();
|
||||
}
|
||||
|
||||
public int getLineNumber() {
|
||||
return line;
|
||||
}
|
||||
|
||||
public int getColumnNumber() {
|
||||
return charPositionInLine + 1;
|
||||
}
|
||||
|
||||
public String getErrorMessage() {
|
||||
return super.getMessage();
|
||||
}
|
||||
|
||||
@Override
|
||||
public RestStatus status() {
|
||||
return RestStatus.BAD_REQUEST;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage());
|
||||
}
|
||||
}
|
|
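Illustration with assumed values: ANTLR reports 0-based column positions, while the exception exposes 1-based ones, and getMessage() prefixes the location.

ParsingException pe = new ParsingException("mismatched input", null, 1, 8);
int line = pe.getLineNumber();     // 1
int column = pe.getColumnNumber(); // 9, i.e. charPositionInLine + 1
// pe.getMessage() reads roughly: "line 1:9: mismatched input"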
@@ -0,0 +1,13 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.eql.plugin;
|
||||
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
|
||||
public class EqlPlugin extends Plugin {
|
||||
|
||||
}
|
|
@@ -0,0 +1,48 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.eql.tree;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public final class Location {
|
||||
private final int line;
|
||||
private final int charPositionInLine;
|
||||
|
||||
public static final Location EMPTY = new Location(-1, -2);
|
||||
|
||||
public Location(int line, int charPositionInLine) {
|
||||
this.line = line;
|
||||
this.charPositionInLine = charPositionInLine;
|
||||
}
|
||||
|
||||
public int getLineNumber() {
|
||||
return line;
|
||||
}
|
||||
|
||||
public int getColumnNumber() {
|
||||
return charPositionInLine + 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "@" + getLineNumber() + ":" + getColumnNumber();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(line, charPositionInLine);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || obj.getClass() != getClass()) {
|
||||
return false;
|
||||
}
|
||||
Location other = (Location) obj;
|
||||
return line == other.line
|
||||
&& charPositionInLine == other.charPositionInLine;
|
||||
}
|
||||
}
|
|
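A side note on the -2 above (illustration only): getColumnNumber() unconditionally adds 1, so the EMPTY marker still reports -1 for both coordinates.

int line = Location.EMPTY.getLineNumber();      // -1
int column = Location.EMPTY.getColumnNumber();  // -1 (charPositionInLine of -2, plus 1)
String rendered = Location.EMPTY.toString();    // "@-1:-1"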
@@ -0,0 +1,398 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.eql.tree;
|
||||
|
||||
import org.elasticsearch.xpack.eql.EqlIllegalArgumentException;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.BitSet;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
|
||||
/**
|
||||
* Immutable tree structure.
|
||||
* The traversal is done depth-first, pre-order (the node first, then its children).
|
||||
* A post-order alternative (children first, then the node) is also offered.
|
||||
*
|
||||
* Allows transformation which returns the same tree (if no change has been performed) or a new tree otherwise.
|
||||
*
|
||||
* While it uses functional Java where possible, the lack of parallelism means that
|
||||
* streams and iterators add little value here and bring baggage that
|
||||
* is easy to use incorrectly.
|
||||
*
|
||||
* @param <T> node type
|
||||
*/
|
||||
public abstract class Node<T extends Node<T>> {
|
||||
private static final int TO_STRING_MAX_PROP = 10;
|
||||
private static final int TO_STRING_MAX_WIDTH = 110;
|
||||
|
||||
private final Source source;
|
||||
private final List<T> children;
|
||||
|
||||
public Node(Source source, List<T> children) {
|
||||
this.source = (source != null ? source : Source.EMPTY);
|
||||
if (children.contains(null)) {
|
||||
throw new EqlIllegalArgumentException("Null children are not allowed");
|
||||
}
|
||||
this.children = children;
|
||||
}
|
||||
|
||||
public Source source() {
|
||||
return source;
|
||||
}
|
||||
|
||||
public Location sourceLocation() {
|
||||
return source.source();
|
||||
}
|
||||
|
||||
public String sourceText() {
|
||||
return source.text();
|
||||
}
|
||||
|
||||
public List<T> children() {
|
||||
return children;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void forEachDown(Consumer<? super T> action) {
|
||||
action.accept((T) this);
|
||||
children().forEach(c -> c.forEachDown(action));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <E extends T> void forEachDown(Consumer<? super E> action, final Class<E> typeToken) {
|
||||
forEachDown(t -> {
|
||||
if (typeToken.isInstance(t)) {
|
||||
action.accept((E) t);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void forEachUp(Consumer<? super T> action) {
|
||||
children().forEach(c -> c.forEachUp(action));
|
||||
action.accept((T) this);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <E extends T> void forEachUp(Consumer<? super E> action, final Class<E> typeToken) {
|
||||
forEachUp(t -> {
|
||||
if (typeToken.isInstance(t)) {
|
||||
action.accept((E) t);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public <E> void forEachPropertiesOnly(Consumer<? super E> rule, Class<E> typeToken) {
|
||||
forEachProperty(rule, typeToken);
|
||||
}
|
||||
|
||||
public <E> void forEachPropertiesDown(Consumer<? super E> rule, Class<E> typeToken) {
|
||||
forEachDown(e -> e.forEachProperty(rule, typeToken));
|
||||
}
|
||||
|
||||
public <E> void forEachPropertiesUp(Consumer<? super E> rule, Class<E> typeToken) {
|
||||
forEachUp(e -> e.forEachProperty(rule, typeToken));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected <E> void forEachProperty(Consumer<? super E> rule, Class<E> typeToken) {
|
||||
for (Object prop : info().properties()) {
|
||||
// skip children (only properties are interesting)
|
||||
if (prop != children && !children.contains(prop) && typeToken.isInstance(prop)) {
|
||||
rule.accept((E) prop);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public boolean anyMatch(Predicate<? super T> predicate) {
|
||||
boolean result = predicate.test((T) this);
|
||||
if (!result) {
|
||||
for (T child : children) {
|
||||
if (child.anyMatch(predicate)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public List<T> collect(Predicate<? super T> predicate) {
|
||||
List<T> l = new ArrayList<>();
|
||||
forEachDown(n -> {
|
||||
if (predicate.test(n)) {
|
||||
l.add(n);
|
||||
}
|
||||
});
|
||||
return l.isEmpty() ? emptyList() : l;
|
||||
}
|
||||
|
||||
public List<T> collectLeaves() {
|
||||
return collect(n -> n.children().isEmpty());
|
||||
}
|
||||
|
||||
// parse the list in pre-order and on match, skip the child/branch and move on to the next child/branch
|
||||
public List<T> collectFirstChildren(Predicate<? super T> predicate) {
|
||||
List<T> matches = new ArrayList<>();
|
||||
doCollectFirst(predicate, matches);
|
||||
return matches;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected void doCollectFirst(Predicate<? super T> predicate, List<T> matches) {
|
||||
T t = (T) this;
|
||||
if (predicate.test(t)) {
|
||||
matches.add(t);
|
||||
} else {
|
||||
for (T child : children()) {
|
||||
child.doCollectFirst(predicate, matches);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: maybe add a flatMap (need to double check the Stream bit)
|
||||
|
||||
//
|
||||
// Transform methods
|
||||
//
|
||||
|
||||
//
|
||||
// transform the node itself and its children
|
||||
//
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public T transformDown(Function<? super T, ? extends T> rule) {
|
||||
T root = rule.apply((T) this);
|
||||
Node<T> node = this.equals(root) ? this : root;
|
||||
|
||||
return node.transformChildren(child -> child.transformDown(rule));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <E extends T> T transformDown(Function<E, ? extends T> rule, final Class<E> typeToken) {
|
||||
// type filtering function
|
||||
return transformDown((t) -> (typeToken.isInstance(t) ? rule.apply((E) t) : t));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public T transformUp(Function<? super T, ? extends T> rule) {
|
||||
T transformed = transformChildren(child -> child.transformUp(rule));
|
||||
T node = this.equals(transformed) ? (T) this : transformed;
|
||||
return rule.apply(node);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <E extends T> T transformUp(Function<E, ? extends T> rule, final Class<E> typeToken) {
|
||||
// type filtering function
|
||||
return transformUp((t) -> (typeToken.isInstance(t) ? rule.apply((E) t) : t));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected <R extends Function<? super T, ? extends T>> T transformChildren(Function<T, ? extends T> traversalOperation) {
|
||||
boolean childrenChanged = false;
|
||||
|
||||
// stream() could be used but the code is just as complicated without any advantages
|
||||
// furthermore, it would bring in all the associated stream/collector object creation even though in
|
||||
// most cases the immediate tree would be quite small (0,1,2 elements)
|
||||
List<T> transformedChildren = new ArrayList<>(children().size());
|
||||
|
||||
for (T child : children) {
|
||||
T next = traversalOperation.apply(child);
|
||||
if (!child.equals(next)) {
|
||||
childrenChanged = true;
|
||||
}
|
||||
else {
|
||||
// use the initial value
|
||||
next = child;
|
||||
}
|
||||
transformedChildren.add(next);
|
||||
}
|
||||
|
||||
return (childrenChanged ? replaceChildren(transformedChildren) : (T) this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the children of this node.
|
||||
*/
|
||||
public abstract T replaceChildren(List<T> newChildren);
|
||||
|
||||
//
|
||||
// transform the node properties and use the tree only for navigation
|
||||
//
|
||||
|
||||
public <E> T transformPropertiesOnly(Function<? super E, ? extends E> rule, Class<E> typeToken) {
|
||||
return transformNodeProps(rule, typeToken);
|
||||
}
|
||||
|
||||
public <E> T transformPropertiesDown(Function<? super E, ? extends E> rule, Class<E> typeToken) {
|
||||
return transformDown(t -> t.transformNodeProps(rule, typeToken));
|
||||
}
|
||||
|
||||
public <E> T transformPropertiesUp(Function<? super E, ? extends E> rule, Class<E> typeToken) {
|
||||
return transformUp(t -> t.transformNodeProps(rule, typeToken));
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform this node's properties.
|
||||
* <p>
|
||||
* This always returns something of the same type as the current
|
||||
* node but since {@link Node} doesn't have a {@code SelfT} parameter
|
||||
* we return the closest thing we do have: {@code T}, which is the
|
||||
* root of the hierarchy for this node.
|
||||
*/
|
||||
protected final <E> T transformNodeProps(Function<? super E, ? extends E> rule, Class<E> typeToken) {
|
||||
return info().transform(rule, typeToken);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the information about this node.
|
||||
*/
|
||||
protected abstract NodeInfo<? extends T> info();
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(children);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Node<?> other = (Node<?>) obj;
|
||||
return Objects.equals(children(), other.children());
|
||||
}
|
||||
|
||||
public String nodeName() {
|
||||
return getClass().getSimpleName();
|
||||
}
|
||||
|
||||
/**
|
||||
* The values of all the properties that are important
|
||||
* to this {@link Node}.
|
||||
*/
|
||||
public List<Object> nodeProperties() {
|
||||
return info().properties();
|
||||
}
|
||||
|
||||
public String nodeString() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(nodeName());
|
||||
sb.append("[");
|
||||
sb.append(propertiesToString(true));
|
||||
sb.append("]");
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return treeString(new StringBuilder(), 0, new BitSet()).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Render this {@link Node} as a tree like
|
||||
* <pre>
|
||||
* {@code
|
||||
* Project[[i{f}#0]]
|
||||
* \_Filter[i{f}#1]
|
||||
* \_SubQueryAlias[test]
|
||||
* \_EsRelation[test][i{f}#2]
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
final StringBuilder treeString(StringBuilder sb, int depth, BitSet hasParentPerDepth) {
|
||||
if (depth > 0) {
|
||||
// draw children
|
||||
for (int column = 0; column < depth; column++) {
|
||||
if (hasParentPerDepth.get(column)) {
|
||||
sb.append("|");
|
||||
// if not the last elder, add padding (since each column takes two chars: "|_" or "\_")
|
||||
if (column < depth - 1) {
|
||||
sb.append(" ");
|
||||
}
|
||||
}
|
||||
else {
|
||||
// if the child has no parent (elder on the previous level), it means it is the last sibling
|
||||
sb.append((column == depth - 1) ? "\\" : " ");
|
||||
}
|
||||
}
|
||||
|
||||
sb.append("_");
|
||||
}
|
||||
|
||||
sb.append(nodeString());
|
||||
|
||||
List<T> children = children();
|
||||
if (!children.isEmpty()) {
|
||||
sb.append("\n");
|
||||
}
|
||||
for (int i = 0; i < children.size(); i++) {
|
||||
T t = children.get(i);
|
||||
hasParentPerDepth.set(depth, i < children.size() - 1);
|
||||
t.treeString(sb, depth + 1, hasParentPerDepth);
|
||||
if (i < children.size() - 1) {
|
||||
sb.append("\n");
|
||||
}
|
||||
}
|
||||
return sb;
|
||||
}
|
||||
|
||||
/**
|
||||
* Render the properties of this {@link Node} one by
|
||||
* one like {@code foo bar baz}. These go inside the
|
||||
* {@code [} and {@code ]} of the output of {@link #treeString}.
|
||||
*/
|
||||
public String propertiesToString(boolean skipIfChild) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
List<?> children = children();
|
||||
// eliminate children (they are rendered as part of the tree)
|
||||
int remainingProperties = TO_STRING_MAX_PROP;
|
||||
int maxWidth = 0;
|
||||
boolean needsComma = false;
|
||||
|
||||
List<Object> props = nodeProperties();
|
||||
for (Object prop : props) {
|
||||
// consider a property if it is not ignored AND
|
||||
// it's not a child (optional)
|
||||
if (!(skipIfChild && (children.contains(prop) || children.equals(prop)))) {
|
||||
if (remainingProperties-- < 0) {
|
||||
sb.append("...").append(props.size() - TO_STRING_MAX_PROP).append("fields not shown");
|
||||
break;
|
||||
}
|
||||
|
||||
if (needsComma) {
|
||||
sb.append(",");
|
||||
}
|
||||
String stringValue = Objects.toString(prop);
|
||||
if (maxWidth + stringValue.length() > TO_STRING_MAX_WIDTH) {
|
||||
int cutoff = Math.max(0, TO_STRING_MAX_WIDTH - maxWidth);
|
||||
sb.append(stringValue.substring(0, cutoff));
|
||||
sb.append("\n");
|
||||
stringValue = stringValue.substring(cutoff);
|
||||
maxWidth = 0;
|
||||
}
|
||||
maxWidth += stringValue.length();
|
||||
sb.append(stringValue);
|
||||
|
||||
needsComma = true;
|
||||
}
|
||||
}
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
|
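Illustrative sketch only, not part of the commit: a hypothetical leaf node (the Literal name and its single value property are invented) showing how the recursive generic, replaceChildren() and info() fit together, and why subclasses should fold their properties into equals() so the transform* methods can detect changes.

import java.util.Collections;
import java.util.List;
import java.util.Objects;

final class Literal extends Node<Literal> {
    private final Object value;

    Literal(Source source, Object value) {
        super(source, Collections.emptyList());   // leaf node: no children
        this.value = value;
    }

    @Override
    public Literal replaceChildren(List<Literal> newChildren) {
        return this;                              // nothing to replace on a leaf
    }

    @Override
    protected NodeInfo<Literal> info() {
        return NodeInfo.create(this, Literal::new, value);  // single-property variant (see NodeInfo below)
    }

    @Override
    public boolean equals(Object obj) {
        return super.equals(obj) && Objects.equals(value, ((Literal) obj).value);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), value);
    }
}

// pre-order traversal and an identity transformation that keeps the same instance
Literal one = new Literal(Source.EMPTY, 1);
one.forEachDown(n -> System.out.println(n.nodeString()));  // prints: Literal[1]
Literal same = one.transformDown(n -> n);                   // no change -> same tree returned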
@@ -0,0 +1,451 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.eql.tree;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Function;
|
||||
|
||||
import static java.util.Collections.emptyList;
|
||||
import static java.util.Collections.unmodifiableList;
|
||||
|
||||
/**
|
||||
* Information about a {@link Node}.
|
||||
* <p>
|
||||
* All the uses of this are fairly non-OO and we're looking
|
||||
* for ways to use this less and less.
|
||||
* <p>
|
||||
* The implementations of this class are super copy-and-paste-ish
|
||||
* but they are better than the sneaky reflection tricks we had
|
||||
* earlier. Still terrifying.
|
||||
*
|
||||
* @param <T> actual subclass of node that produced this {@linkplain NodeInfo}
|
||||
*/
|
||||
public abstract class NodeInfo<T extends Node<?>> {
|
||||
protected final T node;
|
||||
|
||||
private NodeInfo(T node) {
|
||||
this.node = node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Values for all properties on the instance that created
|
||||
* this {@linkplain NodeInfo}.
|
||||
*/
|
||||
public final List<Object> properties() {
|
||||
return unmodifiableList(innerProperties());
|
||||
}
|
||||
protected abstract List<Object> innerProperties();
|
||||
|
||||
/**
|
||||
* Transform the properties on {@code node}, returning a new instance
|
||||
* of {@code N} if any properties change.
|
||||
*/
|
||||
final <E> T transform(Function<? super E, ? extends E> rule, Class<E> typeToken) {
|
||||
List<?> children = node.children();
|
||||
|
||||
Function<Object, Object> realRule = p -> {
|
||||
if (p != children && false == children.contains(p)
|
||||
&& (p == null || typeToken.isInstance(p))) {
|
||||
return rule.apply(typeToken.cast(p));
|
||||
}
|
||||
return p;
|
||||
};
|
||||
return innerTransform(realRule);
|
||||
}
|
||||
protected abstract T innerTransform(Function<Object, Object> rule);
|
||||
|
||||
/**
|
||||
* Builds a {@link NodeInfo} for Nodes without any properties.
|
||||
*/
|
||||
public static <T extends Node<?>> NodeInfo<T> create(T n) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return emptyList();
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
return node;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1> NodeInfo<T> create(
|
||||
T n, BiFunction<Source, P1, T> ctor,
|
||||
P1 p1) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2> NodeInfo<T> create(
|
||||
T n, NodeCtor2<P1, P2, T> ctor,
|
||||
P1 p1, P2 p2) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor2<P1, P2, T> {
|
||||
T apply(Source l, P1 p1, P2 p2);
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2, P3> NodeInfo<T> create(
|
||||
T n, NodeCtor3<P1, P2, P3, T> ctor,
|
||||
P1 p1, P2 p2, P3 p3) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2, p3);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
@SuppressWarnings("unchecked")
|
||||
P3 newP3 = (P3) rule.apply(p3);
|
||||
same &= Objects.equals(p3, newP3);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2, newP3);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor3<P1, P2, P3, T> {
|
||||
T apply(Source l, P1 p1, P2 p2, P3 p3);
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2, P3, P4> NodeInfo<T> create(
|
||||
T n, NodeCtor4<P1, P2, P3, P4, T> ctor,
|
||||
P1 p1, P2 p2, P3 p3, P4 p4) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2, p3, p4);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
@SuppressWarnings("unchecked")
|
||||
P3 newP3 = (P3) rule.apply(p3);
|
||||
same &= Objects.equals(p3, newP3);
|
||||
@SuppressWarnings("unchecked")
|
||||
P4 newP4 = (P4) rule.apply(p4);
|
||||
same &= Objects.equals(p4, newP4);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor4<P1, P2, P3, P4, T> {
|
||||
T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4);
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2, P3, P4, P5> NodeInfo<T> create(
|
||||
T n, NodeCtor5<P1, P2, P3, P4, P5, T> ctor,
|
||||
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2, p3, p4, p5);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
@SuppressWarnings("unchecked")
|
||||
P3 newP3 = (P3) rule.apply(p3);
|
||||
same &= Objects.equals(p3, newP3);
|
||||
@SuppressWarnings("unchecked")
|
||||
P4 newP4 = (P4) rule.apply(p4);
|
||||
same &= Objects.equals(p4, newP4);
|
||||
@SuppressWarnings("unchecked")
|
||||
P5 newP5 = (P5) rule.apply(p5);
|
||||
same &= Objects.equals(p5, newP5);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor5<P1, P2, P3, P4, P5, T> {
|
||||
T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5);
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2, P3, P4, P5, P6> NodeInfo<T> create(
|
||||
T n, NodeCtor6<P1, P2, P3, P4, P5, P6, T> ctor,
|
||||
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2, p3, p4, p5, p6);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
@SuppressWarnings("unchecked")
|
||||
P3 newP3 = (P3) rule.apply(p3);
|
||||
same &= Objects.equals(p3, newP3);
|
||||
@SuppressWarnings("unchecked")
|
||||
P4 newP4 = (P4) rule.apply(p4);
|
||||
same &= Objects.equals(p4, newP4);
|
||||
@SuppressWarnings("unchecked")
|
||||
P5 newP5 = (P5) rule.apply(p5);
|
||||
same &= Objects.equals(p5, newP5);
|
||||
@SuppressWarnings("unchecked")
|
||||
P6 newP6 = (P6) rule.apply(p6);
|
||||
same &= Objects.equals(p6, newP6);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor6<P1, P2, P3, P4, P5, P6, T> {
|
||||
T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6);
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2, P3, P4, P5, P6, P7> NodeInfo<T> create(
|
||||
T n, NodeCtor7<P1, P2, P3, P4, P5, P6, P7, T> ctor,
|
||||
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2, p3, p4, p5, p6, p7);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
@SuppressWarnings("unchecked")
|
||||
P3 newP3 = (P3) rule.apply(p3);
|
||||
same &= Objects.equals(p3, newP3);
|
||||
@SuppressWarnings("unchecked")
|
||||
P4 newP4 = (P4) rule.apply(p4);
|
||||
same &= Objects.equals(p4, newP4);
|
||||
@SuppressWarnings("unchecked")
|
||||
P5 newP5 = (P5) rule.apply(p5);
|
||||
same &= Objects.equals(p5, newP5);
|
||||
@SuppressWarnings("unchecked")
|
||||
P6 newP6 = (P6) rule.apply(p6);
|
||||
same &= Objects.equals(p6, newP6);
|
||||
@SuppressWarnings("unchecked")
|
||||
P7 newP7 = (P7) rule.apply(p7);
|
||||
same &= Objects.equals(p7, newP7);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor7<P1, P2, P3, P4, P5, P6, P7, T> {
|
||||
T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7);
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2, P3, P4, P5, P6, P7, P8> NodeInfo<T> create(
|
||||
T n, NodeCtor8<P1, P2, P3, P4, P5, P6, P7, P8, T> ctor,
|
||||
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2, p3, p4, p5, p6, p7, p8);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
@SuppressWarnings("unchecked")
|
||||
P3 newP3 = (P3) rule.apply(p3);
|
||||
same &= Objects.equals(p3, newP3);
|
||||
@SuppressWarnings("unchecked")
|
||||
P4 newP4 = (P4) rule.apply(p4);
|
||||
same &= Objects.equals(p4, newP4);
|
||||
@SuppressWarnings("unchecked")
|
||||
P5 newP5 = (P5) rule.apply(p5);
|
||||
same &= Objects.equals(p5, newP5);
|
||||
@SuppressWarnings("unchecked")
|
||||
P6 newP6 = (P6) rule.apply(p6);
|
||||
same &= Objects.equals(p6, newP6);
|
||||
@SuppressWarnings("unchecked")
|
||||
P7 newP7 = (P7) rule.apply(p7);
|
||||
same &= Objects.equals(p7, newP7);
|
||||
@SuppressWarnings("unchecked")
|
||||
P8 newP8 = (P8) rule.apply(p8);
|
||||
same &= Objects.equals(p8, newP8);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor8<P1, P2, P3, P4, P5, P6, P7, P8, T> {
|
||||
T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8);
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2, P3, P4, P5, P6, P7, P8, P9> NodeInfo<T> create(
|
||||
T n, NodeCtor9<P1, P2, P3, P4, P5, P6, P7, P8, P9, T> ctor,
|
||||
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2, p3, p4, p5, p6, p7, p8, p9);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
@SuppressWarnings("unchecked")
|
||||
P3 newP3 = (P3) rule.apply(p3);
|
||||
same &= Objects.equals(p3, newP3);
|
||||
@SuppressWarnings("unchecked")
|
||||
P4 newP4 = (P4) rule.apply(p4);
|
||||
same &= Objects.equals(p4, newP4);
|
||||
@SuppressWarnings("unchecked")
|
||||
P5 newP5 = (P5) rule.apply(p5);
|
||||
same &= Objects.equals(p5, newP5);
|
||||
@SuppressWarnings("unchecked")
|
||||
P6 newP6 = (P6) rule.apply(p6);
|
||||
same &= Objects.equals(p6, newP6);
|
||||
@SuppressWarnings("unchecked")
|
||||
P7 newP7 = (P7) rule.apply(p7);
|
||||
same &= Objects.equals(p7, newP7);
|
||||
@SuppressWarnings("unchecked")
|
||||
P8 newP8 = (P8) rule.apply(p8);
|
||||
same &= Objects.equals(p8, newP8);
|
||||
@SuppressWarnings("unchecked")
|
||||
P9 newP9 = (P9) rule.apply(p9);
|
||||
same &= Objects.equals(p9, newP9);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8, newP9);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor9<P1, P2, P3, P4, P5, P6, P7, P8, P9, T> {
|
||||
T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9);
|
||||
}
|
||||
|
||||
public static <T extends Node<?>, P1, P2, P3, P4, P5, P6, P7, P8, P9, P10> NodeInfo<T> create(
|
||||
T n, NodeCtor10<P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, T> ctor,
|
||||
P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9, P10 p10) {
|
||||
return new NodeInfo<T>(n) {
|
||||
@Override
|
||||
protected List<Object> innerProperties() {
|
||||
return Arrays.asList(p1, p2, p3, p4, p5, p6, p7, p8, p9, p10);
|
||||
}
|
||||
|
||||
protected T innerTransform(Function<Object, Object> rule) {
|
||||
boolean same = true;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
P1 newP1 = (P1) rule.apply(p1);
|
||||
same &= Objects.equals(p1, newP1);
|
||||
@SuppressWarnings("unchecked")
|
||||
P2 newP2 = (P2) rule.apply(p2);
|
||||
same &= Objects.equals(p2, newP2);
|
||||
@SuppressWarnings("unchecked")
|
||||
P3 newP3 = (P3) rule.apply(p3);
|
||||
same &= Objects.equals(p3, newP3);
|
||||
@SuppressWarnings("unchecked")
|
||||
P4 newP4 = (P4) rule.apply(p4);
|
||||
same &= Objects.equals(p4, newP4);
|
||||
@SuppressWarnings("unchecked")
|
||||
P5 newP5 = (P5) rule.apply(p5);
|
||||
same &= Objects.equals(p5, newP5);
|
||||
@SuppressWarnings("unchecked")
|
||||
P6 newP6 = (P6) rule.apply(p6);
|
||||
same &= Objects.equals(p6, newP6);
|
||||
@SuppressWarnings("unchecked")
|
||||
P7 newP7 = (P7) rule.apply(p7);
|
||||
same &= Objects.equals(p7, newP7);
|
||||
@SuppressWarnings("unchecked")
|
||||
P8 newP8 = (P8) rule.apply(p8);
|
||||
same &= Objects.equals(p8, newP8);
|
||||
@SuppressWarnings("unchecked")
|
||||
P9 newP9 = (P9) rule.apply(p9);
|
||||
same &= Objects.equals(p9, newP9);
|
||||
@SuppressWarnings("unchecked")
|
||||
P10 newP10 = (P10) rule.apply(p10);
|
||||
same &= Objects.equals(p10, newP10);
|
||||
|
||||
return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8,
|
||||
newP9, newP10);
|
||||
}
|
||||
};
|
||||
}
|
||||
public interface NodeCtor10<P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, T> {
|
||||
T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9, P10 p10);
|
||||
}
|
||||
}
|
|
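Continuing the hypothetical Literal sketch from the Node section above (still illustration only): the point of the create(...) overloads is that innerTransform invokes the constructor reference only when a property actually changes, so untouched trees keep their identity.

Literal number = new Literal(Source.EMPTY, 42);
Literal untouched = number.transformPropertiesDown(s -> s.toUpperCase(java.util.Locale.ROOT), String.class);
// no String property matched -> the rule never fires -> untouched == number (same instance)

Literal name = new Literal(Source.EMPTY, "foo");
Literal renamed = name.transformPropertiesDown(s -> s.toUpperCase(java.util.Locale.ROOT), String.class);
// "foo" became "FOO" -> NodeInfo calls Literal::new with the original source and the new value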
@@ -0,0 +1,56 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.eql.tree;
|
||||
|
||||
public abstract class NodeUtils {
|
||||
public static <A extends Node<A>, B extends Node<B>> String diffString(A left, B right) {
|
||||
return diffString(left.toString(), right.toString());
|
||||
}
|
||||
|
||||
public static String diffString(String left, String right) {
|
||||
// break the strings into lines
|
||||
// then compare each line
|
||||
String[] leftSplit = left.split("\\n");
|
||||
String[] rightSplit = right.split("\\n");
|
||||
|
||||
// find max - we could use streams but autoboxing is not cool
|
||||
int leftMaxPadding = 0;
|
||||
for (String string : leftSplit) {
|
||||
leftMaxPadding = Math.max(string.length(), leftMaxPadding);
|
||||
}
|
||||
|
||||
// try to allocate the buffer upfront - the extra space covers the 3-char column separator on each line
|
||||
StringBuilder sb = new StringBuilder(left.length() + right.length() + Math.max(left.length(), right.length()) * 3);
|
||||
|
||||
boolean leftAvailable = true, rightAvailable = true;
|
||||
for (int leftIndex = 0, rightIndex = 0; leftAvailable || rightAvailable; leftIndex++, rightIndex++) {
|
||||
String leftRow = "", rightRow = leftRow;
|
||||
if (leftIndex < leftSplit.length) {
|
||||
leftRow = leftSplit[leftIndex];
|
||||
}
|
||||
else {
|
||||
leftAvailable = false;
|
||||
}
|
||||
sb.append(leftRow);
|
||||
for (int i = leftRow.length(); i < leftMaxPadding; i++) {
|
||||
sb.append(" ");
|
||||
}
|
||||
// right side still available
|
||||
if (rightIndex < rightSplit.length) {
|
||||
rightRow = rightSplit[rightIndex];
|
||||
}
|
||||
else {
|
||||
rightAvailable = false;
|
||||
}
|
||||
if (leftAvailable || rightAvailable) {
|
||||
sb.append(leftRow.equals(rightRow) ? " = " : " ! ");
|
||||
sb.append(rightRow);
|
||||
sb.append("\n");
|
||||
}
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
|
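A rough illustration of the output shape (the two "trees" are plain strings to keep the example small): diffString pads the left column to its widest line and marks matching rows with '=' and differing rows with '!'.

String diff = NodeUtils.diffString("Filter[a]\n\\_Relation[x]", "Filter[a]\n\\_Relation[y]");
// roughly:
// Filter[a]      =  Filter[a]
// \_Relation[x]  !  \_Relation[y]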
@@ -0,0 +1,58 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.eql.tree;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public final class Source {
|
||||
|
||||
public static final Source EMPTY = new Source(Location.EMPTY, "");
|
||||
|
||||
private final Location location;
|
||||
private final String text;
|
||||
|
||||
public Source(int line, int charPositionInLine, String text) {
|
||||
this(new Location(line, charPositionInLine), text);
|
||||
}
|
||||
|
||||
public Source(Location location, String text) {
|
||||
this.location = location;
|
||||
this.text = text;
|
||||
}
|
||||
|
||||
public Location source() {
|
||||
return location;
|
||||
}
|
||||
|
||||
public String text() {
|
||||
return text;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(location, text);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Source other = (Source) obj;
|
||||
return Objects.equals(location, other.location) && Objects.equals(text, other.text);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return text + location;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.eql.type;
|
||||
|
||||
//TODO: tuple, list of sequence, etc?
|
||||
public enum DataType {
|
||||
|
||||
SCALAR("SCALAR");
|
||||
|
||||
private String resultType;
|
||||
|
||||
DataType(String resultType) {
|
||||
this.resultType = resultType;
|
||||
}
|
||||
|
||||
public String resultType() {
|
||||
return resultType;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,39 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
package org.elasticsearch.xpack.eql.util;
|
||||
|
||||
import org.elasticsearch.xpack.eql.EqlIllegalArgumentException;
|
||||
|
||||
/**
|
||||
* Utility class used for checking various conditions at runtime, inside EQL (hence the specific exception) with
|
||||
* a minimum amount of code.
|
||||
*/
|
||||
public abstract class Check {
|
||||
|
||||
public static void isTrue(boolean expression, String message, Object... values) {
|
||||
if (!expression) {
|
||||
throw new EqlIllegalArgumentException(message, values);
|
||||
}
|
||||
}
|
||||
|
||||
public static void isTrue(boolean expression, String message) {
|
||||
if (!expression) {
|
||||
throw new EqlIllegalArgumentException(message);
|
||||
}
|
||||
}
|
||||
|
||||
public static void notNull(Object object, String message) {
|
||||
if (object == null) {
|
||||
throw new EqlIllegalArgumentException(message);
|
||||
}
|
||||
}
|
||||
|
||||
public static void notNull(Object object, String message, Object... values) {
|
||||
if (object == null) {
|
||||
throw new EqlIllegalArgumentException(message, values);
|
||||
}
|
||||
}
|
||||
}
|
|
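Hypothetical call sites (the names are invented) showing the intended use: fail fast with an EqlIllegalArgumentException when an internal invariant is broken, with the varargs passed through as message arguments.

Check.isTrue(newChildren.size() == children().size(),
    "Expected [{}] children but received [{}]", children().size(), newChildren.size());
Check.notNull(source, "Source must not be null");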
@@ -0,0 +1,85 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.xpack.eql.parser;
|
||||
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.xpack.eql.tree.Source;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Test for checking the overall grammar by throwing a number of valid queries at the parser to see whether any exception is raised.
|
||||
* In time, the queries themselves get to be checked against the actual execution model and eventually against the expected results.
|
||||
*/
|
||||
public class GrammarTests extends ESTestCase {
|
||||
|
||||
public void testGrammar() throws Exception {
|
||||
EqlParser parser = new EqlParser();
|
||||
List<Tuple<String, Integer>> lines = readQueries("/grammar-queries.eql");
|
||||
for (Tuple<String, Integer> line : lines) {
|
||||
String q = line.v1();
|
||||
try {
|
||||
parser.createStatement(q);
|
||||
} catch (ParsingException pe) {
|
||||
if (pe.getErrorMessage().startsWith("Does not know how to handle")) {
|
||||
// ignore for now
|
||||
}
|
||||
else {
|
||||
throw new ParsingException(new Source(pe.getLineNumber() + line.v2() - 1, pe.getColumnNumber(), q),
|
||||
pe.getErrorMessage() + " inside statement <{}>", q);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static List<Tuple<String, Integer>> readQueries(String source) throws Exception {
|
||||
URL url = GrammarTests.class.getResource(source);
|
||||
Objects.requireNonNull(url, "Cannot find resource " + source);
|
||||
|
||||
List<Tuple<String, Integer>> queries = new ArrayList<>();
|
||||
|
||||
StringBuilder query = new StringBuilder();
|
||||
try (BufferedReader reader = new BufferedReader(new InputStreamReader(readFromJarUrl(url), StandardCharsets.UTF_8))) {
|
||||
String line;
|
||||
int lineNumber = 1;
|
||||
|
||||
while ((line = reader.readLine()) != null) {
|
||||
// skip empty lines and comments
|
||||
if (line.isEmpty() == false && line.startsWith("//") == false) {
|
||||
query.append(line);
|
||||
|
||||
if (line.endsWith(";") == true) {
|
||||
query.setLength(query.length() - 1);
|
||||
queries.add(new Tuple<>(query.toString(), lineNumber));
|
||||
query.setLength(0);
|
||||
}
|
||||
}
|
||||
lineNumber++;
|
||||
}
|
||||
}
|
||||
return queries;
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "test reads from jar")
|
||||
private static InputStream readFromJarUrl(URL source) throws IOException {
|
||||
URLConnection con = source.openConnection();
|
||||
// do not cache files (to avoid keeping file handles around)
|
||||
con.setUseCaches(false);
|
||||
return con.getInputStream();
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,95 @@
|
|||
process where process_name == "svchost.exe" and command_line != "* -k *";
|
||||
process where process_name in ('ipconfig.exe', 'netstat.exe', 'systeminfo.exe', 'route.exe');
|
||||
process where subtype.create and wildcard(command_line, "*.ost *", "*.pst *")
|
||||
;
|
||||
|
||||
process where subtype.create and
|
||||
process_name == "attrib.exe" and command_line == "* +h*"
|
||||
;
|
||||
|
||||
file where file_name == "*Library/Preferences/*.plist";
|
||||
|
||||
|
||||
//
|
||||
// Pipes
|
||||
//
|
||||
|
||||
process where true | count;
|
||||
process where true | count process_name;
|
||||
process where true | count parent_process_name, process_name;
|
||||
process where true | unique process_name;
|
||||
process where true | unique process_name, command_line;
|
||||
|
||||
|
||||
network where true
|
||||
| unique destination_address, destination_port
|
||||
| filter timestamp_utc >= "2018-05-01";
|
||||
|
||||
|
||||
process where true | unique_count process_name | filter count < 5;
|
||||
|
||||
process where process_name == "powershell.exe"
|
||||
| unique command_line
|
||||
| head 50
|
||||
;
|
||||
|
||||
security where event_id == 4624
|
||||
| tail 10
|
||||
;
|
||||
|
||||
file where true | sort file_name
|
||||
;
|
||||
|
||||
network where total_out_bytes > 100000000
|
||||
| sort total_out_bytes
|
||||
| tail 5
|
||||
;
|
||||
|
||||
//
|
||||
// Sequences
|
||||
//
|
||||
|
||||
sequence by user_name
|
||||
[process where process_name == "whoami"]
|
||||
[process where process_name == "hostname"]
|
||||
[process where process_name == "ifconfig"]
|
||||
;
|
||||
|
||||
sequence with maxspan=30s
|
||||
[network where destination_port==3389 and event_subtype_full="*_accept_event*"]
|
||||
[security where event_id in (4624, 4625) and logon_type == 10]
|
||||
;
|
||||
|
||||
sequence with maxspan=30s
|
||||
[network where destination_port==3389 and event_subtype_full="*_accept_event"] by source_address
|
||||
[security where event_id in (4624, 4625) and logon_type == 10] by ip_address
|
||||
;
|
||||
|
||||
sequence with maxspan=5m
|
||||
[ file where file_name == "*.exe"] by user_name, file_path
|
||||
[ process where true] by user_name, process_path
|
||||
;
|
||||
|
||||
sequence by user_name with maxspan=5m
|
||||
[ file where file_name == "*.exe"] by file_path
|
||||
[ process where true] by process_path
|
||||
;
|
||||
|
||||
//
|
||||
// Joins
|
||||
//
|
||||
|
||||
join by source_ip, destination_ip
|
||||
[network where destination_port == 3389] // RDP
|
||||
[network where destination_port == 135] // RPC
|
||||
[network where destination_port == 445] // SMB
|
||||
;
|
||||
|
||||
join by pid
|
||||
[process where true]
|
||||
[network where true]
|
||||
[registry where true]
|
||||
[file where true]
|
||||
|
||||
until [process where event_subtype_full == "termination_event"]
|
||||
;
|