EQL: backport updates to 7.x (#51940)

This commit is contained in:
Costin Leau 2020-02-05 16:45:58 +02:00 committed by GitHub
parent 3be70f64d8
commit 6ff0e411a8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
52 changed files with 4152 additions and 1745 deletions

View File

@ -27,6 +27,7 @@ dependencies {
testCompile project(':test:framework')
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
testCompile project(path: xpackModule('security'), configuration: 'testArtifacts')
testCompile project(path: xpackModule('ql'), configuration: 'testArtifacts')
testCompile project(path: ':modules:reindex', configuration: 'runtime')
testCompile project(path: ':modules:parent-join', configuration: 'runtime')
testCompile project(path: ':modules:analysis-common', configuration: 'runtime')

View File

@ -6,9 +6,6 @@
grammar EqlBase;
tokens {
DELIMITER
}
singleStatement
: statement EOF
@ -19,45 +16,54 @@ singleExpression
;
statement
: query (PIPE pipe)*
: query pipe*
;
query
: sequence
| join
| condition
| eventQuery
;
sequenceParams
: WITH (MAXSPAN EQ timeUnit)
;
sequence
: SEQUENCE (by=joinKeys)? (span)?
match+
(UNTIL match)?
: SEQUENCE (by=joinKeys sequenceParams? | sequenceParams by=joinKeys?)?
sequenceTerm sequenceTerm+
(UNTIL sequenceTerm)?
;
join
: JOIN (by=joinKeys)?
match+
(UNTIL match)?
joinTerm joinTerm+
(UNTIL joinTerm)?
;
pipe
: kind=IDENTIFIER (booleanExpression (COMMA booleanExpression)*)?
: PIPE kind=IDENTIFIER (booleanExpression (COMMA booleanExpression)*)?
;
joinKeys
: BY qualifiedNames
;
span
: WITH MAXSPAN EQ DIGIT_IDENTIFIER
: BY expression (COMMA expression)*
;
match
: LB condition RB (by=joinKeys)?
joinTerm
: subquery (by=joinKeys)?
;
sequenceTerm
: subquery (FORK (EQ booleanValue)?)? (by=joinKeys)?
;
subquery
: LB eventQuery RB
;
condition
: event=qualifiedName WHERE expression
eventQuery
: event=identifier WHERE expression
;
expression
@ -66,34 +72,28 @@ expression
booleanExpression
: NOT booleanExpression #logicalNot
| predicated #booleanDefault
| relationship=IDENTIFIER OF subquery #processCheck
| valueExpression #booleanDefault
| left=booleanExpression operator=AND right=booleanExpression #logicalBinary
| left=booleanExpression operator=OR right=booleanExpression #logicalBinary
;
// workaround for:
// https://github.com/antlr/antlr4/issues/780
// https://github.com/antlr/antlr4/issues/781
predicated
: valueExpression predicate?
;
// dedicated calls for each branch are not used to reuse the NOT handling across them
// instead the property kind is used for differentiation
predicate
: NOT? kind=BETWEEN lower=valueExpression AND upper=valueExpression
| NOT? kind=IN LP valueExpression (COMMA valueExpression)* RP
| NOT? kind=IN LP query RP
;
valueExpression
: primaryExpression #valueExpressionDefault
: primaryExpression predicate? #valueExpressionDefault
| operator=(MINUS | PLUS) valueExpression #arithmeticUnary
| left=valueExpression operator=(ASTERISK | SLASH | PERCENT) right=valueExpression #arithmeticBinary
| left=valueExpression operator=(PLUS | MINUS) right=valueExpression #arithmeticBinary
| left=valueExpression comparisonOperator right=valueExpression #comparison
;
// workaround for
// https://github.com/antlr/antlr4/issues/780
// https://github.com/antlr/antlr4/issues/781
predicate
: NOT? kind=IN LP expression (COMMA expression)* RP
;
primaryExpression
: constant #constantDefault
| functionExpression #function
@ -102,14 +102,14 @@ primaryExpression
;
functionExpression
: identifier LP (expression (COMMA expression)*)? RP
: name=IDENTIFIER LP (expression (COMMA expression)*)? RP
;
constant
: NULL #nullLiteral
| number #numericLiteral
| booleanValue #booleanLiteral
| STRING+ #stringLiteral
| string #stringLiteral
;
comparisonOperator
@ -120,26 +120,17 @@ booleanValue
: TRUE | FALSE
;
qualifiedNames
: qualifiedName (COMMA qualifiedName)*
;
qualifiedName
: (identifier DOT)* identifier
: identifier (DOT identifier | LB INTEGER_VALUE+ RB)*
;
identifier
: quoteIdentifier
| unquoteIdentifier
: IDENTIFIER
| ESCAPED_IDENTIFIER
;
quoteIdentifier
: QUOTED_IDENTIFIER #quotedIdentifier
;
unquoteIdentifier
: IDENTIFIER #unquotedIdentifier
| DIGIT_IDENTIFIER #digitIdentifier
timeUnit
: number unit=IDENTIFIER?
;
number
@ -151,31 +142,26 @@ string
: STRING
;
AND: 'AND';
ANY: 'ANY';
ASC: 'ASC';
BETWEEN: 'BETWEEN';
BY: 'BY';
CHILD: 'CHILD';
DESCENDANT: 'DESCENDANT';
EVENT: 'EVENT';
FALSE: 'FALSE';
IN: 'IN';
JOIN: 'JOIN';
MAXSPAN: 'MAXSPAN';
NOT: 'NOT';
NULL: 'NULL';
OF: 'OF';
OR: 'OR';
SEQUENCE: 'SEQUENCE';
TRUE: 'TRUE';
UNTIL: 'UNTIL';
WHERE: 'WHERE';
WITH: 'WITH';
AND: 'and';
BY: 'by';
FALSE: 'false';
FORK: 'fork';
IN: 'in';
JOIN: 'join';
MAXSPAN: 'maxspan';
NOT: 'not';
NULL: 'null';
OF: 'of';
OR: 'or';
SEQUENCE: 'sequence';
TRUE: 'true';
UNTIL: 'until';
WHERE: 'where';
WITH: 'with';
// Operators
EQ : '=' | '==';
NEQ : '<>' | '!=';
NEQ : '!=';
LT : '<';
LTE : '<=';
GT : '>';
@ -194,9 +180,16 @@ LP: '(';
RP: ')';
PIPE: '|';
ESCAPED_IDENTIFIER
: '`' (~'`')* '`'
;
STRING
: '\'' ( ~'\'')* '\''
| '"' ( ~'"' )* '"'
: '\'' ('\\' [btnfr"'\\] | ~[\r\n'\\])* '\''
| '"' ('\\' [btnfr"'\\] | ~[\r\n"\\])* '"'
| '?"' ('\\"' |~["\r\n])* '"'
| '?\'' ('\\\'' |~['\r\n])* '\''
;
INTEGER_VALUE
@ -210,20 +203,13 @@ DECIMAL_VALUE
| DOT DIGIT+ EXPONENT
;
// make @timestamp not require escaping, since @ has no other meaning
IDENTIFIER
: (LETTER | '_') (LETTER | DIGIT | '_' | '@' )*
: (LETTER | '_' | '@') (LETTER | DIGIT | '_')*
;
DIGIT_IDENTIFIER
: DIGIT (LETTER | DIGIT | '_' | '@')+
;
QUOTED_IDENTIFIER
: '"' ( ~'"' | '""' )* '"'
;
fragment EXPONENT
: 'E' [+-]? DIGIT+
: [Ee] [+-]? DIGIT+
;
fragment DIGIT
@ -231,10 +217,10 @@ fragment DIGIT
;
fragment LETTER
: [A-Z]
: [A-Za-z]
;
SIMPLE_COMMENT
LINE_COMMENT
: '//' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN)
;
@ -246,9 +232,12 @@ WS
: [ \r\n\t]+ -> channel(HIDDEN)
;
// Catch-all for anything we can't recognize.
// We use this to be able to ignore and recover all the text
// when splitting statements with DelimiterLexer
/*
UNRECOGNIZED
: .
;
;
*/

View File

@ -1,87 +0,0 @@
AND=1
ANY=2
ASC=3
BETWEEN=4
BY=5
CHILD=6
DESCENDANT=7
EVENT=8
FALSE=9
IN=10
JOIN=11
MAXSPAN=12
NOT=13
NULL=14
OF=15
OR=16
SEQUENCE=17
TRUE=18
UNTIL=19
WHERE=20
WITH=21
EQ=22
NEQ=23
LT=24
LTE=25
GT=26
GTE=27
PLUS=28
MINUS=29
ASTERISK=30
SLASH=31
PERCENT=32
DOT=33
COMMA=34
LB=35
RB=36
LP=37
RP=38
PIPE=39
STRING=40
INTEGER_VALUE=41
DECIMAL_VALUE=42
IDENTIFIER=43
DIGIT_IDENTIFIER=44
QUOTED_IDENTIFIER=45
SIMPLE_COMMENT=46
BRACKETED_COMMENT=47
WS=48
UNRECOGNIZED=49
DELIMITER=50
'AND'=1
'ANY'=2
'ASC'=3
'BETWEEN'=4
'BY'=5
'CHILD'=6
'DESCENDANT'=7
'EVENT'=8
'FALSE'=9
'IN'=10
'JOIN'=11
'MAXSPAN'=12
'NOT'=13
'NULL'=14
'OF'=15
'OR'=16
'SEQUENCE'=17
'TRUE'=18
'UNTIL'=19
'WHERE'=20
'WITH'=21
'<'=24
'<='=25
'>'=26
'>='=27
'+'=28
'-'=29
'*'=30
'/'=31
'%'=32
'.'=33
','=34
'['=35
']'=36
'('=37
')'=38
'|'=39

View File

@ -1,86 +0,0 @@
AND=1
ANY=2
ASC=3
BETWEEN=4
BY=5
CHILD=6
DESCENDANT=7
EVENT=8
FALSE=9
IN=10
JOIN=11
MAXSPAN=12
NOT=13
NULL=14
OF=15
OR=16
SEQUENCE=17
TRUE=18
UNTIL=19
WHERE=20
WITH=21
EQ=22
NEQ=23
LT=24
LTE=25
GT=26
GTE=27
PLUS=28
MINUS=29
ASTERISK=30
SLASH=31
PERCENT=32
DOT=33
COMMA=34
LB=35
RB=36
LP=37
RP=38
PIPE=39
STRING=40
INTEGER_VALUE=41
DECIMAL_VALUE=42
IDENTIFIER=43
DIGIT_IDENTIFIER=44
QUOTED_IDENTIFIER=45
SIMPLE_COMMENT=46
BRACKETED_COMMENT=47
WS=48
UNRECOGNIZED=49
'AND'=1
'ANY'=2
'ASC'=3
'BETWEEN'=4
'BY'=5
'CHILD'=6
'DESCENDANT'=7
'EVENT'=8
'FALSE'=9
'IN'=10
'JOIN'=11
'MAXSPAN'=12
'NOT'=13
'NULL'=14
'OF'=15
'OR'=16
'SEQUENCE'=17
'TRUE'=18
'UNTIL'=19
'WHERE'=20
'WITH'=21
'<'=24
'<='=25
'>'=26
'>='=27
'+'=28
'-'=29
'*'=30
'/'=31
'%'=32
'.'=33
','=34
'['=35
']'=36
'('=37
')'=38
'|'=39

View File

@ -26,6 +26,10 @@ import java.util.Objects;
import java.util.function.Supplier;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.xpack.eql.action.RequestDefaults.FETCH_SIZE;
import static org.elasticsearch.xpack.eql.action.RequestDefaults.FIELD_EVENT_TYPE;
import static org.elasticsearch.xpack.eql.action.RequestDefaults.FIELD_TIMESTAMP;
import static org.elasticsearch.xpack.eql.action.RequestDefaults.IMPLICIT_JOIN_KEY;
public class EqlSearchRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContent {
@ -34,10 +38,10 @@ public class EqlSearchRequest extends ActionRequest implements IndicesRequest.Re
false, true, false);
private QueryBuilder query = null;
private String timestampField = "@timestamp";
private String eventTypeField = "event.category";
private String implicitJoinKeyField = "agent.id";
private int fetchSize = 50;
private String timestampField = FIELD_TIMESTAMP;
private String eventTypeField = FIELD_EVENT_TYPE;
private String implicitJoinKeyField = IMPLICIT_JOIN_KEY;
private int fetchSize = FETCH_SIZE;
private SearchAfterBuilder searchAfterBuilder;
private String rule;

View File

@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.action;
/**
 * Default values shared by EQL search requests.
 */
public final class RequestDefaults {

    private RequestDefaults() {}

    /** Default field holding the event timestamp. */
    public static final String FIELD_TIMESTAMP = "@timestamp";
    /** Default field holding the event type. */
    public static final String FIELD_EVENT_TYPE = "event_type";
    /** Default field used as the implicit join key between events. */
    public static final String IMPLICIT_JOIN_KEY = "agent.id";

    // was `public static int` — a mutable global; made final like the sibling constants
    public static final int FETCH_SIZE = 50;
}

View File

@ -0,0 +1,97 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.analysis;
import org.elasticsearch.xpack.ql.expression.Attribute;
import org.elasticsearch.xpack.ql.expression.FieldAttribute;
import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.ql.type.DataTypes;
import org.elasticsearch.xpack.ql.type.InvalidMappedField;
import org.elasticsearch.xpack.ql.type.UnsupportedEsField;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import static java.util.stream.Collectors.toList;
/**
 * Shared helper methods used by the analyzer rules to resolve attributes
 * against the attributes produced by a plan's children.
 */
public final class AnalysisUtils {

    private AnalysisUtils() {}

    //
    // Shared methods around the analyzer rules
    //

    /**
     * Resolves an unresolved attribute against the given candidate list,
     * disallowing compound (object) types.
     */
    static Attribute resolveAgainstList(UnresolvedAttribute u, Collection<Attribute> attrList) {
        return resolveAgainstList(u, attrList, false);
    }

    /**
     * Resolves {@code u} against {@code attrList}.
     *
     * @return the single matching attribute (possibly replaced by an unresolved
     *         attribute carrying an error message for special fields),
     *         {@code null} when nothing matches, or an unresolved attribute
     *         with an ambiguity message when several candidates match
     */
    static Attribute resolveAgainstList(UnresolvedAttribute u, Collection<Attribute> attrList, boolean allowCompound) {
        List<Attribute> matches = new ArrayList<>();

        // first take into account the qualified version
        boolean qualified = u.qualifier() != null;

        for (Attribute attribute : attrList) {
            // synthetic attributes are not user-referable, so never match them
            if (!attribute.synthetic()) {
                boolean match = qualified ? Objects.equals(u.qualifiedName(), attribute.qualifiedName()) :
                        // if the field is unqualified
                        // first check the names directly
                        (Objects.equals(u.name(), attribute.name())
                                // but also if the qualifier might not be quoted and if there's any ambiguity with nested fields
                                || Objects.equals(u.name(), attribute.qualifiedName()));
                if (match) {
                    // adopt the source location of the unresolved reference
                    matches.add(attribute.withLocation(u.source()));
                }
            }
        }

        // none found
        if (matches.isEmpty()) {
            return null;
        }

        if (matches.size() == 1) {
            return handleSpecialFields(u, matches.get(0), allowCompound);
        }

        // multiple candidates - surface the ambiguity to the user, sorted for stable output
        return u.withUnresolvedMessage(
                "Reference [" + u.qualifiedName() + "] is ambiguous (to disambiguate use quotes or qualifiers); matches any of "
                        + matches.stream().map(a -> "\"" + a.qualifier() + "\".\"" + a.name() + "\"").sorted().collect(toList()));
    }

    /**
     * Replaces a matched attribute with an unresolved one carrying a descriptive
     * error when the field cannot actually be used: invalid (conflicting)
     * mappings, unsupported types, or compound (object) types when
     * {@code allowCompound} is false.
     */
    private static Attribute handleSpecialFields(UnresolvedAttribute u, Attribute named, boolean allowCompound) {
        // if it's a object/compound type, keep it unresolved with a nice error message
        if (named instanceof FieldAttribute) {
            FieldAttribute fa = (FieldAttribute) named;

            // incompatible mappings
            if (fa.field() instanceof InvalidMappedField) {
                named = u.withUnresolvedMessage("Cannot use field [" + fa.name() + "] due to ambiguities being "
                        + ((InvalidMappedField) fa.field()).errorMessage());
            }
            // unsupported types
            else if (DataTypes.isUnsupported(fa.dataType())) {
                UnsupportedEsField unsupportedField = (UnsupportedEsField) fa.field();
                if (unsupportedField.hasInherited()) {
                    // the unsupported type comes from a parent in the mapping hierarchy
                    named = u.withUnresolvedMessage("Cannot use field [" + fa.name() + "] with unsupported type ["
                            + unsupportedField.getOriginalType() + "] " + "in hierarchy (field [" + unsupportedField.getInherited() + "])");
                } else {
                    named = u.withUnresolvedMessage(
                            "Cannot use field [" + fa.name() + "] with unsupported type [" + unsupportedField.getOriginalType() + "]");
                }
            }
            // compound fields
            else if (allowCompound == false && DataTypes.isPrimitive(fa.dataType()) == false) {
                named = u.withUnresolvedMessage(
                        "Cannot use field [" + fa.name() + "] type [" + fa.dataType().typeName() + "] only its subfields");
            }
        }
        return named;
    }
}

View File

@ -0,0 +1,105 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.analysis;
import org.elasticsearch.xpack.ql.expression.Attribute;
import org.elasticsearch.xpack.ql.expression.NamedExpression;
import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.rule.Rule;
import org.elasticsearch.xpack.ql.rule.RuleExecutor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static java.util.Arrays.asList;
import static org.elasticsearch.xpack.eql.analysis.AnalysisUtils.resolveAgainstList;
/**
 * Analyzer: resolves attribute references in the logical plan (bottom-up)
 * and then verifies the result, failing on any remaining unresolved node.
 */
public class Analyzer extends RuleExecutor<LogicalPlan> {

    // NOTE(review): not read by the visible rules - presumably reserved for
    // upcoming function resolution; confirm before removing
    private final FunctionRegistry functionRegistry;
    private final Verifier verifier;

    public Analyzer(FunctionRegistry functionRegistry, Verifier verifier) {
        this.functionRegistry = functionRegistry;
        this.verifier = verifier;
    }

    @Override
    protected Iterable<RuleExecutor<LogicalPlan>.Batch> batches() {
        // single batch for now - only reference resolution
        Batch resolution = new Batch("Resolution",
                new ResolveRefs());

        return asList(resolution);
    }

    /**
     * Runs the resolution batches over the plan and verifies the outcome,
     * throwing a {@code VerificationException} when verification fails.
     */
    public LogicalPlan analyze(LogicalPlan plan) {
        return verify(execute(plan));
    }

    private LogicalPlan verify(LogicalPlan plan) {
        Collection<Failure> failures = verifier.verify(plan);
        if (!failures.isEmpty()) {
            throw new VerificationException(failures);
        }
        return plan;
    }

    /**
     * Resolves {@link UnresolvedAttribute}s against the combined output of the
     * node's children; unresolved references are left in place for later passes.
     */
    private static class ResolveRefs extends AnalyzeRule<LogicalPlan> {

        @Override
        protected LogicalPlan rule(LogicalPlan plan) {
            // if the children are not resolved, there's no way the node can be resolved
            if (!plan.childrenResolved()) {
                return plan;
            }

            // okay, there's a chance so let's get started
            if (log.isTraceEnabled()) {
                log.trace("Attempting to resolve {}", plan.nodeString());
            }

            return plan.transformExpressionsUp(e -> {
                if (e instanceof UnresolvedAttribute) {
                    UnresolvedAttribute u = (UnresolvedAttribute) e;
                    // candidate attributes are everything the children produce
                    List<Attribute> childrenOutput = new ArrayList<>();
                    for (LogicalPlan child : plan.children()) {
                        childrenOutput.addAll(child.output());
                    }
                    NamedExpression named = resolveAgainstList(u, childrenOutput);
                    // if resolved, return it; otherwise keep it in place to be resolved later
                    if (named != null) {
                        if (log.isTraceEnabled()) {
                            log.trace("Resolved {} to {}", u, named);
                        }
                        return named;
                    }
                }
                return e;
            });
        }
    }

    /**
     * Base class for analyzer rules: applies the rule post-order, skipping
     * nodes that are already analyzed (and, by default, already resolved).
     */
    abstract static class AnalyzeRule<SubPlan extends LogicalPlan> extends Rule<SubPlan, LogicalPlan> {

        // transformUp (post-order) - that is first children and then the node
        // but with a twist; only if the tree is not resolved or analyzed
        @Override
        public final LogicalPlan apply(LogicalPlan plan) {
            return plan.transformUp(t -> t.analyzed() || skipResolved() && t.resolved() ? t : rule(t), typeToken());
        }

        @Override
        protected abstract LogicalPlan rule(SubPlan plan);

        // whether already-resolved nodes should be left untouched (default: yes)
        protected boolean skipResolved() {
            return true;
        }
    }
}

View File

@ -0,0 +1,27 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.analysis;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.rule.Rule;
/**
 * Base class for analyzer rules. The rule is applied bottom-up (post-order:
 * children first, then the node itself), skipping nodes already analyzed and
 * - unless {@link #skipResolved()} says otherwise - nodes already resolved.
 */
public abstract class AnalyzerRule<SubPlan extends LogicalPlan> extends Rule<SubPlan, LogicalPlan> {

    // transformUp (post-order) - that is first children and then the node
    // but with a twist; only if the tree is not resolved or analyzed
    @Override
    public final LogicalPlan apply(LogicalPlan plan) {
        return plan.transformUp(node -> {
            boolean leaveAsIs = node.analyzed() || (skipResolved() && node.resolved());
            return leaveAsIs ? node : rule(node);
        }, typeToken());
    }

    @Override
    protected abstract LogicalPlan rule(SubPlan plan);

    /**
     * Whether nodes that are already resolved should be left untouched.
     */
    protected boolean skipResolved() {
        return true;
    }
}

View File

@ -0,0 +1,60 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.analysis;
import org.elasticsearch.xpack.ql.tree.Node;
import java.util.Objects;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
/**
 * A single verification failure: the offending plan/expression node together
 * with a human-readable message describing the problem.
 */
class Failure {

    private final Node<?> node;
    private final String message;

    Failure(Node<?> node, String message) {
        this.node = node;
        this.message = message;
    }

    Node<?> node() {
        return node;
    }

    String message() {
        return message;
    }

    @Override
    public int hashCode() {
        return Objects.hash(message, node);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        Failure that = (Failure) obj;
        return Objects.equals(message, that.message) && Objects.equals(node, that.node);
    }

    @Override
    public String toString() {
        return message;
    }

    /**
     * Factory building a failure with a {@code {}}-style formatted message.
     */
    static Failure fail(Node<?> source, String message, Object... args) {
        return new Failure(source, format(message, args));
    }
}

View File

@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.analysis;
import org.elasticsearch.xpack.ql.index.IndexResolution;
import org.elasticsearch.xpack.ql.plan.logical.EsRelation;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.plan.logical.UnresolvedRelation;
/**
 * Pre-analysis step: replaces every {@link UnresolvedRelation} in the plan
 * with a concrete {@link EsRelation} built from the resolved index, then
 * marks every node as pre-analyzed. Plans already analyzed are left untouched.
 */
public class PreAnalyzer {

    public LogicalPlan preAnalyze(LogicalPlan plan, IndexResolution indices) {
        if (plan.analyzed() == false) {
            // FIXME: includeFrozen needs to be set already
            // NOTE(review): indices.get() is called unconditionally - presumably
            // resolution success is guaranteed upstream; verify against callers
            plan = plan.transformUp(r -> new EsRelation(r.source(), indices.get(), false), UnresolvedRelation.class);
            // flag the (whole) tree so this step is not repeated
            plan.forEachUp(LogicalPlan::setPreAnalyzed);
        }
        return plan;
    }
}

View File

@ -0,0 +1,33 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.analysis;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.eql.EqlClientException;
import org.elasticsearch.xpack.ql.tree.Location;
import org.elasticsearch.xpack.ql.util.StringUtils;
import java.util.Collection;
import java.util.stream.Collectors;
/**
 * Client-facing exception (mapped to HTTP 400) thrown when plan verification
 * reports one or more failures.
 */
public class VerificationException extends EqlClientException {

    protected VerificationException(Collection<Failure> sources) {
        super(asMessage(sources));
    }

    /**
     * Formats all failures into a single multi-line message; each failure line
     * is prefixed with its line:column source location.
     */
    private static String asMessage(Collection<Failure> failures) {
        return failures.stream().map(f -> {
            Location l = f.node().source().source();
            return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message();
        }).collect(Collectors.joining(StringUtils.NEW_LINE, "Found " + failures.size() + " problem(s)\n", StringUtils.EMPTY));
    }

    @Override
    public RestStatus status() {
        return RestStatus.BAD_REQUEST;
    }
}

View File

@ -0,0 +1,108 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.analysis;
import org.elasticsearch.xpack.ql.capabilities.Unresolvable;
import org.elasticsearch.xpack.ql.expression.Attribute;
import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.tree.Node;
import org.elasticsearch.xpack.ql.type.DataTypes;
import org.elasticsearch.xpack.ql.util.StringUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.stream.Collectors.toMap;
import static org.elasticsearch.xpack.eql.analysis.Failure.fail;
/**
 * The verifier checks the analyzed tree for failures and builds a list of them.
 * Inspection runs bottom-up, skipping nodes already analyzed and nodes whose
 * children are themselves unresolved (to avoid noisy cascading errors).
 */
public class Verifier {

    /**
     * Convenience variant returning the failures keyed by their offending node.
     */
    public Map<Node<?>, String> verifyFailures(LogicalPlan plan) {
        Collection<Failure> failures = verify(plan);
        return failures.stream().collect(toMap(Failure::node, Failure::message));
    }

    /**
     * Collects a failure for every unresolved plan node or expression (adding
     * name suggestions for unresolved attributes) and for every expression
     * whose type resolution failed.
     */
    Collection<Failure> verify(LogicalPlan plan) {
        // LinkedHashSet: de-duplicate while keeping discovery order
        Set<Failure> failures = new LinkedHashSet<>();

        // start bottom-up
        plan.forEachUp(p -> {
            if (p.analyzed()) {
                return;
            }

            // if the children are unresolved, so will this node; counting it will only add noise
            if (p.childrenResolved() == false) {
                return;
            }

            Set<Failure> localFailures = new LinkedHashSet<>();

            if (p instanceof Unresolvable) {
                localFailures.add(fail(p, ((Unresolvable) p).unresolvedMessage()));
            } else {
                p.forEachExpressions(e -> {
                    // everything is fine, skip expression
                    if (e.resolved()) {
                        return;
                    }

                    e.forEachUp(ae -> {
                        // we're only interested in the children
                        if (ae.childrenResolved() == false) {
                            return;
                        }

                        if (ae instanceof Unresolvable) {
                            // handle Attributes differently to provide more context
                            if (ae instanceof UnresolvedAttribute) {
                                UnresolvedAttribute ua = (UnresolvedAttribute) ae;
                                // only work out the synonyms for raw unresolved attributes
                                if (ua.customMessage() == false) {
                                    boolean useQualifier = ua.qualifier() != null;
                                    List<String> potentialMatches = new ArrayList<>();
                                    for (Attribute a : p.inputSet()) {
                                        String nameCandidate = useQualifier ? a.qualifiedName() : a.name();
                                        // add only primitives (object types would only result in another error)
                                        if (DataTypes.isUnsupported(a.dataType()) == false && DataTypes.isPrimitive(a.dataType())) {
                                            potentialMatches.add(nameCandidate);
                                        }
                                    }

                                    // suggest similarly-named fields in the error message
                                    List<String> matches = StringUtils.findSimilar(ua.qualifiedName(), potentialMatches);
                                    if (matches.isEmpty() == false) {
                                        ae = ua.withUnresolvedMessage(UnresolvedAttribute.errorMessage(ua.qualifiedName(), matches));
                                    }
                                }
                            }

                            localFailures.add(fail(ae, ((Unresolvable) ae).unresolvedMessage()));
                            return;
                        }

                        // type resolution
                        if (ae.typeResolved().unresolved()) {
                            localFailures.add(fail(ae, ae.typeResolved().message()));
                        }
                    });
                });
            }
            failures.addAll(localFailures);
        });

        return failures;
    }
}

View File

@ -0,0 +1,58 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.execution;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xpack.eql.analysis.Analyzer;
import org.elasticsearch.xpack.eql.analysis.PreAnalyzer;
import org.elasticsearch.xpack.eql.analysis.Verifier;
import org.elasticsearch.xpack.eql.optimizer.Optimizer;
import org.elasticsearch.xpack.eql.parser.ParserParams;
import org.elasticsearch.xpack.eql.planner.Planner;
import org.elasticsearch.xpack.eql.session.Configuration;
import org.elasticsearch.xpack.eql.session.EqlSession;
import org.elasticsearch.xpack.eql.session.Results;
import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.ql.index.IndexResolver;
import static org.elasticsearch.action.ActionListener.wrap;
/**
 * Entry point for executing EQL queries: wires together the pre-analysis,
 * analysis, optimization and planning components, and creates a fresh
 * {@link EqlSession} per request.
 */
public class PlanExecutor {
    private final Client client;
    private final NamedWriteableRegistry writableRegistry;

    private final IndexResolver indexResolver;
    private final FunctionRegistry functionRegistry;

    private final PreAnalyzer preAnalyzer;
    private final Analyzer analyzer;
    private final Optimizer optimizer;
    private final Planner planner;

    public PlanExecutor(Client client, IndexResolver indexResolver, NamedWriteableRegistry writeableRegistry) {
        this.client = client;
        this.writableRegistry = writeableRegistry;
        this.indexResolver = indexResolver;
        // NOTE(review): no function registry is wired in yet - the Analyzer is
        // built with null; confirm this is intentional until functions land
        this.functionRegistry = null;

        this.preAnalyzer = new PreAnalyzer();
        this.analyzer = new Analyzer(functionRegistry, new Verifier());
        this.optimizer = new Optimizer();
        this.planner = new Planner();
    }

    private EqlSession newSession(Configuration cfg) {
        return new EqlSession(client, cfg, indexResolver, preAnalyzer, analyzer, optimizer, planner, this);
    }

    /**
     * Runs the given EQL query in a new session, notifying the listener with
     * either the results or the failure.
     */
    public void eql(Configuration cfg, String eql, ParserParams parserParams, ActionListener<Results> listener) {
        newSession(cfg).eql(eql, parserParams, wrap(listener::onResponse, listener::onFailure));
    }
}

View File

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.expression.function;
import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry;
/**
 * EQL-specific function registry. Currently registers nothing of its own and
 * relies entirely on the behavior inherited from {@link FunctionRegistry}.
 */
public class EqlFunctionRegistry extends FunctionRegistry {

    public EqlFunctionRegistry() {
    }
}

View File

@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.optimizer;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.rule.RuleExecutor;
import static java.util.Collections.emptyList;
/**
 * Logical plan optimizer. No optimization rules are registered yet, so running
 * it only executes an empty rule set over plans not yet optimized.
 */
public class Optimizer extends RuleExecutor<LogicalPlan> {

    public LogicalPlan optimize(LogicalPlan verified) {
        // already-optimized plans are returned untouched
        if (verified.optimized()) {
            return verified;
        }
        return execute(verified);
    }

    @Override
    protected Iterable<RuleExecutor<LogicalPlan>.Batch> batches() {
        return emptyList();
    }
}

View File

@ -10,19 +10,23 @@ import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.tree.Location;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.util.Check;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Base parsing visitor class offering utility methods.
*/
abstract class AbstractBuilder extends EqlBaseBaseVisitor<Object> {
private static final Pattern slashPattern = Pattern.compile("\\\\.");
@Override
public Object visit(ParseTree tree) {
Object result = super.visit(tree);
@ -42,12 +46,12 @@ abstract class AbstractBuilder extends EqlBaseBaseVisitor<Object> {
type.getSimpleName(), (result != null ? result.getClass().getSimpleName() : "null"));
}
protected Expression expression(ParseTree ctx) {
return typedParsing(ctx, Expression.class);
protected LogicalPlan plan(ParseTree ctx) {
return typedParsing(ctx, LogicalPlan.class);
}
protected List<Expression> expressions(List<? extends ParserRuleContext> ctxs) {
return visitList(ctxs, Expression.class);
protected List<LogicalPlan> plans(List<? extends ParserRuleContext> ctxs) {
return visitList(ctxs, LogicalPlan.class);
}
protected <T> List<T> visitList(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
@ -111,16 +115,62 @@ abstract class AbstractBuilder extends EqlBaseBaseVisitor<Object> {
return node == null ? null : node.getText();
}
/**
* Extracts the actual unescaped string (literal) value of a terminal node.
*/
static String string(TerminalNode node) {
return node == null ? null : unquoteString(node.getText());
}
static String unquoteString(String text) {
public static String unquoteString(String text) {
// remove leading and trailing ' for strings and also eliminate escaped single quotes
return text == null ? null : text.substring(1, text.length() - 1).replace("''", "'");
if (text == null) {
return null;
}
// unescaped strings can be interpreted directly
if (text.startsWith("?")) {
return text.substring(2, text.length() - 1);
}
text = text.substring(1, text.length() - 1);
StringBuffer resultString = new StringBuffer();
Matcher regexMatcher = slashPattern.matcher(text);
while (regexMatcher.find()) {
String source = regexMatcher.group();
String replacement;
switch (source) {
case "\\t":
replacement = "\t";
break;
case "\\b":
replacement = "\b";
break;
case "\\f":
replacement = "\f";
break;
case "\\n":
replacement = "\n";
break;
case "\\r":
replacement = "\r";
break;
case "\\\"":
replacement = "\"";
break;
case "\\'":
replacement = "'";
break;
case "\\\\":
// will be interpreted as regex, so we have to escape it
replacement = "\\\\";
break;
default:
// unknown escape sequence, pass through as-is
replacement = source;
}
regexMatcher.appendReplacement(resultString, replacement);
}
regexMatcher.appendTail(resultString);
return resultString.toString();
}
@Override
@ -128,4 +178,5 @@ abstract class AbstractBuilder extends EqlBaseBaseVisitor<Object> {
Source source = source(node);
throw new ParsingException(source, "Does not know how to handle {}", source.text());
}
}
}

View File

@ -7,11 +7,16 @@
package org.elasticsearch.xpack.eql.parser;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.SingleStatementContext;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
public class AstBuilder extends ExpressionBuilder {
public class AstBuilder extends LogicalPlanBuilder {
AstBuilder(ParserParams params) {
super(params);
}
@Override
public Object visitSingleStatement(SingleStatementContext ctx) {
return expression(ctx.statement());
public LogicalPlan visitSingleStatement(SingleStatementContext ctx) {
return plan(ctx.statement());
}
}
}

View File

@ -1,45 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.parser;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.IntStream;
import java.util.Locale;
// extension of ANTLR that does the upper-casing once for the whole stream
// the ugly part is that it has to duplicate LA method
// This approach is the official solution from the ANTLR authors
// in that it's both faster and easier than having a dedicated lexer
// see https://github.com/antlr/antlr4/issues/1002
// extension of ANTLR that does the upper-casing once for the whole stream
// the ugly part is that it has to duplicate LA method
// This approach is the official solution from the ANTLR authors
// in that it's both faster and easier than having a dedicated lexer
// see https://github.com/antlr/antlr4/issues/1002
class CaseInsensitiveStream extends ANTLRInputStream {

    // upper-cased copy of the input; LA() reads from this instead of the raw data
    protected char[] uppedChars;

    CaseInsensitiveStream(String input) {
        super(input);
        // Locale.ROOT keeps the casing locale-independent
        this.uppedChars = input.toUpperCase(Locale.ROOT).toCharArray();
    }

    // this part is copied from ANTLRInputStream
    @Override
    public int LA(int i) {
        if (i == 0) {
            return 0; // undefined
        }
        if (i < 0) {
            // negative lookahead: LA(-1) maps to the char just before position p
            i++;
            if ((p + i - 1) < 0) {
                return IntStream.EOF; // before the start of the stream
            }
        }

        if ((p + i - 1) >= n) {
            return IntStream.EOF; // past the end of the stream
        }
        // the only change versus the parent: read from the upper-cased buffer
        return uppedChars[p + i - 1];
    }
}

View File

@ -59,6 +59,18 @@ class EqlBaseBaseListener implements EqlBaseListener {
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQuery(EqlBaseParser.QueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSequenceParams(EqlBaseParser.SequenceParamsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSequenceParams(EqlBaseParser.SequenceParamsContext ctx) { }
/**
* {@inheritDoc}
*
@ -112,37 +124,49 @@ class EqlBaseBaseListener implements EqlBaseListener {
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSpan(EqlBaseParser.SpanContext ctx) { }
@Override public void enterJoinTerm(EqlBaseParser.JoinTermContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSpan(EqlBaseParser.SpanContext ctx) { }
@Override public void exitJoinTerm(EqlBaseParser.JoinTermContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMatch(EqlBaseParser.MatchContext ctx) { }
@Override public void enterSequenceTerm(EqlBaseParser.SequenceTermContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMatch(EqlBaseParser.MatchContext ctx) { }
@Override public void exitSequenceTerm(EqlBaseParser.SequenceTermContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCondition(EqlBaseParser.ConditionContext ctx) { }
@Override public void enterSubquery(EqlBaseParser.SubqueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCondition(EqlBaseParser.ConditionContext ctx) { }
@Override public void exitSubquery(EqlBaseParser.SubqueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEventQuery(EqlBaseParser.EventQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEventQuery(EqlBaseParser.EventQueryContext ctx) { }
/**
* {@inheritDoc}
*
@ -179,6 +203,18 @@ class EqlBaseBaseListener implements EqlBaseListener {
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterProcessCheck(EqlBaseParser.ProcessCheckContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitProcessCheck(EqlBaseParser.ProcessCheckContext ctx) { }
/**
* {@inheritDoc}
*
@ -191,30 +227,6 @@ class EqlBaseBaseListener implements EqlBaseListener {
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPredicated(EqlBaseParser.PredicatedContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPredicated(EqlBaseParser.PredicatedContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPredicate(EqlBaseParser.PredicateContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPredicate(EqlBaseParser.PredicateContext ctx) { }
/**
* {@inheritDoc}
*
@ -263,6 +275,18 @@ class EqlBaseBaseListener implements EqlBaseListener {
* <p>The default implementation does nothing.</p>
*/
@Override public void exitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPredicate(EqlBaseParser.PredicateContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPredicate(EqlBaseParser.PredicateContext ctx) { }
/**
* {@inheritDoc}
*
@ -395,18 +419,6 @@ class EqlBaseBaseListener implements EqlBaseListener {
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBooleanValue(EqlBaseParser.BooleanValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx) { }
/**
* {@inheritDoc}
*
@ -436,37 +448,13 @@ class EqlBaseBaseListener implements EqlBaseListener {
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx) { }
@Override public void enterTimeUnit(EqlBaseParser.TimeUnitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx) { }
@Override public void exitTimeUnit(EqlBaseParser.TimeUnitContext ctx) { }
/**
* {@inheritDoc}
*

View File

@ -39,6 +39,13 @@ class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBa
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitQuery(EqlBaseParser.QueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSequenceParams(EqlBaseParser.SequenceParamsContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -73,21 +80,28 @@ class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBa
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSpan(EqlBaseParser.SpanContext ctx) { return visitChildren(ctx); }
@Override public T visitJoinTerm(EqlBaseParser.JoinTermContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMatch(EqlBaseParser.MatchContext ctx) { return visitChildren(ctx); }
@Override public T visitSequenceTerm(EqlBaseParser.SequenceTermContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitCondition(EqlBaseParser.ConditionContext ctx) { return visitChildren(ctx); }
@Override public T visitSubquery(EqlBaseParser.SubqueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitEventQuery(EqlBaseParser.EventQueryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -109,6 +123,13 @@ class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBa
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitProcessCheck(EqlBaseParser.ProcessCheckContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -116,20 +137,6 @@ class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBa
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPredicated(EqlBaseParser.PredicatedContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPredicate(EqlBaseParser.PredicateContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -158,6 +165,13 @@ class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBa
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPredicate(EqlBaseParser.PredicateContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -235,13 +249,6 @@ class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBa
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBooleanValue(EqlBaseParser.BooleanValueContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -262,21 +269,7 @@ class EqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements EqlBa
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx) { return visitChildren(ctx); }
@Override public T visitTimeUnit(EqlBaseParser.TimeUnitContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*

View File

@ -17,42 +17,39 @@ class EqlBaseLexer extends Lexer {
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
AND=1, ANY=2, ASC=3, BETWEEN=4, BY=5, CHILD=6, DESCENDANT=7, EVENT=8,
FALSE=9, IN=10, JOIN=11, MAXSPAN=12, NOT=13, NULL=14, OF=15, OR=16, SEQUENCE=17,
TRUE=18, UNTIL=19, WHERE=20, WITH=21, EQ=22, NEQ=23, LT=24, LTE=25, GT=26,
GTE=27, PLUS=28, MINUS=29, ASTERISK=30, SLASH=31, PERCENT=32, DOT=33,
COMMA=34, LB=35, RB=36, LP=37, RP=38, PIPE=39, STRING=40, INTEGER_VALUE=41,
DECIMAL_VALUE=42, IDENTIFIER=43, DIGIT_IDENTIFIER=44, QUOTED_IDENTIFIER=45,
SIMPLE_COMMENT=46, BRACKETED_COMMENT=47, WS=48, UNRECOGNIZED=49;
AND=1, BY=2, FALSE=3, FORK=4, IN=5, JOIN=6, MAXSPAN=7, NOT=8, NULL=9,
OF=10, OR=11, SEQUENCE=12, TRUE=13, UNTIL=14, WHERE=15, WITH=16, EQ=17,
NEQ=18, LT=19, LTE=20, GT=21, GTE=22, PLUS=23, MINUS=24, ASTERISK=25,
SLASH=26, PERCENT=27, DOT=28, COMMA=29, LB=30, RB=31, LP=32, RP=33, PIPE=34,
ESCAPED_IDENTIFIER=35, STRING=36, INTEGER_VALUE=37, DECIMAL_VALUE=38,
IDENTIFIER=39, LINE_COMMENT=40, BRACKETED_COMMENT=41, WS=42;
public static String[] modeNames = {
"DEFAULT_MODE"
};
public static final String[] ruleNames = {
"AND", "ANY", "ASC", "BETWEEN", "BY", "CHILD", "DESCENDANT", "EVENT",
"FALSE", "IN", "JOIN", "MAXSPAN", "NOT", "NULL", "OF", "OR", "SEQUENCE",
"TRUE", "UNTIL", "WHERE", "WITH", "EQ", "NEQ", "LT", "LTE", "GT", "GTE",
"PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "DOT", "COMMA", "LB",
"RB", "LP", "RP", "PIPE", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE",
"IDENTIFIER", "DIGIT_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPONENT", "DIGIT",
"LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED"
"AND", "BY", "FALSE", "FORK", "IN", "JOIN", "MAXSPAN", "NOT", "NULL",
"OF", "OR", "SEQUENCE", "TRUE", "UNTIL", "WHERE", "WITH", "EQ", "NEQ",
"LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT",
"DOT", "COMMA", "LB", "RB", "LP", "RP", "PIPE", "ESCAPED_IDENTIFIER",
"STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "EXPONENT",
"DIGIT", "LETTER", "LINE_COMMENT", "BRACKETED_COMMENT", "WS"
};
private static final String[] _LITERAL_NAMES = {
null, "'AND'", "'ANY'", "'ASC'", "'BETWEEN'", "'BY'", "'CHILD'", "'DESCENDANT'",
"'EVENT'", "'FALSE'", "'IN'", "'JOIN'", "'MAXSPAN'", "'NOT'", "'NULL'",
"'OF'", "'OR'", "'SEQUENCE'", "'TRUE'", "'UNTIL'", "'WHERE'", "'WITH'",
null, null, "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'",
"'%'", "'.'", "','", "'['", "']'", "'('", "')'", "'|'"
null, "'and'", "'by'", "'false'", "'fork'", "'in'", "'join'", "'maxspan'",
"'not'", "'null'", "'of'", "'or'", "'sequence'", "'true'", "'until'",
"'where'", "'with'", null, "'!='", "'<'", "'<='", "'>'", "'>='", "'+'",
"'-'", "'*'", "'/'", "'%'", "'.'", "','", "'['", "']'", "'('", "')'",
"'|'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "AND", "ANY", "ASC", "BETWEEN", "BY", "CHILD", "DESCENDANT", "EVENT",
"FALSE", "IN", "JOIN", "MAXSPAN", "NOT", "NULL", "OF", "OR", "SEQUENCE",
"TRUE", "UNTIL", "WHERE", "WITH", "EQ", "NEQ", "LT", "LTE", "GT", "GTE",
"PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "DOT", "COMMA", "LB",
"RB", "LP", "RP", "PIPE", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE",
"IDENTIFIER", "DIGIT_IDENTIFIER", "QUOTED_IDENTIFIER", "SIMPLE_COMMENT",
"BRACKETED_COMMENT", "WS", "UNRECOGNIZED"
null, "AND", "BY", "FALSE", "FORK", "IN", "JOIN", "MAXSPAN", "NOT", "NULL",
"OF", "OR", "SEQUENCE", "TRUE", "UNTIL", "WHERE", "WITH", "EQ", "NEQ",
"LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT",
"DOT", "COMMA", "LB", "RB", "LP", "RP", "PIPE", "ESCAPED_IDENTIFIER",
"STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "LINE_COMMENT",
"BRACKETED_COMMENT", "WS"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
@ -109,151 +106,140 @@ class EqlBaseLexer extends Lexer {
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\63\u01a2\b\1\4\2"+
"\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+
"\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
"\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t"+
" \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t"+
"+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+
"\t\64\4\65\t\65\3\2\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\5\3"+
"\5\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b"+
"\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3"+
"\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r"+
"\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3"+
"\20\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3"+
"\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3"+
"\25\3\25\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\5\27\u00e1\n\27\3\30"+
"\3\30\3\30\3\30\5\30\u00e7\n\30\3\31\3\31\3\32\3\32\3\32\3\33\3\33\3\34"+
"\3\34\3\34\3\35\3\35\3\36\3\36\3\37\3\37\3 \3 \3!\3!\3\"\3\"\3#\3#\3$"+
"\3$\3%\3%\3&\3&\3\'\3\'\3(\3(\3)\3)\7)\u010d\n)\f)\16)\u0110\13)\3)\3"+
")\3)\7)\u0115\n)\f)\16)\u0118\13)\3)\5)\u011b\n)\3*\6*\u011e\n*\r*\16"+
"*\u011f\3+\6+\u0123\n+\r+\16+\u0124\3+\3+\7+\u0129\n+\f+\16+\u012c\13"+
"+\3+\3+\6+\u0130\n+\r+\16+\u0131\3+\6+\u0135\n+\r+\16+\u0136\3+\3+\7+"+
"\u013b\n+\f+\16+\u013e\13+\5+\u0140\n+\3+\3+\3+\3+\6+\u0146\n+\r+\16+"+
"\u0147\3+\3+\5+\u014c\n+\3,\3,\5,\u0150\n,\3,\3,\3,\7,\u0155\n,\f,\16"+
",\u0158\13,\3-\3-\3-\3-\6-\u015e\n-\r-\16-\u015f\3.\3.\3.\3.\7.\u0166"+
"\n.\f.\16.\u0169\13.\3.\3.\3/\3/\5/\u016f\n/\3/\6/\u0172\n/\r/\16/\u0173"+
"\3\60\3\60\3\61\3\61\3\62\3\62\3\62\3\62\7\62\u017e\n\62\f\62\16\62\u0181"+
"\13\62\3\62\5\62\u0184\n\62\3\62\5\62\u0187\n\62\3\62\3\62\3\63\3\63\3"+
"\63\3\63\3\63\7\63\u0190\n\63\f\63\16\63\u0193\13\63\3\63\3\63\3\63\3"+
"\63\3\63\3\64\6\64\u019b\n\64\r\64\16\64\u019c\3\64\3\64\3\65\3\65\3\u0191"+
"\2\66\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35"+
"\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36"+
";\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\2_\2a\2c\60e\61g\62i\63\3\2\n"+
"\3\2))\3\2$$\4\2BBaa\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17"+
"\17\"\"\u01bf\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2"+
"\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2"+
"\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2"+
"\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2"+
"\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3"+
"\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2"+
"\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2"+
"S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2c\3\2\2\2\2e\3"+
"\2\2\2\2g\3\2\2\2\2i\3\2\2\2\3k\3\2\2\2\5o\3\2\2\2\7s\3\2\2\2\tw\3\2\2"+
"\2\13\177\3\2\2\2\r\u0082\3\2\2\2\17\u0088\3\2\2\2\21\u0093\3\2\2\2\23"+
"\u0099\3\2\2\2\25\u009f\3\2\2\2\27\u00a2\3\2\2\2\31\u00a7\3\2\2\2\33\u00af"+
"\3\2\2\2\35\u00b3\3\2\2\2\37\u00b8\3\2\2\2!\u00bb\3\2\2\2#\u00be\3\2\2"+
"\2%\u00c7\3\2\2\2\'\u00cc\3\2\2\2)\u00d2\3\2\2\2+\u00d8\3\2\2\2-\u00e0"+
"\3\2\2\2/\u00e6\3\2\2\2\61\u00e8\3\2\2\2\63\u00ea\3\2\2\2\65\u00ed\3\2"+
"\2\2\67\u00ef\3\2\2\29\u00f2\3\2\2\2;\u00f4\3\2\2\2=\u00f6\3\2\2\2?\u00f8"+
"\3\2\2\2A\u00fa\3\2\2\2C\u00fc\3\2\2\2E\u00fe\3\2\2\2G\u0100\3\2\2\2I"+
"\u0102\3\2\2\2K\u0104\3\2\2\2M\u0106\3\2\2\2O\u0108\3\2\2\2Q\u011a\3\2"+
"\2\2S\u011d\3\2\2\2U\u014b\3\2\2\2W\u014f\3\2\2\2Y\u0159\3\2\2\2[\u0161"+
"\3\2\2\2]\u016c\3\2\2\2_\u0175\3\2\2\2a\u0177\3\2\2\2c\u0179\3\2\2\2e"+
"\u018a\3\2\2\2g\u019a\3\2\2\2i\u01a0\3\2\2\2kl\7C\2\2lm\7P\2\2mn\7F\2"+
"\2n\4\3\2\2\2op\7C\2\2pq\7P\2\2qr\7[\2\2r\6\3\2\2\2st\7C\2\2tu\7U\2\2"+
"uv\7E\2\2v\b\3\2\2\2wx\7D\2\2xy\7G\2\2yz\7V\2\2z{\7Y\2\2{|\7G\2\2|}\7"+
"G\2\2}~\7P\2\2~\n\3\2\2\2\177\u0080\7D\2\2\u0080\u0081\7[\2\2\u0081\f"+
"\3\2\2\2\u0082\u0083\7E\2\2\u0083\u0084\7J\2\2\u0084\u0085\7K\2\2\u0085"+
"\u0086\7N\2\2\u0086\u0087\7F\2\2\u0087\16\3\2\2\2\u0088\u0089\7F\2\2\u0089"+
"\u008a\7G\2\2\u008a\u008b\7U\2\2\u008b\u008c\7E\2\2\u008c\u008d\7G\2\2"+
"\u008d\u008e\7P\2\2\u008e\u008f\7F\2\2\u008f\u0090\7C\2\2\u0090\u0091"+
"\7P\2\2\u0091\u0092\7V\2\2\u0092\20\3\2\2\2\u0093\u0094\7G\2\2\u0094\u0095"+
"\7X\2\2\u0095\u0096\7G\2\2\u0096\u0097\7P\2\2\u0097\u0098\7V\2\2\u0098"+
"\22\3\2\2\2\u0099\u009a\7H\2\2\u009a\u009b\7C\2\2\u009b\u009c\7N\2\2\u009c"+
"\u009d\7U\2\2\u009d\u009e\7G\2\2\u009e\24\3\2\2\2\u009f\u00a0\7K\2\2\u00a0"+
"\u00a1\7P\2\2\u00a1\26\3\2\2\2\u00a2\u00a3\7L\2\2\u00a3\u00a4\7Q\2\2\u00a4"+
"\u00a5\7K\2\2\u00a5\u00a6\7P\2\2\u00a6\30\3\2\2\2\u00a7\u00a8\7O\2\2\u00a8"+
"\u00a9\7C\2\2\u00a9\u00aa\7Z\2\2\u00aa\u00ab\7U\2\2\u00ab\u00ac\7R\2\2"+
"\u00ac\u00ad\7C\2\2\u00ad\u00ae\7P\2\2\u00ae\32\3\2\2\2\u00af\u00b0\7"+
"P\2\2\u00b0\u00b1\7Q\2\2\u00b1\u00b2\7V\2\2\u00b2\34\3\2\2\2\u00b3\u00b4"+
"\7P\2\2\u00b4\u00b5\7W\2\2\u00b5\u00b6\7N\2\2\u00b6\u00b7\7N\2\2\u00b7"+
"\36\3\2\2\2\u00b8\u00b9\7Q\2\2\u00b9\u00ba\7H\2\2\u00ba \3\2\2\2\u00bb"+
"\u00bc\7Q\2\2\u00bc\u00bd\7T\2\2\u00bd\"\3\2\2\2\u00be\u00bf\7U\2\2\u00bf"+
"\u00c0\7G\2\2\u00c0\u00c1\7S\2\2\u00c1\u00c2\7W\2\2\u00c2\u00c3\7G\2\2"+
"\u00c3\u00c4\7P\2\2\u00c4\u00c5\7E\2\2\u00c5\u00c6\7G\2\2\u00c6$\3\2\2"+
"\2\u00c7\u00c8\7V\2\2\u00c8\u00c9\7T\2\2\u00c9\u00ca\7W\2\2\u00ca\u00cb"+
"\7G\2\2\u00cb&\3\2\2\2\u00cc\u00cd\7W\2\2\u00cd\u00ce\7P\2\2\u00ce\u00cf"+
"\7V\2\2\u00cf\u00d0\7K\2\2\u00d0\u00d1\7N\2\2\u00d1(\3\2\2\2\u00d2\u00d3"+
"\7Y\2\2\u00d3\u00d4\7J\2\2\u00d4\u00d5\7G\2\2\u00d5\u00d6\7T\2\2\u00d6"+
"\u00d7\7G\2\2\u00d7*\3\2\2\2\u00d8\u00d9\7Y\2\2\u00d9\u00da\7K\2\2\u00da"+
"\u00db\7V\2\2\u00db\u00dc\7J\2\2\u00dc,\3\2\2\2\u00dd\u00e1\7?\2\2\u00de"+
"\u00df\7?\2\2\u00df\u00e1\7?\2\2\u00e0\u00dd\3\2\2\2\u00e0\u00de\3\2\2"+
"\2\u00e1.\3\2\2\2\u00e2\u00e3\7>\2\2\u00e3\u00e7\7@\2\2\u00e4\u00e5\7"+
"#\2\2\u00e5\u00e7\7?\2\2\u00e6\u00e2\3\2\2\2\u00e6\u00e4\3\2\2\2\u00e7"+
"\60\3\2\2\2\u00e8\u00e9\7>\2\2\u00e9\62\3\2\2\2\u00ea\u00eb\7>\2\2\u00eb"+
"\u00ec\7?\2\2\u00ec\64\3\2\2\2\u00ed\u00ee\7@\2\2\u00ee\66\3\2\2\2\u00ef"+
"\u00f0\7@\2\2\u00f0\u00f1\7?\2\2\u00f18\3\2\2\2\u00f2\u00f3\7-\2\2\u00f3"+
":\3\2\2\2\u00f4\u00f5\7/\2\2\u00f5<\3\2\2\2\u00f6\u00f7\7,\2\2\u00f7>"+
"\3\2\2\2\u00f8\u00f9\7\61\2\2\u00f9@\3\2\2\2\u00fa\u00fb\7\'\2\2\u00fb"+
"B\3\2\2\2\u00fc\u00fd\7\60\2\2\u00fdD\3\2\2\2\u00fe\u00ff\7.\2\2\u00ff"+
"F\3\2\2\2\u0100\u0101\7]\2\2\u0101H\3\2\2\2\u0102\u0103\7_\2\2\u0103J"+
"\3\2\2\2\u0104\u0105\7*\2\2\u0105L\3\2\2\2\u0106\u0107\7+\2\2\u0107N\3"+
"\2\2\2\u0108\u0109\7~\2\2\u0109P\3\2\2\2\u010a\u010e\7)\2\2\u010b\u010d"+
"\n\2\2\2\u010c\u010b\3\2\2\2\u010d\u0110\3\2\2\2\u010e\u010c\3\2\2\2\u010e"+
"\u010f\3\2\2\2\u010f\u0111\3\2\2\2\u0110\u010e\3\2\2\2\u0111\u011b\7)"+
"\2\2\u0112\u0116\7$\2\2\u0113\u0115\n\3\2\2\u0114\u0113\3\2\2\2\u0115"+
"\u0118\3\2\2\2\u0116\u0114\3\2\2\2\u0116\u0117\3\2\2\2\u0117\u0119\3\2"+
"\2\2\u0118\u0116\3\2\2\2\u0119\u011b\7$\2\2\u011a\u010a\3\2\2\2\u011a"+
"\u0112\3\2\2\2\u011bR\3\2\2\2\u011c\u011e\5_\60\2\u011d\u011c\3\2\2\2"+
"\u011e\u011f\3\2\2\2\u011f\u011d\3\2\2\2\u011f\u0120\3\2\2\2\u0120T\3"+
"\2\2\2\u0121\u0123\5_\60\2\u0122\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124"+
"\u0122\3\2\2\2\u0124\u0125\3\2\2\2\u0125\u0126\3\2\2\2\u0126\u012a\5C"+
"\"\2\u0127\u0129\5_\60\2\u0128\u0127\3\2\2\2\u0129\u012c\3\2\2\2\u012a"+
"\u0128\3\2\2\2\u012a\u012b\3\2\2\2\u012b\u014c\3\2\2\2\u012c\u012a\3\2"+
"\2\2\u012d\u012f\5C\"\2\u012e\u0130\5_\60\2\u012f\u012e\3\2\2\2\u0130"+
"\u0131\3\2\2\2\u0131\u012f\3\2\2\2\u0131\u0132\3\2\2\2\u0132\u014c\3\2"+
"\2\2\u0133\u0135\5_\60\2\u0134\u0133\3\2\2\2\u0135\u0136\3\2\2\2\u0136"+
"\u0134\3\2\2\2\u0136\u0137\3\2\2\2\u0137\u013f\3\2\2\2\u0138\u013c\5C"+
"\"\2\u0139\u013b\5_\60\2\u013a\u0139\3\2\2\2\u013b\u013e\3\2\2\2\u013c"+
"\u013a\3\2\2\2\u013c\u013d\3\2\2\2\u013d\u0140\3\2\2\2\u013e\u013c\3\2"+
"\2\2\u013f\u0138\3\2\2\2\u013f\u0140\3\2\2\2\u0140\u0141\3\2\2\2\u0141"+
"\u0142\5]/\2\u0142\u014c\3\2\2\2\u0143\u0145\5C\"\2\u0144\u0146\5_\60"+
"\2\u0145\u0144\3\2\2\2\u0146\u0147\3\2\2\2\u0147\u0145\3\2\2\2\u0147\u0148"+
"\3\2\2\2\u0148\u0149\3\2\2\2\u0149\u014a\5]/\2\u014a\u014c\3\2\2\2\u014b"+
"\u0122\3\2\2\2\u014b\u012d\3\2\2\2\u014b\u0134\3\2\2\2\u014b\u0143\3\2"+
"\2\2\u014cV\3\2\2\2\u014d\u0150\5a\61\2\u014e\u0150\7a\2\2\u014f\u014d"+
"\3\2\2\2\u014f\u014e\3\2\2\2\u0150\u0156\3\2\2\2\u0151\u0155\5a\61\2\u0152"+
"\u0155\5_\60\2\u0153\u0155\t\4\2\2\u0154\u0151\3\2\2\2\u0154\u0152\3\2"+
"\2\2\u0154\u0153\3\2\2\2\u0155\u0158\3\2\2\2\u0156\u0154\3\2\2\2\u0156"+
"\u0157\3\2\2\2\u0157X\3\2\2\2\u0158\u0156\3\2\2\2\u0159\u015d\5_\60\2"+
"\u015a\u015e\5a\61\2\u015b\u015e\5_\60\2\u015c\u015e\t\4\2\2\u015d\u015a"+
"\3\2\2\2\u015d\u015b\3\2\2\2\u015d\u015c\3\2\2\2\u015e\u015f\3\2\2\2\u015f"+
"\u015d\3\2\2\2\u015f\u0160\3\2\2\2\u0160Z\3\2\2\2\u0161\u0167\7$\2\2\u0162"+
"\u0166\n\3\2\2\u0163\u0164\7$\2\2\u0164\u0166\7$\2\2\u0165\u0162\3\2\2"+
"\2\u0165\u0163\3\2\2\2\u0166\u0169\3\2\2\2\u0167\u0165\3\2\2\2\u0167\u0168"+
"\3\2\2\2\u0168\u016a\3\2\2\2\u0169\u0167\3\2\2\2\u016a\u016b\7$\2\2\u016b"+
"\\\3\2\2\2\u016c\u016e\7G\2\2\u016d\u016f\t\5\2\2\u016e\u016d\3\2\2\2"+
"\u016e\u016f\3\2\2\2\u016f\u0171\3\2\2\2\u0170\u0172\5_\60\2\u0171\u0170"+
"\3\2\2\2\u0172\u0173\3\2\2\2\u0173\u0171\3\2\2\2\u0173\u0174\3\2\2\2\u0174"+
"^\3\2\2\2\u0175\u0176\t\6\2\2\u0176`\3\2\2\2\u0177\u0178\t\7\2\2\u0178"+
"b\3\2\2\2\u0179\u017a\7\61\2\2\u017a\u017b\7\61\2\2\u017b\u017f\3\2\2"+
"\2\u017c\u017e\n\b\2\2\u017d\u017c\3\2\2\2\u017e\u0181\3\2\2\2\u017f\u017d"+
"\3\2\2\2\u017f\u0180\3\2\2\2\u0180\u0183\3\2\2\2\u0181\u017f\3\2\2\2\u0182"+
"\u0184\7\17\2\2\u0183\u0182\3\2\2\2\u0183\u0184\3\2\2\2\u0184\u0186\3"+
"\2\2\2\u0185\u0187\7\f\2\2\u0186\u0185\3\2\2\2\u0186\u0187\3\2\2\2\u0187"+
"\u0188\3\2\2\2\u0188\u0189\b\62\2\2\u0189d\3\2\2\2\u018a\u018b\7\61\2"+
"\2\u018b\u018c\7,\2\2\u018c\u0191\3\2\2\2\u018d\u0190\5e\63\2\u018e\u0190"+
"\13\2\2\2\u018f\u018d\3\2\2\2\u018f\u018e\3\2\2\2\u0190\u0193\3\2\2\2"+
"\u0191\u0192\3\2\2\2\u0191\u018f\3\2\2\2\u0192\u0194\3\2\2\2\u0193\u0191"+
"\3\2\2\2\u0194\u0195\7,\2\2\u0195\u0196\7\61\2\2\u0196\u0197\3\2\2\2\u0197"+
"\u0198\b\63\2\2\u0198f\3\2\2\2\u0199\u019b\t\t\2\2\u019a\u0199\3\2\2\2"+
"\u019b\u019c\3\2\2\2\u019c\u019a\3\2\2\2\u019c\u019d\3\2\2\2\u019d\u019e"+
"\3\2\2\2\u019e\u019f\b\64\2\2\u019fh\3\2\2\2\u01a0\u01a1\13\2\2\2\u01a1"+
"j\3\2\2\2 \2\u00e0\u00e6\u010e\u0116\u011a\u011f\u0124\u012a\u0131\u0136"+
"\u013c\u013f\u0147\u014b\u014f\u0154\u0156\u015d\u015f\u0165\u0167\u016e"+
"\u0173\u017f\u0183\u0186\u018f\u0191\u019c\3\2\3\2";
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2,\u017f\b\1\4\2\t"+
"\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
"\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
"\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
"\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+
"\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+
",\t,\4-\t-\4.\t.\3\2\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3"+
"\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b"+
"\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3"+
"\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3"+
"\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3"+
"\21\3\21\3\21\3\22\3\22\3\22\5\22\u00b1\n\22\3\23\3\23\3\23\3\24\3\24"+
"\3\25\3\25\3\25\3\26\3\26\3\27\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32"+
"\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\37\3\37\3 \3 \3!\3!\3\"\3\""+
"\3#\3#\3$\3$\7$\u00da\n$\f$\16$\u00dd\13$\3$\3$\3%\3%\3%\3%\7%\u00e5\n"+
"%\f%\16%\u00e8\13%\3%\3%\3%\3%\3%\7%\u00ef\n%\f%\16%\u00f2\13%\3%\3%\3"+
"%\3%\3%\3%\3%\7%\u00fb\n%\f%\16%\u00fe\13%\3%\3%\3%\3%\3%\3%\3%\7%\u0107"+
"\n%\f%\16%\u010a\13%\3%\5%\u010d\n%\3&\6&\u0110\n&\r&\16&\u0111\3\'\6"+
"\'\u0115\n\'\r\'\16\'\u0116\3\'\3\'\7\'\u011b\n\'\f\'\16\'\u011e\13\'"+
"\3\'\3\'\6\'\u0122\n\'\r\'\16\'\u0123\3\'\6\'\u0127\n\'\r\'\16\'\u0128"+
"\3\'\3\'\7\'\u012d\n\'\f\'\16\'\u0130\13\'\5\'\u0132\n\'\3\'\3\'\3\'\3"+
"\'\6\'\u0138\n\'\r\'\16\'\u0139\3\'\3\'\5\'\u013e\n\'\3(\3(\5(\u0142\n"+
"(\3(\3(\3(\7(\u0147\n(\f(\16(\u014a\13(\3)\3)\5)\u014e\n)\3)\6)\u0151"+
"\n)\r)\16)\u0152\3*\3*\3+\3+\3,\3,\3,\3,\7,\u015d\n,\f,\16,\u0160\13,"+
"\3,\5,\u0163\n,\3,\5,\u0166\n,\3,\3,\3-\3-\3-\3-\3-\7-\u016f\n-\f-\16"+
"-\u0172\13-\3-\3-\3-\3-\3-\3.\6.\u017a\n.\r.\16.\u017b\3.\3.\3\u0170\2"+
"/\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20"+
"\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37"+
"= ?!A\"C#E$G%I&K\'M(O)Q\2S\2U\2W*Y+[,\3\2\17\3\2bb\n\2$$))^^ddhhppttv"+
"v\6\2\f\f\17\17))^^\6\2\f\f\17\17$$^^\5\2\f\f\17\17$$\5\2\f\f\17\17))"+
"\4\2BBaa\4\2GGgg\4\2--//\3\2\62;\4\2C\\c|\4\2\f\f\17\17\5\2\13\f\17\17"+
"\"\"\u019f\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2"+
"\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27"+
"\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2"+
"\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2"+
"\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2"+
"\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2"+
"\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2W\3\2\2\2\2Y"+
"\3\2\2\2\2[\3\2\2\2\3]\3\2\2\2\5a\3\2\2\2\7d\3\2\2\2\tj\3\2\2\2\13o\3"+
"\2\2\2\rr\3\2\2\2\17w\3\2\2\2\21\177\3\2\2\2\23\u0083\3\2\2\2\25\u0088"+
"\3\2\2\2\27\u008b\3\2\2\2\31\u008e\3\2\2\2\33\u0097\3\2\2\2\35\u009c\3"+
"\2\2\2\37\u00a2\3\2\2\2!\u00a8\3\2\2\2#\u00b0\3\2\2\2%\u00b2\3\2\2\2\'"+
"\u00b5\3\2\2\2)\u00b7\3\2\2\2+\u00ba\3\2\2\2-\u00bc\3\2\2\2/\u00bf\3\2"+
"\2\2\61\u00c1\3\2\2\2\63\u00c3\3\2\2\2\65\u00c5\3\2\2\2\67\u00c7\3\2\2"+
"\29\u00c9\3\2\2\2;\u00cb\3\2\2\2=\u00cd\3\2\2\2?\u00cf\3\2\2\2A\u00d1"+
"\3\2\2\2C\u00d3\3\2\2\2E\u00d5\3\2\2\2G\u00d7\3\2\2\2I\u010c\3\2\2\2K"+
"\u010f\3\2\2\2M\u013d\3\2\2\2O\u0141\3\2\2\2Q\u014b\3\2\2\2S\u0154\3\2"+
"\2\2U\u0156\3\2\2\2W\u0158\3\2\2\2Y\u0169\3\2\2\2[\u0179\3\2\2\2]^\7c"+
"\2\2^_\7p\2\2_`\7f\2\2`\4\3\2\2\2ab\7d\2\2bc\7{\2\2c\6\3\2\2\2de\7h\2"+
"\2ef\7c\2\2fg\7n\2\2gh\7u\2\2hi\7g\2\2i\b\3\2\2\2jk\7h\2\2kl\7q\2\2lm"+
"\7t\2\2mn\7m\2\2n\n\3\2\2\2op\7k\2\2pq\7p\2\2q\f\3\2\2\2rs\7l\2\2st\7"+
"q\2\2tu\7k\2\2uv\7p\2\2v\16\3\2\2\2wx\7o\2\2xy\7c\2\2yz\7z\2\2z{\7u\2"+
"\2{|\7r\2\2|}\7c\2\2}~\7p\2\2~\20\3\2\2\2\177\u0080\7p\2\2\u0080\u0081"+
"\7q\2\2\u0081\u0082\7v\2\2\u0082\22\3\2\2\2\u0083\u0084\7p\2\2\u0084\u0085"+
"\7w\2\2\u0085\u0086\7n\2\2\u0086\u0087\7n\2\2\u0087\24\3\2\2\2\u0088\u0089"+
"\7q\2\2\u0089\u008a\7h\2\2\u008a\26\3\2\2\2\u008b\u008c\7q\2\2\u008c\u008d"+
"\7t\2\2\u008d\30\3\2\2\2\u008e\u008f\7u\2\2\u008f\u0090\7g\2\2\u0090\u0091"+
"\7s\2\2\u0091\u0092\7w\2\2\u0092\u0093\7g\2\2\u0093\u0094\7p\2\2\u0094"+
"\u0095\7e\2\2\u0095\u0096\7g\2\2\u0096\32\3\2\2\2\u0097\u0098\7v\2\2\u0098"+
"\u0099\7t\2\2\u0099\u009a\7w\2\2\u009a\u009b\7g\2\2\u009b\34\3\2\2\2\u009c"+
"\u009d\7w\2\2\u009d\u009e\7p\2\2\u009e\u009f\7v\2\2\u009f\u00a0\7k\2\2"+
"\u00a0\u00a1\7n\2\2\u00a1\36\3\2\2\2\u00a2\u00a3\7y\2\2\u00a3\u00a4\7"+
"j\2\2\u00a4\u00a5\7g\2\2\u00a5\u00a6\7t\2\2\u00a6\u00a7\7g\2\2\u00a7 "+
"\3\2\2\2\u00a8\u00a9\7y\2\2\u00a9\u00aa\7k\2\2\u00aa\u00ab\7v\2\2\u00ab"+
"\u00ac\7j\2\2\u00ac\"\3\2\2\2\u00ad\u00b1\7?\2\2\u00ae\u00af\7?\2\2\u00af"+
"\u00b1\7?\2\2\u00b0\u00ad\3\2\2\2\u00b0\u00ae\3\2\2\2\u00b1$\3\2\2\2\u00b2"+
"\u00b3\7#\2\2\u00b3\u00b4\7?\2\2\u00b4&\3\2\2\2\u00b5\u00b6\7>\2\2\u00b6"+
"(\3\2\2\2\u00b7\u00b8\7>\2\2\u00b8\u00b9\7?\2\2\u00b9*\3\2\2\2\u00ba\u00bb"+
"\7@\2\2\u00bb,\3\2\2\2\u00bc\u00bd\7@\2\2\u00bd\u00be\7?\2\2\u00be.\3"+
"\2\2\2\u00bf\u00c0\7-\2\2\u00c0\60\3\2\2\2\u00c1\u00c2\7/\2\2\u00c2\62"+
"\3\2\2\2\u00c3\u00c4\7,\2\2\u00c4\64\3\2\2\2\u00c5\u00c6\7\61\2\2\u00c6"+
"\66\3\2\2\2\u00c7\u00c8\7\'\2\2\u00c88\3\2\2\2\u00c9\u00ca\7\60\2\2\u00ca"+
":\3\2\2\2\u00cb\u00cc\7.\2\2\u00cc<\3\2\2\2\u00cd\u00ce\7]\2\2\u00ce>"+
"\3\2\2\2\u00cf\u00d0\7_\2\2\u00d0@\3\2\2\2\u00d1\u00d2\7*\2\2\u00d2B\3"+
"\2\2\2\u00d3\u00d4\7+\2\2\u00d4D\3\2\2\2\u00d5\u00d6\7~\2\2\u00d6F\3\2"+
"\2\2\u00d7\u00db\7b\2\2\u00d8\u00da\n\2\2\2\u00d9\u00d8\3\2\2\2\u00da"+
"\u00dd\3\2\2\2\u00db\u00d9\3\2\2\2\u00db\u00dc\3\2\2\2\u00dc\u00de\3\2"+
"\2\2\u00dd\u00db\3\2\2\2\u00de\u00df\7b\2\2\u00dfH\3\2\2\2\u00e0\u00e6"+
"\7)\2\2\u00e1\u00e2\7^\2\2\u00e2\u00e5\t\3\2\2\u00e3\u00e5\n\4\2\2\u00e4"+
"\u00e1\3\2\2\2\u00e4\u00e3\3\2\2\2\u00e5\u00e8\3\2\2\2\u00e6\u00e4\3\2"+
"\2\2\u00e6\u00e7\3\2\2\2\u00e7\u00e9\3\2\2\2\u00e8\u00e6\3\2\2\2\u00e9"+
"\u010d\7)\2\2\u00ea\u00f0\7$\2\2\u00eb\u00ec\7^\2\2\u00ec\u00ef\t\3\2"+
"\2\u00ed\u00ef\n\5\2\2\u00ee\u00eb\3\2\2\2\u00ee\u00ed\3\2\2\2\u00ef\u00f2"+
"\3\2\2\2\u00f0\u00ee\3\2\2\2\u00f0\u00f1\3\2\2\2\u00f1\u00f3\3\2\2\2\u00f2"+
"\u00f0\3\2\2\2\u00f3\u010d\7$\2\2\u00f4\u00f5\7A\2\2\u00f5\u00f6\7$\2"+
"\2\u00f6\u00fc\3\2\2\2\u00f7\u00f8\7^\2\2\u00f8\u00fb\7$\2\2\u00f9\u00fb"+
"\n\6\2\2\u00fa\u00f7\3\2\2\2\u00fa\u00f9\3\2\2\2\u00fb\u00fe\3\2\2\2\u00fc"+
"\u00fa\3\2\2\2\u00fc\u00fd\3\2\2\2\u00fd\u00ff\3\2\2\2\u00fe\u00fc\3\2"+
"\2\2\u00ff\u010d\7$\2\2\u0100\u0101\7A\2\2\u0101\u0102\7)\2\2\u0102\u0108"+
"\3\2\2\2\u0103\u0104\7^\2\2\u0104\u0107\7)\2\2\u0105\u0107\n\7\2\2\u0106"+
"\u0103\3\2\2\2\u0106\u0105\3\2\2\2\u0107\u010a\3\2\2\2\u0108\u0106\3\2"+
"\2\2\u0108\u0109\3\2\2\2\u0109\u010b\3\2\2\2\u010a\u0108\3\2\2\2\u010b"+
"\u010d\7)\2\2\u010c\u00e0\3\2\2\2\u010c\u00ea\3\2\2\2\u010c\u00f4\3\2"+
"\2\2\u010c\u0100\3\2\2\2\u010dJ\3\2\2\2\u010e\u0110\5S*\2\u010f\u010e"+
"\3\2\2\2\u0110\u0111\3\2\2\2\u0111\u010f\3\2\2\2\u0111\u0112\3\2\2\2\u0112"+
"L\3\2\2\2\u0113\u0115\5S*\2\u0114\u0113\3\2\2\2\u0115\u0116\3\2\2\2\u0116"+
"\u0114\3\2\2\2\u0116\u0117\3\2\2\2\u0117\u0118\3\2\2\2\u0118\u011c\59"+
"\35\2\u0119\u011b\5S*\2\u011a\u0119\3\2\2\2\u011b\u011e\3\2\2\2\u011c"+
"\u011a\3\2\2\2\u011c\u011d\3\2\2\2\u011d\u013e\3\2\2\2\u011e\u011c\3\2"+
"\2\2\u011f\u0121\59\35\2\u0120\u0122\5S*\2\u0121\u0120\3\2\2\2\u0122\u0123"+
"\3\2\2\2\u0123\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124\u013e\3\2\2\2\u0125"+
"\u0127\5S*\2\u0126\u0125\3\2\2\2\u0127\u0128\3\2\2\2\u0128\u0126\3\2\2"+
"\2\u0128\u0129\3\2\2\2\u0129\u0131\3\2\2\2\u012a\u012e\59\35\2\u012b\u012d"+
"\5S*\2\u012c\u012b\3\2\2\2\u012d\u0130\3\2\2\2\u012e\u012c\3\2\2\2\u012e"+
"\u012f\3\2\2\2\u012f\u0132\3\2\2\2\u0130\u012e\3\2\2\2\u0131\u012a\3\2"+
"\2\2\u0131\u0132\3\2\2\2\u0132\u0133\3\2\2\2\u0133\u0134\5Q)\2\u0134\u013e"+
"\3\2\2\2\u0135\u0137\59\35\2\u0136\u0138\5S*\2\u0137\u0136\3\2\2\2\u0138"+
"\u0139\3\2\2\2\u0139\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u013b\3\2"+
"\2\2\u013b\u013c\5Q)\2\u013c\u013e\3\2\2\2\u013d\u0114\3\2\2\2\u013d\u011f"+
"\3\2\2\2\u013d\u0126\3\2\2\2\u013d\u0135\3\2\2\2\u013eN\3\2\2\2\u013f"+
"\u0142\5U+\2\u0140\u0142\t\b\2\2\u0141\u013f\3\2\2\2\u0141\u0140\3\2\2"+
"\2\u0142\u0148\3\2\2\2\u0143\u0147\5U+\2\u0144\u0147\5S*\2\u0145\u0147"+
"\7a\2\2\u0146\u0143\3\2\2\2\u0146\u0144\3\2\2\2\u0146\u0145\3\2\2\2\u0147"+
"\u014a\3\2\2\2\u0148\u0146\3\2\2\2\u0148\u0149\3\2\2\2\u0149P\3\2\2\2"+
"\u014a\u0148\3\2\2\2\u014b\u014d\t\t\2\2\u014c\u014e\t\n\2\2\u014d\u014c"+
"\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u0150\3\2\2\2\u014f\u0151\5S*\2\u0150"+
"\u014f\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0150\3\2\2\2\u0152\u0153\3\2"+
"\2\2\u0153R\3\2\2\2\u0154\u0155\t\13\2\2\u0155T\3\2\2\2\u0156\u0157\t"+
"\f\2\2\u0157V\3\2\2\2\u0158\u0159\7\61\2\2\u0159\u015a\7\61\2\2\u015a"+
"\u015e\3\2\2\2\u015b\u015d\n\r\2\2\u015c\u015b\3\2\2\2\u015d\u0160\3\2"+
"\2\2\u015e\u015c\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0162\3\2\2\2\u0160"+
"\u015e\3\2\2\2\u0161\u0163\7\17\2\2\u0162\u0161\3\2\2\2\u0162\u0163\3"+
"\2\2\2\u0163\u0165\3\2\2\2\u0164\u0166\7\f\2\2\u0165\u0164\3\2\2\2\u0165"+
"\u0166\3\2\2\2\u0166\u0167\3\2\2\2\u0167\u0168\b,\2\2\u0168X\3\2\2\2\u0169"+
"\u016a\7\61\2\2\u016a\u016b\7,\2\2\u016b\u0170\3\2\2\2\u016c\u016f\5Y"+
"-\2\u016d\u016f\13\2\2\2\u016e\u016c\3\2\2\2\u016e\u016d\3\2\2\2\u016f"+
"\u0172\3\2\2\2\u0170\u0171\3\2\2\2\u0170\u016e\3\2\2\2\u0171\u0173\3\2"+
"\2\2\u0172\u0170\3\2\2\2\u0173\u0174\7,\2\2\u0174\u0175\7\61\2\2\u0175"+
"\u0176\3\2\2\2\u0176\u0177\b-\2\2\u0177Z\3\2\2\2\u0178\u017a\t\16\2\2"+
"\u0179\u0178\3\2\2\2\u017a\u017b\3\2\2\2\u017b\u0179\3\2\2\2\u017b\u017c"+
"\3\2\2\2\u017c\u017d\3\2\2\2\u017d\u017e\b.\2\2\u017e\\\3\2\2\2\"\2\u00b0"+
"\u00db\u00e4\u00e6\u00ee\u00f0\u00fa\u00fc\u0106\u0108\u010c\u0111\u0116"+
"\u011c\u0123\u0128\u012e\u0131\u0139\u013d\u0141\u0146\u0148\u014d\u0152"+
"\u015e\u0162\u0165\u016e\u0170\u017b\3\2\3\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {

View File

@ -47,6 +47,16 @@ interface EqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitQuery(EqlBaseParser.QueryContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#sequenceParams}.
* @param ctx the parse tree
*/
void enterSequenceParams(EqlBaseParser.SequenceParamsContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#sequenceParams}.
* @param ctx the parse tree
*/
void exitSequenceParams(EqlBaseParser.SequenceParamsContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#sequence}.
* @param ctx the parse tree
@ -88,35 +98,45 @@ interface EqlBaseListener extends ParseTreeListener {
*/
void exitJoinKeys(EqlBaseParser.JoinKeysContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#span}.
* Enter a parse tree produced by {@link EqlBaseParser#joinTerm}.
* @param ctx the parse tree
*/
void enterSpan(EqlBaseParser.SpanContext ctx);
void enterJoinTerm(EqlBaseParser.JoinTermContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#span}.
* Exit a parse tree produced by {@link EqlBaseParser#joinTerm}.
* @param ctx the parse tree
*/
void exitSpan(EqlBaseParser.SpanContext ctx);
void exitJoinTerm(EqlBaseParser.JoinTermContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#match}.
* Enter a parse tree produced by {@link EqlBaseParser#sequenceTerm}.
* @param ctx the parse tree
*/
void enterMatch(EqlBaseParser.MatchContext ctx);
void enterSequenceTerm(EqlBaseParser.SequenceTermContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#match}.
* Exit a parse tree produced by {@link EqlBaseParser#sequenceTerm}.
* @param ctx the parse tree
*/
void exitMatch(EqlBaseParser.MatchContext ctx);
void exitSequenceTerm(EqlBaseParser.SequenceTermContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#condition}.
* Enter a parse tree produced by {@link EqlBaseParser#subquery}.
* @param ctx the parse tree
*/
void enterCondition(EqlBaseParser.ConditionContext ctx);
void enterSubquery(EqlBaseParser.SubqueryContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#condition}.
* Exit a parse tree produced by {@link EqlBaseParser#subquery}.
* @param ctx the parse tree
*/
void exitCondition(EqlBaseParser.ConditionContext ctx);
void exitSubquery(EqlBaseParser.SubqueryContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#eventQuery}.
* @param ctx the parse tree
*/
void enterEventQuery(EqlBaseParser.EventQueryContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#eventQuery}.
* @param ctx the parse tree
*/
void exitEventQuery(EqlBaseParser.EventQueryContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#expression}.
* @param ctx the parse tree
@ -151,6 +171,18 @@ interface EqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx);
/**
* Enter a parse tree produced by the {@code processCheck}
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
* @param ctx the parse tree
*/
void enterProcessCheck(EqlBaseParser.ProcessCheckContext ctx);
/**
* Exit a parse tree produced by the {@code processCheck}
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
* @param ctx the parse tree
*/
void exitProcessCheck(EqlBaseParser.ProcessCheckContext ctx);
/**
* Enter a parse tree produced by the {@code logicalBinary}
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
@ -163,26 +195,6 @@ interface EqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#predicated}.
* @param ctx the parse tree
*/
void enterPredicated(EqlBaseParser.PredicatedContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#predicated}.
* @param ctx the parse tree
*/
void exitPredicated(EqlBaseParser.PredicatedContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#predicate}.
* @param ctx the parse tree
*/
void enterPredicate(EqlBaseParser.PredicateContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#predicate}.
* @param ctx the parse tree
*/
void exitPredicate(EqlBaseParser.PredicateContext ctx);
/**
* Enter a parse tree produced by the {@code valueExpressionDefault}
* labeled alternative in {@link EqlBaseParser#valueExpression}.
@ -231,6 +243,16 @@ interface EqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#predicate}.
* @param ctx the parse tree
*/
void enterPredicate(EqlBaseParser.PredicateContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#predicate}.
* @param ctx the parse tree
*/
void exitPredicate(EqlBaseParser.PredicateContext ctx);
/**
* Enter a parse tree produced by the {@code constantDefault}
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
@ -357,16 +379,6 @@ interface EqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitBooleanValue(EqlBaseParser.BooleanValueContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#qualifiedNames}.
* @param ctx the parse tree
*/
void enterQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx);
/**
* Exit a parse tree produced by {@link EqlBaseParser#qualifiedNames}.
* @param ctx the parse tree
*/
void exitQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx);
/**
* Enter a parse tree produced by {@link EqlBaseParser#qualifiedName}.
* @param ctx the parse tree
@ -388,41 +400,15 @@ interface EqlBaseListener extends ParseTreeListener {
*/
void exitIdentifier(EqlBaseParser.IdentifierContext ctx);
/**
* Enter a parse tree produced by the {@code quotedIdentifier}
* labeled alternative in {@link EqlBaseParser#quoteIdentifier}.
* Enter a parse tree produced by {@link EqlBaseParser#timeUnit}.
* @param ctx the parse tree
*/
void enterQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx);
void enterTimeUnit(EqlBaseParser.TimeUnitContext ctx);
/**
* Exit a parse tree produced by the {@code quotedIdentifier}
* labeled alternative in {@link EqlBaseParser#quoteIdentifier}.
* Exit a parse tree produced by {@link EqlBaseParser#timeUnit}.
* @param ctx the parse tree
*/
void exitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx);
/**
* Enter a parse tree produced by the {@code unquotedIdentifier}
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
* @param ctx the parse tree
*/
void enterUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx);
/**
* Exit a parse tree produced by the {@code unquotedIdentifier}
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
* @param ctx the parse tree
*/
void exitUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx);
/**
* Enter a parse tree produced by the {@code digitIdentifier}
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
* @param ctx the parse tree
*/
void enterDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx);
/**
* Exit a parse tree produced by the {@code digitIdentifier}
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
* @param ctx the parse tree
*/
void exitDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx);
void exitTimeUnit(EqlBaseParser.TimeUnitContext ctx);
/**
* Enter a parse tree produced by the {@code decimalLiteral}
* labeled alternative in {@link EqlBaseParser#number}.

View File

@ -34,6 +34,12 @@ interface EqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitQuery(EqlBaseParser.QueryContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#sequenceParams}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitSequenceParams(EqlBaseParser.SequenceParamsContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#sequence}.
* @param ctx the parse tree
@ -59,23 +65,29 @@ interface EqlBaseVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitJoinKeys(EqlBaseParser.JoinKeysContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#span}.
* Visit a parse tree produced by {@link EqlBaseParser#joinTerm}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitSpan(EqlBaseParser.SpanContext ctx);
T visitJoinTerm(EqlBaseParser.JoinTermContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#match}.
* Visit a parse tree produced by {@link EqlBaseParser#sequenceTerm}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitMatch(EqlBaseParser.MatchContext ctx);
T visitSequenceTerm(EqlBaseParser.SequenceTermContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#condition}.
* Visit a parse tree produced by {@link EqlBaseParser#subquery}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitCondition(EqlBaseParser.ConditionContext ctx);
T visitSubquery(EqlBaseParser.SubqueryContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#eventQuery}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitEventQuery(EqlBaseParser.EventQueryContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#expression}.
* @param ctx the parse tree
@ -96,6 +108,13 @@ interface EqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitBooleanDefault(EqlBaseParser.BooleanDefaultContext ctx);
/**
* Visit a parse tree produced by the {@code processCheck}
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitProcessCheck(EqlBaseParser.ProcessCheckContext ctx);
/**
* Visit a parse tree produced by the {@code logicalBinary}
* labeled alternative in {@link EqlBaseParser#booleanExpression}.
@ -103,18 +122,6 @@ interface EqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitLogicalBinary(EqlBaseParser.LogicalBinaryContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#predicated}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitPredicated(EqlBaseParser.PredicatedContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#predicate}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitPredicate(EqlBaseParser.PredicateContext ctx);
/**
* Visit a parse tree produced by the {@code valueExpressionDefault}
* labeled alternative in {@link EqlBaseParser#valueExpression}.
@ -143,6 +150,12 @@ interface EqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitArithmeticUnary(EqlBaseParser.ArithmeticUnaryContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#predicate}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitPredicate(EqlBaseParser.PredicateContext ctx);
/**
* Visit a parse tree produced by the {@code constantDefault}
* labeled alternative in {@link EqlBaseParser#primaryExpression}.
@ -217,12 +230,6 @@ interface EqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitBooleanValue(EqlBaseParser.BooleanValueContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#qualifiedNames}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitQualifiedNames(EqlBaseParser.QualifiedNamesContext ctx);
/**
* Visit a parse tree produced by {@link EqlBaseParser#qualifiedName}.
* @param ctx the parse tree
@ -236,26 +243,11 @@ interface EqlBaseVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitIdentifier(EqlBaseParser.IdentifierContext ctx);
/**
* Visit a parse tree produced by the {@code quotedIdentifier}
* labeled alternative in {@link EqlBaseParser#quoteIdentifier}.
* Visit a parse tree produced by {@link EqlBaseParser#timeUnit}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext ctx);
/**
* Visit a parse tree produced by the {@code unquotedIdentifier}
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitUnquotedIdentifier(EqlBaseParser.UnquotedIdentifierContext ctx);
/**
* Visit a parse tree produced by the {@code digitIdentifier}
* labeled alternative in {@link EqlBaseParser#unquoteIdentifier}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitDigitIdentifier(EqlBaseParser.DigitIdentifierContext ctx);
T visitTimeUnit(EqlBaseParser.TimeUnitContext ctx);
/**
* Visit a parse tree produced by the {@code decimalLiteral}
* labeled alternative in {@link EqlBaseParser#number}.

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.eql.parser;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DiagnosticErrorListener;
import org.antlr.v4.runtime.Parser;
@ -17,10 +17,10 @@ import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.atn.ATNConfigSet;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import java.util.Arrays;
import java.util.BitSet;
@ -35,32 +35,39 @@ public class EqlParser {
private static final Logger log = LogManager.getLogger();
private final boolean DEBUG = true;
private final boolean DEBUG = false;
/**
* Parses an EQL statement into execution plan
* @param eql - the EQL statement
*/
public Expression createStatement(String eql) {
public LogicalPlan createStatement(String eql) {
return createStatement(eql, new ParserParams());
}
public LogicalPlan createStatement(String eql, ParserParams params) {
if (log.isDebugEnabled()) {
log.debug("Parsing as statement: {}", eql);
}
return invokeParser(eql, EqlBaseParser::singleStatement, AstBuilder::expression);
return invokeParser(eql, params, EqlBaseParser::singleStatement, AstBuilder::plan);
}
public Expression createExpression(String expression) {
return createExpression(expression, new ParserParams());
}
public Expression createExpression(String expression, ParserParams params) {
if (log.isDebugEnabled()) {
log.debug("Parsing as expression: {}", expression);
}
return invokeParser(expression, EqlBaseParser::singleExpression, AstBuilder::expression);
return invokeParser(expression, params, EqlBaseParser::singleExpression, AstBuilder::expression);
}
private <T> T invokeParser(String sql,
private <T> T invokeParser(String eql, ParserParams params,
Function<EqlBaseParser, ParserRuleContext> parseFunction,
BiFunction<AstBuilder, ParserRuleContext, T> visitor) {
BiFunction<AstBuilder, ParserRuleContext, T> visitor) {
try {
EqlBaseLexer lexer = new EqlBaseLexer(new CaseInsensitiveStream(sql));
EqlBaseLexer lexer = new EqlBaseLexer(new ANTLRInputStream(eql));
lexer.removeErrorListeners();
lexer.addErrorListener(ERROR_LISTENER);
@ -94,10 +101,10 @@ public class EqlParser {
log.info("Parse tree {} " + tree.toStringTree());
}
return visitor.apply(new AstBuilder(), tree);
return visitor.apply(new AstBuilder(params), tree);
} catch (StackOverflowError e) {
throw new ParsingException("SQL statement is too large, " +
"causing stack overflow when generating the parsing tree: [{}]", sql);
throw new ParsingException("EQL statement is too large, " +
"causing stack overflow when generating the parsing tree: [{}]", eql);
}
}
@ -126,28 +133,101 @@ public class EqlParser {
this.ruleNames = ruleNames;
}
@Override
public void exitDigitIdentifier(EqlBaseParser.DigitIdentifierContext context) {
Token token = context.DIGIT_IDENTIFIER().getSymbol();
throw new ParsingException(
"identifiers must not start with a digit; please use double quotes",
null,
token.getLine(),
token.getCharPositionInLine());
public void exitFunctionExpression(EqlBaseParser.FunctionExpressionContext context) {
    // Validate function names at parse time: only names from the original EQL
    // implementation are recognized; of those, the array-based ones are
    // explicitly rejected as unsupported. Everything else is unknown.
    Token token = context.name;
    String functionName = token.getText();

    switch (functionName) {
        // recognized function names -- accepted by the parser (no further
        // validation happens here; resolution occurs later)
        case "add":
        case "between":
        case "cidrMatch":
        case "concat":
        case "divide":
        case "endsWith":
        case "indexOf":
        case "length":
        case "match":
        case "modulo":
        case "multiply":
        case "number":
        case "startsWith":
        case "string":
        case "stringContains":
        case "substring":
        case "subtract":
        case "wildcard":
            break;
        // known EQL functions that this implementation deliberately rejects
        case "arrayContains":
        case "arrayCount":
        case "arraySearch":
            throw new ParsingException(
                "unsupported function " + functionName,
                null,
                token.getLine(),
                token.getCharPositionInLine());
        // not an EQL function name at all
        default:
            throw new ParsingException(
                "unknown function " + functionName,
                null,
                token.getLine(),
                token.getCharPositionInLine());
    }
}
@Override
public void exitQuotedIdentifier(EqlBaseParser.QuotedIdentifierContext context) {
// Remove quotes
context.getParent().removeLastChild();
public void exitJoin(EqlBaseParser.JoinContext context) {
    // "join" queries are recognized by the grammar but rejected here,
    // pointing the error at the JOIN keyword itself.
    Token joinToken = context.JOIN().getSymbol();
    int line = joinToken.getLine();
    int column = joinToken.getCharPositionInLine();
    throw new ParsingException("join is not supported", null, line, column);
}
Token token = (Token) context.getChild(0).getPayload();
context.getParent().addChild(new CommonToken(
new Pair<>(token.getTokenSource(), token.getInputStream()),
EqlBaseLexer.IDENTIFIER,
token.getChannel(),
token.getStartIndex() + 1,
token.getStopIndex() - 1));
@Override
public void exitPipe(EqlBaseParser.PipeContext context) {
    // pipes parse successfully but are rejected during post-processing,
    // with the error anchored at the pipe token
    Token pipeToken = context.PIPE().getSymbol();
    int line = pipeToken.getLine();
    int column = pipeToken.getCharPositionInLine();
    throw new ParsingException("pipes are not supported", null, line, column);
}
@Override
public void exitProcessCheck(EqlBaseParser.ProcessCheckContext context) {
    // process relationship expressions are rejected at parse time; the
    // error points at the relationship token
    Token relationshipToken = context.relationship;
    int line = relationshipToken.getLine();
    int column = relationshipToken.getCharPositionInLine();
    throw new ParsingException("process relationships are not supported", null, line, column);
}
@Override
public void exitSequence(EqlBaseParser.SequenceContext context) {
    // sequence queries are rejected here; fail at the SEQUENCE keyword
    Token sequenceToken = context.SEQUENCE().getSymbol();
    int line = sequenceToken.getLine();
    int column = sequenceToken.getCharPositionInLine();
    throw new ParsingException("sequence is not supported", null, line, column);
}
@Override
public void exitQualifiedName(EqlBaseParser.QualifiedNameContext context) {
    // Array indexes on fields are not supported; reject the statement at
    // the position of the first index encountered.
    // NOTE: uses `isEmpty() == false` for consistency with the codebase's
    // preferred negation style instead of `size() > 0`.
    if (context.INTEGER_VALUE().isEmpty() == false) {
        Token firstIndex = context.INTEGER_VALUE(0).getSymbol();
        throw new ParsingException(
            "array indexes are not supported",
            null,
            firstIndex.getLine(),
            firstIndex.getCharPositionInLine());
    }
}
}
@ -158,4 +238,4 @@ public class EqlParser {
throw new ParsingException(message, e, line, charPositionInLine);
}
};
}
}

View File

@ -6,6 +6,237 @@
package org.elasticsearch.xpack.eql.parser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.ArithmeticUnaryContext;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.ComparisonContext;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.DereferenceContext;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.FunctionExpressionContext;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.LogicalBinaryContext;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.LogicalNotContext;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.PredicateContext;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.ValueExpressionDefaultContext;
import org.elasticsearch.xpack.ql.QlIllegalArgumentException;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Literal;
import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.ql.expression.function.Function;
import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction;
import org.elasticsearch.xpack.ql.expression.predicate.logical.And;
import org.elasticsearch.xpack.ql.expression.predicate.logical.Not;
import org.elasticsearch.xpack.ql.expression.predicate.logical.Or;
import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Add;
import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Div;
import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Mod;
import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Mul;
import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Neg;
import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Sub;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThan;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThanOrEqual;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThanOrEqual;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NotEquals;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.type.DataType;
import org.elasticsearch.xpack.ql.type.DataTypes;
import org.elasticsearch.xpack.ql.util.StringUtils;
import java.util.List;
/**
 * Builds QL {@link Expression} nodes out of the ANTLR parse tree produced
 * for EQL expressions: literals, arithmetic, comparisons, IN-style
 * predicates and boolean logic.
 */
public class ExpressionBuilder extends IdentifierBuilder {

    /** Converts a single parse tree node into a typed {@link Expression}. */
    protected Expression expression(ParseTree ctx) {
        return typedParsing(ctx, Expression.class);
    }

    /** Converts a list of parser contexts into their corresponding expressions. */
    protected List<Expression> expressions(List<? extends ParserRuleContext> contexts) {
        return visitList(contexts, Expression.class);
    }

    @Override
    public Expression visitSingleExpression(EqlBaseParser.SingleExpressionContext ctx) {
        return expression(ctx.expression());
    }

    @Override
    public Expression visitArithmeticUnary(ArithmeticUnaryContext ctx) {
        Expression expr = expression(ctx.valueExpression());
        Source source = source(ctx);
        int type = ctx.operator.getType();

        // unary plus is a no-op; only negation wraps the operand
        return type == EqlBaseParser.MINUS ? new Neg(source, expr) : expr;
    }

    /**
     * Maps a binary arithmetic parse node onto the matching QL operator node.
     */
    @Override
    public Expression visitArithmeticBinary(EqlBaseParser.ArithmeticBinaryContext ctx) {
        Expression left = expression(ctx.left);
        Expression right = expression(ctx.right);
        Source source = source(ctx);
        int type = ctx.operator.getType();

        switch (type) {
            case EqlBaseParser.ASTERISK:
                return new Mul(source, left, right);
            case EqlBaseParser.SLASH:
                return new Div(source, left, right);
            case EqlBaseParser.PERCENT:
                return new Mod(source, left, right);
            case EqlBaseParser.PLUS:
                return new Add(source, left, right);
            case EqlBaseParser.MINUS:
                return new Sub(source, left, right);
            default:
                throw new ParsingException(source, "Unknown arithmetic {}", source.text());
        }
    }

    @Override
    public Literal visitBooleanValue(EqlBaseParser.BooleanValueContext ctx) {
        Source source = source(ctx);
        // the grammar only admits TRUE or FALSE here, so absence of TRUE means FALSE
        return new Literal(source, ctx.TRUE() != null, DataTypes.BOOLEAN);
    }

    /**
     * Maps a comparison parse node (==, !=, &lt;, &lt;=, &gt;, &gt;=) onto
     * the matching QL comparison node.
     */
    @Override
    public Expression visitComparison(ComparisonContext ctx) {
        Expression left = expression(ctx.left);
        Expression right = expression(ctx.right);
        TerminalNode op = (TerminalNode) ctx.comparisonOperator().getChild(0);
        Source source = source(ctx);

        switch (op.getSymbol().getType()) {
            case EqlBaseParser.EQ:
                return new Equals(source, left, right);
            case EqlBaseParser.NEQ:
                return new NotEquals(source, left, right);
            case EqlBaseParser.LT:
                return new LessThan(source, left, right);
            case EqlBaseParser.LTE:
                return new LessThanOrEqual(source, left, right);
            case EqlBaseParser.GT:
                return new GreaterThan(source, left, right);
            case EqlBaseParser.GTE:
                return new GreaterThanOrEqual(source, left, right);
            default:
                throw new ParsingException(source, "Unknown operator {}", source.text());
        }
    }

    /**
     * Handles an optional IN-style predicate by expanding
     * {@code expr in (a, b)} into {@code expr == a or expr == b}
     * (negated when NOT is present).
     */
    @Override
    public Expression visitValueExpressionDefault(ValueExpressionDefaultContext ctx) {
        Expression expr = expression(ctx.primaryExpression());
        Source source = source(ctx);
        PredicateContext predicate = ctx.predicate();

        // no predicate attached - the primary expression stands on its own
        if (predicate == null) {
            return expr;
        }

        List<Expression> container = expressions(predicate.expression());

        // TODO: Add IN to QL and use that directly
        Expression checkInSet = null;

        for (Expression inner : container) {
            Expression termCheck = new Equals(source, expr, inner);
            checkInSet = checkInSet == null ? termCheck : new Or(source, checkInSet, termCheck);
        }

        return predicate.NOT() != null ? new Not(source, checkInSet) : checkInSet;
    }

    @Override
    public Expression visitDecimalLiteral(EqlBaseParser.DecimalLiteralContext ctx) {
        Source source = source(ctx);
        String text = ctx.getText();

        try {
            // rely on autoboxing instead of the redundant Double.valueOf(...) wrapper
            return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE);
        } catch (QlIllegalArgumentException e) {
            throw new ParsingException(source, e.getMessage());
        }
    }

    @Override
    public Expression visitDereference(DereferenceContext ctx) {
        // a dotted field reference, resolved later during analysis
        return new UnresolvedAttribute(source(ctx), visitQualifiedName(ctx.qualifiedName()));
    }

    @Override
    public Function visitFunctionExpression(FunctionExpressionContext ctx) {
        Source source = source(ctx);
        String name = ctx.name.getText();
        List<Expression> arguments = expressions(ctx.expression());

        // function resolution (name lookup, arity checks) happens later
        return new UnresolvedFunction(source, name, UnresolvedFunction.ResolutionType.STANDARD, arguments);
    }

    /**
     * Parses an integer literal, falling back to a double when the value
     * overflows a long, and downsizing to int when it fits.
     */
    @Override
    public Literal visitIntegerLiteral(EqlBaseParser.IntegerLiteralContext ctx) {
        Source source = source(ctx);
        String text = ctx.getText();
        long value;

        try {
            // no explicit boxing needed - parse straight into the primitive
            value = StringUtils.parseLong(text);
        } catch (QlIllegalArgumentException e) {
            // if it's too large, then quietly try to parse as a float instead
            try {
                return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE);
            } catch (QlIllegalArgumentException ignored) {
                // intentionally ignored - fall through and report the
                // original (integer) parsing failure below
            }

            throw new ParsingException(source, e.getMessage());
        }

        Object val = Long.valueOf(value);
        DataType type = DataTypes.LONG;

        // try to downsize to int if possible (since that's the most common type)
        if ((int) value == value) {
            type = DataTypes.INTEGER;
            val = Integer.valueOf((int) value);
        }
        return new Literal(source, val, type);
    }

    @Override
    public Expression visitLogicalBinary(LogicalBinaryContext ctx) {
        int type = ctx.operator.getType();
        Source source = source(ctx);
        Expression left = expression(ctx.left);
        Expression right = expression(ctx.right);

        // the grammar only produces AND or OR here
        return type == EqlBaseParser.AND ? new And(source, left, right) : new Or(source, left, right);
    }

    @Override
    public Not visitLogicalNot(LogicalNotContext ctx) {
        return new Not(source(ctx), expression(ctx.booleanExpression()));
    }

    @Override
    public Literal visitNullLiteral(EqlBaseParser.NullLiteralContext ctx) {
        return new Literal(source(ctx), null, DataTypes.NULL);
    }

    @Override
    public Expression visitParenthesizedExpression(EqlBaseParser.ParenthesizedExpressionContext ctx) {
        // parentheses only group - no node of their own
        return expression(ctx.expression());
    }

    @Override
    public Literal visitString(EqlBaseParser.StringContext ctx) {
        return new Literal(source(ctx), unquoteString(ctx.getText()), DataTypes.KEYWORD);
    }
}

View File

@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.eql.parser;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.IdentifierContext;
import org.elasticsearch.xpack.eql.parser.EqlBaseParser.QualifiedNameContext;
abstract class IdentifierBuilder extends AbstractBuilder {
@ -14,7 +16,17 @@ abstract class IdentifierBuilder extends AbstractBuilder {
return ctx == null ? null : unquoteIdentifier(ctx.getText());
}
private static String unquoteIdentifier(String identifier) {
return identifier.replace("\"\"", "\"");
@Override
public String visitQualifiedName(QualifiedNameContext ctx) {
    // A null context means there is no qualified name to resolve.
    if (ctx == null) {
        return null;
    }

    // Joining the identifier parts with '.' is fine because array indexes
    // ([...]) have already been rejected earlier during parsing.
    return Strings.collectionToDelimitedString(visitList(ctx.identifier(), String.class), ".");
}
}
private static String unquoteIdentifier(String identifier) {
    // Backticks only quote an identifier - they carry no meaning of their
    // own, so removing every occurrence yields the raw name.
    final String quote = "`";
    return identifier.replace(quote, "");
}
}

View File

@ -0,0 +1,45 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.parser;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Literal;
import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.ql.expression.predicate.logical.And;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals;
import org.elasticsearch.xpack.ql.plan.logical.Filter;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.plan.logical.UnresolvedRelation;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.type.DataTypes;
/**
 * Builds logical plan nodes from parsed EQL statements. Currently handles only plain
 * event queries ("eventType where condition"); the event type is folded into the filter
 * condition as an extra equality predicate on the configured event-type field.
 */
public abstract class LogicalPlanBuilder extends ExpressionBuilder {
// per-request parser settings (field names for event type/timestamp, etc.)
private final ParserParams params;
public LogicalPlanBuilder(ParserParams params) {
this.params = params;
}
@Override
public LogicalPlan visitEventQuery(EqlBaseParser.EventQueryContext ctx) {
Source source = source(ctx);
Expression condition = expression(ctx.expression());
// when an explicit event type is given, prepend "<eventTypeField> == '<event>'"
// so the resulting filter is: eventMatch AND originalCondition
if (ctx.event != null) {
Source eventSource = source(ctx.event);
String eventName = visitIdentifier(ctx.event);
Literal eventValue = new Literal(eventSource, eventName, DataTypes.KEYWORD);
UnresolvedAttribute eventField = new UnresolvedAttribute(eventSource, params.fieldEventType());
Expression eventMatch = new Equals(eventSource, eventField, eventValue);
condition = new And(source, eventMatch, condition);
}
// relation is unresolved at this point; the analyzer binds it to the actual index later
return new Filter(source(ctx), new UnresolvedRelation(Source.EMPTY, null, "", false, ""), condition);
}
}

View File

@ -0,0 +1,58 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.parser;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.xpack.eql.action.RequestDefaults.FIELD_EVENT_TYPE;
import static org.elasticsearch.xpack.eql.action.RequestDefaults.FIELD_TIMESTAMP;
import static org.elasticsearch.xpack.eql.action.RequestDefaults.IMPLICIT_JOIN_KEY;
/**
 * Per-request settings handed to the EQL parser: the field names used for the
 * event type, timestamp and implicit join key, plus any positional query params.
 * Setters are fluent (return {@code this}) so calls can be chained; all values
 * start from the request-level defaults.
 */
public class ParserParams {

    private String fieldEventType = FIELD_EVENT_TYPE;
    private String fieldTimestamp = FIELD_TIMESTAMP;
    private String implicitJoinKey = IMPLICIT_JOIN_KEY;
    private List<Object> queryParams = emptyList();

    /** Name of the field holding the event type. */
    public String fieldEventType() {
        return fieldEventType;
    }

    public ParserParams fieldEventType(String name) {
        this.fieldEventType = name;
        return this;
    }

    /** Name of the field holding the event timestamp. */
    public String fieldTimestamp() {
        return fieldTimestamp;
    }

    public ParserParams fieldTimestamp(String name) {
        this.fieldTimestamp = name;
        return this;
    }

    /** Name of the field used as the implicit join key. */
    public String implicitJoinKey() {
        return implicitJoinKey;
    }

    public ParserParams implicitJoinKey(String name) {
        this.implicitJoinKey = name;
        return this;
    }

    /** Positional parameters supplied with the query (empty by default). */
    public List<Object> params() {
        return queryParams;
    }

    public ParserParams params(List<Object> values) {
        this.queryParams = values;
        return this;
    }
}

View File

@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.plan.physical;
import org.elasticsearch.xpack.eql.session.Executable;
import org.elasticsearch.xpack.ql.plan.QueryPlan;
import org.elasticsearch.xpack.ql.tree.Source;
import java.util.List;
/**
* A PhysicalPlan is "how" a LogicalPlan (the "what") actually gets translated into one or more queries.
*
* LogicalPlan = I want to get from DEN to SFO
* PhysicalPlan = take Delta, DEN to SJC, then SJC to SFO
*/
// Base class for all executable (physical) plan nodes. Subclasses must provide
// value-based equality - hashCode/equals are redeclared abstract to force this.
public abstract class PhysicalPlan extends QueryPlan<PhysicalPlan> implements Executable {
public PhysicalPlan(Source source, List<PhysicalPlan> children) {
super(source, children);
}
@Override
public abstract int hashCode();
@Override
public abstract boolean equals(Object obj);
}

View File

@ -0,0 +1,17 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.planner;
import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
/**
 * Translates an analyzed and optimized {@link LogicalPlan} into an executable
 * {@link PhysicalPlan}.
 */
public class Planner {

    /**
     * Plans the given logical plan for execution.
     *
     * @param plan the optimized logical plan to translate
     * @return the physical plan (once implemented)
     * @throws UnsupportedOperationException always - physical planning is not implemented yet
     */
    public PhysicalPlan plan(LogicalPlan plan) {
        // TODO: implement translation of logical plans into physical ones;
        // give the exception a message so failures are diagnosable upstream
        throw new UnsupportedOperationException("EQL physical planning is not implemented yet");
    }
}

View File

@ -8,20 +8,33 @@ package org.elasticsearch.xpack.eql.plugin;
import org.elasticsearch.Build;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.eql.action.EqlSearchAction;
import org.elasticsearch.xpack.eql.execution.PlanExecutor;
import org.elasticsearch.xpack.ql.index.IndexResolver;
import org.elasticsearch.xpack.ql.type.DefaultDataTypeRegistry;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;
@ -34,6 +47,20 @@ public class EqlPlugin extends Plugin implements ActionPlugin {
Setting.Property.NodeScope
);
@Override
public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry,
Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
return createComponents(client, clusterService.getClusterName().value(), namedWriteableRegistry);
}
private Collection<Object> createComponents(Client client, String clusterName, NamedWriteableRegistry namedWriteableRegistry) {
IndexResolver indexResolver = new IndexResolver(client, clusterName, DefaultDataTypeRegistry.INSTANCE);
PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, namedWriteableRegistry);
return Arrays.asList(planExecutor);
}
@Override
public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
@ -60,6 +87,11 @@ public class EqlPlugin extends Plugin implements ActionPlugin {
return Build.CURRENT.isSnapshot();
}
// TODO: this needs to be used by all plugin methods - including getActions and createComponents
private boolean isEnabled(Settings settings) {
return EQL_ENABLED_SETTING.get(settings);
}
@Override
public List<RestHandler> getRestHandlers(Settings settings,
RestController restController,
@ -69,10 +101,9 @@ public class EqlPlugin extends Plugin implements ActionPlugin {
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster) {
boolean enabled = EQL_ENABLED_SETTING.get(settings);
if (!enabled) {
if (isEnabled(settings) == false) {
return Collections.emptyList();
}
return Arrays.asList(new RestEqlSearchAction(restController));
}
}
}

View File

@ -12,6 +12,9 @@ import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
@ -21,7 +24,12 @@ import org.elasticsearch.xpack.core.security.SecurityContext;
import org.elasticsearch.xpack.eql.action.EqlSearchAction;
import org.elasticsearch.xpack.eql.action.EqlSearchRequest;
import org.elasticsearch.xpack.eql.action.EqlSearchResponse;
import org.elasticsearch.xpack.eql.execution.PlanExecutor;
import org.elasticsearch.xpack.eql.parser.ParserParams;
import org.elasticsearch.xpack.eql.session.Configuration;
import org.elasticsearch.xpack.eql.session.Results;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@ -29,28 +37,44 @@ import java.util.List;
public class TransportEqlSearchAction extends HandledTransportAction<EqlSearchRequest, EqlSearchResponse> {
private final SecurityContext securityContext;
private final ClusterService clusterService;
private final PlanExecutor planExecutor;
@Inject
public TransportEqlSearchAction(Settings settings, ClusterService clusterService, TransportService transportService,
ThreadPool threadPool, ActionFilters actionFilters) {
ThreadPool threadPool, ActionFilters actionFilters, PlanExecutor planExecutor) {
super(EqlSearchAction.NAME, transportService, actionFilters, EqlSearchRequest::new);
this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ?
new SecurityContext(settings, threadPool.getThreadContext()) : null;
this.clusterService = clusterService;
this.planExecutor = planExecutor;
}
@Override
protected void doExecute(Task task, EqlSearchRequest request, ActionListener<EqlSearchResponse> listener) {
operation(request, listener);
operation(planExecutor, request, username(securityContext), clusterName(clusterService), listener);
}
public static void operation(EqlSearchRequest request, ActionListener<EqlSearchResponse> listener) {
// TODO: implement parsing and querying
listener.onResponse(createResponse(request));
public static void operation(PlanExecutor planExecutor, EqlSearchRequest request, String username,
String clusterName, ActionListener<EqlSearchResponse> listener) {
// TODO: these should be sent by the client
ZoneId zoneId = DateUtils.of("Z");
QueryBuilder filter = request.query();
TimeValue timeout = TimeValue.timeValueSeconds(30);
boolean includeFrozen = request.indicesOptions().ignoreThrottled() == false;
String clientId = null;
ParserParams params = new ParserParams()
.fieldEventType(request.eventTypeField())
.fieldTimestamp(request.timestampField())
.implicitJoinKey(request.implicitJoinKeyField());
Configuration cfg = new Configuration(request.indices(), zoneId, username, clusterName, filter, timeout, includeFrozen, clientId);
//planExecutor.eql(cfg, request.rule(), params, wrap(r -> listener.onResponse(createResponse(r)), listener::onFailure));
listener.onResponse(createResponse(null));
}
static EqlSearchResponse createResponse(EqlSearchRequest request) {
static EqlSearchResponse createResponse(Results results) {
// Stubbed search response
// TODO: implement actual search response processing once the parser/executor is in place
List<SearchHit> events = Arrays.asList(
@ -63,4 +87,12 @@ public class TransportEqlSearchAction extends HandledTransportAction<EqlSearchRe
), null, new TotalHits(0, TotalHits.Relation.EQUAL_TO));
return new EqlSearchResponse(hits, 0, false);
}
}
static String username(SecurityContext securityContext) {
return securityContext != null && securityContext.getUser() != null ? securityContext.getUser().principal() : null;
}
static String clusterName(ClusterService clusterService) {
return clusterService.getClusterName().value();
}
}

View File

@ -0,0 +1,56 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.session;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import java.time.ZoneId;
/**
 * EQL-specific request configuration: target indices, optional post-filter,
 * request timeout, client identifier and frozen-index handling, layered on top
 * of the shared QL configuration (time zone, username, cluster name).
 */
public class Configuration extends org.elasticsearch.xpack.ql.session.Configuration {
private final String[] indices;
private final TimeValue requestTimeout;
private final String clientId;
private final boolean includeFrozenIndices;
// NOTE(review): non-final unlike the other fields - presumably intended to be
// settable after construction, though no setter exists here; confirm intent
@Nullable
private QueryBuilder filter;
public Configuration(String[] indices, ZoneId zi, String username, String clusterName, QueryBuilder filter,
TimeValue requestTimeout, boolean includeFrozen, String clientId) {
super(zi, username, clusterName);
this.indices = indices;
this.filter = filter;
this.requestTimeout = requestTimeout;
this.clientId = clientId;
this.includeFrozenIndices = includeFrozen;
}
// indices the query runs against (returned as-is, not copied)
public String[] indices() {
return indices;
}
public TimeValue requestTimeout() {
return requestTimeout;
}
// optional additional filter applied to the query; may be null
public QueryBuilder filter() {
return filter;
}
public String clientId() {
return clientId;
}
// whether frozen indices are included in resolution/search
public boolean includeFrozen() {
return includeFrozenIndices;
}
}

View File

@ -0,0 +1,100 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.session;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.eql.analysis.Analyzer;
import org.elasticsearch.xpack.eql.analysis.PreAnalyzer;
import org.elasticsearch.xpack.eql.execution.PlanExecutor;
import org.elasticsearch.xpack.eql.optimizer.Optimizer;
import org.elasticsearch.xpack.eql.parser.EqlParser;
import org.elasticsearch.xpack.eql.parser.ParserParams;
import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan;
import org.elasticsearch.xpack.eql.planner.Planner;
import org.elasticsearch.xpack.ql.index.IndexResolver;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import static org.elasticsearch.action.ActionListener.wrap;
/**
 * Drives a single EQL query through the pipeline:
 * parse -> pre-analyze (index resolution) -> analyze -> optimize -> plan -> execute.
 * Each stage is asynchronous and chained through ActionListeners.
 */
public class EqlSession {
private final Client client;
private final Configuration configuration;
private final IndexResolver indexResolver;
private final PreAnalyzer preAnalyzer;
private final Analyzer analyzer;
private final Optimizer optimizer;
private final Planner planner;
// NOTE(review): the planExecutor parameter is accepted but never stored or used
public EqlSession(Client client, Configuration cfg, IndexResolver indexResolver, PreAnalyzer preAnalyzer, Analyzer analyzer,
Optimizer optimizer, Planner planner, PlanExecutor planExecutor) {
this.client = client;
this.configuration = cfg;
this.indexResolver = indexResolver;
this.preAnalyzer = preAnalyzer;
this.analyzer = analyzer;
this.optimizer = optimizer;
this.planner = planner;
}
public Client client() {
return client;
}
public Optimizer optimizer() {
return optimizer;
}
public Configuration configuration() {
return configuration;
}
// Runs the full pipeline and executes the resulting physical plan.
public void eql(String eql, ParserParams params, ActionListener<Results> listener) {
eqlExecutable(eql, params, wrap(e -> e.execute(this, listener), listener::onFailure));
}
// Produces the executable physical plan without running it; parse errors are
// routed to the listener rather than thrown.
public void eqlExecutable(String eql, ParserParams params, ActionListener<PhysicalPlan> listener) {
try {
physicalPlan(doParse(eql, params), listener);
} catch (Exception ex) {
listener.onFailure(ex);
}
}
// optimize then translate to a physical plan
public void physicalPlan(LogicalPlan optimized, ActionListener<PhysicalPlan> listener) {
optimizedPlan(optimized, wrap(o -> listener.onResponse(planner.plan(o)), listener::onFailure));
}
// analyze then optimize
public void optimizedPlan(LogicalPlan verified, ActionListener<LogicalPlan> listener) {
analyzedPlan(verified, wrap(v -> listener.onResponse(optimizer.optimize(v)), listener::onFailure));
}
// Resolves indices then analyzes; short-circuits if the plan is already analyzed.
public void analyzedPlan(LogicalPlan parsed, ActionListener<LogicalPlan> listener) {
if (parsed.analyzed()) {
listener.onResponse(parsed);
return;
}
preAnalyze(parsed, wrap(p -> listener.onResponse(analyzer.analyze(p)), listener::onFailure));
}
// Resolves the configured indices into a merged mapping and attaches it to the plan.
// NOTE(review): the <T> type parameter is unused and could be dropped.
private <T> void preAnalyze(LogicalPlan parsed, ActionListener<LogicalPlan> listener) {
String indexWildcard = Strings.arrayToCommaDelimitedString(configuration.indices());
indexResolver.resolveAsMergedMapping(indexWildcard, null, configuration.includeFrozen(), wrap(r -> {
listener.onResponse(preAnalyzer.preAnalyze(parsed, r));
}, listener::onFailure));
}
private LogicalPlan doParse(String eql, ParserParams params) {
return new EqlParser().createStatement(eql, params);
}
}

View File

@ -0,0 +1,19 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.session;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.ql.expression.Attribute;
import java.util.List;
// Contract for anything that can be executed within an EqlSession, producing
// Results asynchronously and declaring the attributes (schema) of its output.
public interface Executable {
List<Attribute> output();
void execute(EqlSession session, ActionListener<Results> listener);
}

View File

@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.session;
import org.apache.lucene.search.TotalHits;
import java.util.List;
/**
 * Immutable pair of the total hit count and the list of result objects
 * produced by an EQL query.
 */
public class Results {

    private final TotalHits hitCount;
    private final List<Object> entries;

    public Results(TotalHits hitCount, List<Object> entries) {
        this.hitCount = hitCount;
        this.entries = entries;
    }

    /** Total hits associated with these results. */
    public TotalHits totalHits() {
        return hitCount;
    }

    /** The result objects themselves (returned as-is, not copied). */
    public List<Object> results() {
        return entries;
    }
}

View File

@ -0,0 +1,25 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.session;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.search.SearchHit;
import java.util.List;
// A matched sequence: an ordered list of (join-key, hits) pairs, one per
// sequence stage. The Tuple's first element is the key value the stage was
// joined on; the second is the hits that matched that stage.
public class Sequence {
private final List<Tuple<Object, List<SearchHit>>> events;
public Sequence(List<Tuple<Object, List<SearchHit>>> events) {
this.events = events;
}
public List<Tuple<Object, List<SearchHit>>> events() {
return events;
}
}

View File

@ -0,0 +1,34 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.eql.session.Configuration;
import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength;
import static org.elasticsearch.test.ESTestCase.randomBoolean;
import static org.elasticsearch.test.ESTestCase.randomNonNegativeLong;
import static org.elasticsearch.test.ESTestCase.randomZone;
// Test helpers for building EQL Configuration instances.
public final class EqlTestUtils {
// utility class - no instances
private EqlTestUtils() {}
// fixed configuration for tests that don't care about specific values:
// single dummy index, UTC, 30s timeout, frozen indices excluded
public static final Configuration TEST_CFG = new Configuration(new String[] { "none" }, org.elasticsearch.xpack.ql.util.DateUtils.UTC,
"nobody", "cluster", null, TimeValue.timeValueSeconds(30), false, "");
// fully randomized configuration (filter always null)
public static Configuration randomConfiguration() {
return new Configuration(new String[] {randomAlphaOfLength(16)},
randomZone(),
randomAlphaOfLength(16),
randomAlphaOfLength(16),
null,
new TimeValue(randomNonNegativeLong()),
randomBoolean(),
randomAlphaOfLength(16));
}
}

View File

@ -0,0 +1,65 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.analysis;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.eql.expression.function.EqlFunctionRegistry;
import org.elasticsearch.xpack.eql.parser.EqlParser;
import org.elasticsearch.xpack.ql.index.EsIndex;
import org.elasticsearch.xpack.ql.index.IndexResolution;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.type.EsField;
import org.elasticsearch.xpack.ql.type.TypesTests;
import java.util.Map;
// Tests that the analyzer/verifier rejects invalid field references with
// helpful "did you mean" suggestions, using the default test mapping.
public class VerifierTests extends ESTestCase {
private EqlParser parser = new EqlParser();
private IndexResolution index = IndexResolution.valid(new EsIndex("test", loadEqlMapping("mapping-default.json")));
// Parses and fully analyzes the query against the given index resolution.
private LogicalPlan accept(IndexResolution resolution, String eql) {
PreAnalyzer preAnalyzer = new PreAnalyzer();
Analyzer analyzer = new Analyzer(new EqlFunctionRegistry(), new Verifier());
return analyzer.analyze(preAnalyzer.preAnalyze(parser.createStatement(eql), resolution));
}
private LogicalPlan accept(String eql) {
return accept(index, eql);
}
private String error(String sql) {
return error(index, sql);
}
// Expects verification to fail and returns the message with the
// "Found 1 problem(s)\nline " prefix stripped, leaving "col:row: message".
private String error(IndexResolution resolution, String eql) {
VerificationException e = expectThrows(VerificationException.class, () -> accept(resolution, eql));
assertTrue(e.getMessage().startsWith("Found "));
String header = "Found 1 problem(s)\nline ";
return e.getMessage().substring(header.length());
}
public void testBasicQuery() {
accept("foo where true");
}
public void testMissingColumn() {
assertEquals("1:11: Unknown column [xxx]", error("foo where xxx == 100"));
}
public void testMisspelledColumn() {
assertEquals("1:11: Unknown column [md4], did you mean [md5]?", error("foo where md4 == 1"));
}
public void testMisspelledColumnWithMultipleOptions() {
assertEquals("1:11: Unknown column [pib], did you mean any of [pid, ppid]?", error("foo where pib == 1"));
}
private static Map<String, EsField> loadEqlMapping(String name) {
return TypesTests.loadMapping(name);
}
}

View File

@ -0,0 +1,174 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.parser;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Literal;
import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction;
import org.elasticsearch.xpack.ql.expression.predicate.logical.And;
import org.elasticsearch.xpack.ql.expression.predicate.logical.Or;
import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Neg;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThan;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThanOrEqual;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThanOrEqual;
import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NotEquals;
import org.elasticsearch.xpack.ql.type.DataTypes;
import java.util.Arrays;
import java.util.List;
import static org.elasticsearch.xpack.eql.parser.AbstractBuilder.unquoteString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
// Unit tests for EQL expression parsing: string (un)escaping, literals,
// numbers, attributes, functions, comparisons, boolean logic and "in" sets.
public class ExpressionTests extends ESTestCase {
private final EqlParser parser = new EqlParser();
// parses a standalone expression (no enclosing query)
public Expression expr(String source) {
return parser.createExpression(source);
}
public void testStrings() throws Exception {
assertEquals("hello\"world", unquoteString("'hello\"world'"));
assertEquals("hello'world", unquoteString("\"hello'world\""));
assertEquals("hello\nworld", unquoteString("'hello\\nworld'"));
assertEquals("hello\\\nworld", unquoteString("'hello\\\\\\nworld'"));
assertEquals("hello\\\"world", unquoteString("'hello\\\\\\\"world'"));
// test for unescaped strings: ?"...." or ?'....'
// (the ? prefix disables escape processing, so backslashes survive verbatim)
assertEquals("hello\"world", unquoteString("?'hello\"world'"));
assertEquals("hello\\\"world", unquoteString("?'hello\\\"world'"));
assertEquals("hello'world", unquoteString("?\"hello'world\""));
assertEquals("hello\\nworld", unquoteString("?'hello\\nworld'"));
assertEquals("hello\\\\nworld", unquoteString("?'hello\\\\nworld'"));
assertEquals("hello\\\\\\nworld", unquoteString("?'hello\\\\\\nworld'"));
assertEquals("hello\\\\\\\"world", unquoteString("?'hello\\\\\\\"world'"));
}
public void testLiterals() {
assertEquals(Literal.TRUE, expr("true"));
assertEquals(Literal.FALSE, expr("false"));
assertEquals(Literal.NULL, expr("null"));
}
public void testSingleQuotedString() {
// "hello \" world"
Expression parsed = expr("'hello \\' world!'");
Expression expected = new Literal(null, "hello ' world!", DataTypes.KEYWORD);
assertEquals(expected, parsed);
}
public void testDoubleQuotedString() {
// "hello \" world"
Expression parsed = expr("\"hello \\\" world!\"");
Expression expected = new Literal(null, "hello \" world!", DataTypes.KEYWORD);
assertEquals(expected, parsed);
}
public void testSingleQuotedUnescapedString() {
// "hello \" world"
Expression parsed = expr("?'hello \\' world!'");
Expression expected = new Literal(null, "hello \\' world!", DataTypes.KEYWORD);
assertEquals(expected, parsed);
}
public void testDoubleQuotedUnescapedString() {
// "hello \" world"
Expression parsed = expr("?\"hello \\\" world!\"");
Expression expected = new Literal(null, "hello \\\" world!", DataTypes.KEYWORD);
assertEquals(expected, parsed);
}
public void testNumbers() {
// values beyond int range parse as LONG; exponents/decimals as DOUBLE
assertEquals(new Literal(null, 8589934592L, DataTypes.LONG), expr("8589934592"));
assertEquals(new Literal(null, 5, DataTypes.INTEGER), expr("5"));
assertEquals(new Literal(null, 5e14, DataTypes.DOUBLE), expr("5e14"));
assertEquals(new Literal(null, 5.2, DataTypes.DOUBLE), expr("5.2"));
// unary minus parses as a Neg node, not a negative literal
Expression parsed = expr("-5.2");
Expression expected = new Neg(null, new Literal(null, 5.2, DataTypes.DOUBLE));
assertEquals(expected, parsed);
}
public void testBackQuotedAttribute() {
// back-quoted parts are joined with '.' into a single unqualified name
String quote = "`";
String qualifier = "table";
String name = "@timestamp";
Expression exp = expr(quote + qualifier + quote + "." + quote + name + quote);
assertThat(exp, instanceOf(UnresolvedAttribute.class));
UnresolvedAttribute ua = (UnresolvedAttribute) exp;
assertThat(ua.name(), equalTo(qualifier + "." + name));
assertThat(ua.qualifiedName(), equalTo(qualifier + "." + name));
assertThat(ua.qualifier(), is(nullValue()));
}
public void testFunctions() {
List<Expression> arguments = Arrays.asList(
new UnresolvedAttribute(null, "some.field"),
new Literal(null, "test string", DataTypes.KEYWORD)
);
UnresolvedFunction.ResolutionType resolutionType = UnresolvedFunction.ResolutionType.STANDARD;
Expression expected = new UnresolvedFunction(null, "concat", resolutionType, arguments);
assertEquals(expected, expr("concat(some.field, 'test string')"));
}
public void testComparison() {
String fieldText = "field";
String valueText = "2.0";
Expression field = expr(fieldText);
Expression value = expr(valueText);
assertEquals(new Equals(null, field, value), expr(fieldText + "==" + valueText));
assertEquals(new NotEquals(null, field, value), expr(fieldText + "!=" + valueText));
assertEquals(new LessThanOrEqual(null, field, value), expr(fieldText + "<=" + valueText));
assertEquals(new GreaterThanOrEqual(null, field, value), expr(fieldText + ">=" + valueText));
assertEquals(new GreaterThan(null, field, value), expr(fieldText + ">" + valueText));
assertEquals(new LessThan(null, field, value), expr(fieldText + "<" + valueText));
}
public void testBoolean() {
String leftText = "process_name == 'net.exe'";
String rightText = "command_line == '* localgroup*'";
Expression lhs = expr(leftText);
Expression rhs = expr(rightText);
Expression booleanAnd = expr(leftText + " and " + rightText);
assertEquals(new And(null, lhs, rhs), booleanAnd);
Expression booleanOr = expr(leftText + " or " + rightText);
assertEquals(new Or(null, lhs, rhs), booleanOr);
}
public void testInSet() {
// "in" desugars into a chain of equalities; "not in" negates the chain
assertEquals(
expr("name in ('net.exe')"),
expr("name == 'net.exe'")
);
assertEquals(
expr("name in ('net.exe', 'whoami.exe', 'hostname.exe')"),
expr("name == 'net.exe' or name == 'whoami.exe' or name == 'hostname.exe'")
);
assertEquals(
expr("name not in ('net.exe', 'whoami.exe', 'hostname.exe')"),
expr("not (name == 'net.exe' or name == 'whoami.exe' or name == 'hostname.exe')")
);
}
}

View File

@ -28,21 +28,27 @@ import java.util.Objects;
*/
public class GrammarTests extends ESTestCase {
public void testGrammar() throws Exception {
public void testSupportedQueries() throws Exception {
EqlParser parser = new EqlParser();
List<Tuple<String, Integer>> lines = readQueries("/grammar-queries.eql");
List<Tuple<String, Integer>> lines = readQueries("/queries-supported.eql");
for (Tuple<String, Integer> line : lines) {
String q = line.v1();
try {
parser.createStatement(q);
} catch (ParsingException pe) {
if (pe.getErrorMessage().startsWith("Does not know how to handle")) {
// ignore for now
}
else {
throw new ParsingException(new Source(pe.getLineNumber() + line.v2() - 1, pe.getColumnNumber(), q),
pe.getErrorMessage() + " inside statement <{}>", q);
}
parser.createStatement(q);
}
}
public void testUnsupportedQueries() throws Exception {
EqlParser parser = new EqlParser();
List<Tuple<String, Integer>> lines = readQueries("/queries-unsupported.eql");
for (Tuple<String, Integer> line : lines) {
String q = line.v1();
ParsingException pe = expectThrows(
ParsingException.class,
"Query not identified as unsupported: " + q,
() -> parser.createStatement(q));
if (!pe.getErrorMessage().contains("supported")) {
throw new ParsingException(new Source(pe.getLineNumber() + line.v2() - 1, pe.getColumnNumber(), q),
pe.getErrorMessage() + " inside statement <{}>", q);
}
}
}
@ -67,6 +73,8 @@ public class GrammarTests extends ESTestCase {
query.setLength(query.length() - 1);
queries.add(new Tuple<>(query.toString(), lineNumber));
query.setLength(0);
} else {
query.append("\n");
}
}
lineNumber++;

View File

@ -0,0 +1,39 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.eql.parser;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.plan.logical.Filter;
import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.ql.plan.logical.UnresolvedRelation;
import org.elasticsearch.xpack.ql.tree.Source;
// Verifies that event queries parse into a Filter over an unresolved relation,
// with the event type folded in as an extra equality on the event-type field.
public class LogicalPlanTests extends ESTestCase {
private final EqlParser parser = new EqlParser();
public Expression expr(String source) {
return parser.createExpression(source);
}
public void testEventQuery() {
LogicalPlan fullQuery = parser.createStatement("process where process_name == 'net.exe'");
// default event-type field is "event_type"
Expression fullExpression = expr("event_type == 'process' and process_name == 'net.exe'");
assertEquals(fullQuery, new Filter(Source.EMPTY, new UnresolvedRelation(Source.EMPTY, null, "", false, ""), fullExpression));
}
public void testParameterizedEventQuery() {
// a custom event-type field name from ParserParams replaces the default
ParserParams params = new ParserParams().fieldEventType("myCustomEvent");
LogicalPlan fullQuery = parser.createStatement("process where process_name == 'net.exe'", params);
Expression fullExpression = expr("myCustomEvent == 'process' and process_name == 'net.exe'");
assertEquals(fullQuery, new Filter(Source.EMPTY, new UnresolvedRelation(Source.EMPTY, null, "", false, ""), fullExpression));
}
}

View File

@ -1,95 +0,0 @@
process where process_name == "svchost.exe" and command_line != "* -k *";
process where process_name in ('ipconfig.exe', 'netstat.exe', 'systeminfo.exe', 'route.exe');
process where subtype.create and wildcard(command_line, "*.ost *", "*.pst *")
;
process where subtype.create and
process_name == "attrib.exe" and command_line == "* +h*"
;
file where file_name == "*Library/Preferences/*.plist";
//
// Pipes
//
process where true | count;
process where true | count process_name;
process where true | count parent_process_name, process_name;
process where true | unique process_name;
process where true | unique process_name, command_line;
network where true
| unique destination_address, destination_port
| filter timestamp_utc >= "2018-05-01";
process where true | unique_count process_name | filter count < 5;
process where process_name == "powershell.exe"
| unique command_line
| head 50
;
security where event_id == 4624
| tail 10
;
file where true | sort file_name
;
network where total_out_bytes > 100000000
| sort total_out_bytes
| tail 5
;
//
// Sequences
//
sequence by user_name
[process where process_name == "whoami"]
[process where process_name == "hostname"]
[process where process_name == "ifconfig"]
;
sequence with maxspan=30s
[network where destination_port==3389 and event_subtype_full="*_accept_event*"]
[security where event_id in (4624, 4625) and logon_type == 10]
;
sequence with maxspan=30s
[network where destination_port==3389 and event_subtype_full="*_accept_event"] by source_address
[security where event_id in (4624, 4625) and logon_type == 10] by ip_address
;
sequence with maxspan=5m
[ file where file_name == "*.exe"] by user_name, file_path
[ process where true] by user_name, process_path
;
sequence by user_name with maxspan=5m
[ file where file_name == "*.exe"] by file_path
[ process where true] by process_path
;
//
// Joins
//
join by source_ip, destination_ip
[network where destination_port == 3389] // RDP
[network where destination_port == 135] // RPC
[network where destination_port == 445] // SMB
;
join by pid
[process where true]
[network where true]
[registry where true]
[file where true]
until [process where event_subtype_full == "termination_event"]
;

View File

@ -0,0 +1,55 @@
{
"properties" : {
"command_line" : {
"type" : "keyword"
},
"event_type" : {
"type" : "keyword"
},
"md5" : {
"type" : "keyword"
},
"parent_process_name": {
"type" : "keyword"
},
"parent_process_path": {
"type" : "keyword"
},
"pid" : {
"type" : "long"
},
"ppid" : {
"type" : "long"
},
"process_name": {
"type" : "keyword"
},
"process_path": {
"type" : "keyword"
},
"subtype" : {
"type" : "keyword"
},
"timestamp" : {
"type" : "date"
},
"user" : {
"type" : "keyword"
},
"user_name" : {
"type" : "keyword"
},
"user_domain": {
"type" : "keyword"
},
"hostname" : {
"type" : "text",
"fields" : {
"keyword" : {
"type" : "keyword",
"ignore_above" : 256
}
}
}
}
}

View File

@ -0,0 +1,332 @@
process where process_name == "svchost.exe" and command_line != "* -k *";
process where process_name in ('ipconfig.exe', 'netstat.exe', 'systeminfo.exe', 'route.exe');
process where subtype.create and wildcard(command_line, "*.ost *", "*.pst *")
;
process where subtype.create and
process_name == "attrib.exe" and command_line == "* +h*"
;
file where file_name == "*Library/Preferences/*.plist";
/* UNIT TESTS FROM
* https://github.com/endgameinc/eql/blob/master/tests/test_parser.py
*/
file where true;
file where true and true;
file where false or true;
registry where not pid;
process where process_name == "net.exe" and command_line == "* user*.exe";
process where command_line == "~!@#$%^&*();'[]{}\\|<>?,./:\"-= ' ";
process where
pid == 4;
process where process_name in ("net.exe", "cmd.exe", "at.exe");
process where command_line == "*.exe *admin*" or command_line == "* a b*";
process where pid in (1,2,3,4,5,6,7,8) and abc == 100 and def == 200 and ghi == 300 and jkl == x;
process where ppid != pid;
image_load where not x != y;
image_load where not x == y;
image_load where not not not not x < y;
image_load where not x <= y;
image_load where not x >= y;
image_load where not x > y;
process where _leadingUnderscore == 100;
network where 1 * 2 + 3 * 4 + 10 / 2 == 2 + 12 + 5;
file where 1 - -2;
file where 1 + (-2);
file where 1 * (-2);
file where 3 * -length(file_path);
network where a * b + c * d + e / f == g + h + i;
network where a * (b + c * d) + e / f == g + h + i;
process where pid == 4 or pid == 5 or pid == 6 or pid == 7 or pid == 8;
network where pid == 0 or pid == 4 or (ppid == 0 or ppid = 4) or (abc == defgh) and process_name == "*" ;
network where pid = 4;
registry where a.b;
registry where a.b.c.d.e;
process where a > 100000000000000000000000000000000;
/* TESTS FROM
* https://raw.githubusercontent.com/endgameinc/eql/master/eql/etc/test_queries.toml
*/
process where serial_event_id = 1;
process where serial_event_id < 4;
process where false;
process where missing_field != null;
process where process_name == "impossible name" or (serial_event_id < 4.5 and serial_event_id >= 3.1)
;
process where serial_event_id<=8 and serial_event_id > 7
;
process where exit_code >= 0;
process where 0 <= exit_code;
process where exit_code <= 0;
process where exit_code < 1;
process where exit_code > -1;
process where -1 < exit_code;
process where exit_code > 0;
process where exit_code < 0;
process where 0 < exit_code;
process where 0 > exit_code;
process where (serial_event_id<=8 and serial_event_id > 7) and (opcode=3 and opcode>2);
process where (serial_event_id<9 and serial_event_id >= 7) or (opcode == pid);
registry where key_path == "*\\MACHINE\\SAM\\SAM\\*\\Account\\Us*ers\\00*03E9\\F";
process where process_path == "*\\red_ttp\\wininit.*" and opcode in (0,1,2,3,4);
file where file_path="*\\red_ttp\\winin*.*"
and opcode in (0,1,2) and user_name="vagrant"
;
file where file_path="*\\red_ttp\\winin*.*"
and opcode not in (0,1,2) and user_name="vagrant"
;
file where file_path="*\\red_ttp\\winin*.*"
and opcode not in (3, 4, 5, 6 ,7) and user_name="vagrant"
;
file where file_name in ("wininit.exe", "lsass.exe") and opcode == 2
;
process where opcode in (1,3) and process_name in (parent_process_name, "SYSTEM")
;
process where fake_field == "*";
registry where invalid_field_name != null;
registry where length(bad_field) > 0
;
process where opcode == 1
and process_name in ("net.exe", "net1.exe")
and not (parent_process_name == "net.exe"
and process_name == "net1.exe")
and command_line == "*group *admin*" and command_line != "* /add*";
process where process_name = "python.exe";
process where command_line == "*%*" ;
process where command_line == "*%*%*" ;
process where command_line == "%*%*" ;
process where match(?'.*?net1\s+localgroup\s+.*?', command_line)
;
process where match(?'.*?net1\s+\w+\s+.*?', command_line)
;
process where match(?'.*?net1\s+\w{4,15}\s+.*?', command_line)
;
process where match(?'.*?net1\s+\w{4,15}\s+.*?', command_line)
;
process where match(?'.*?net1\s+[localgrup]{4,15}\s+.*?', command_line)
;
file where opcode=0 and startsWith(file_name, 'exploRER.')
;
file where opcode=0 and startsWith(file_name, 'expLORER.exe')
;
file where opcode=0 and endsWith(file_name, 'loREr.exe');
file where opcode=0 and startsWith(file_name, 'explORER.EXE');
file where opcode=0 and startsWith('explorer.exeaaaaaaaa', file_name);
file where opcode=0 and serial_event_id = 88 and startsWith('explorer.exeaAAAA', 'EXPLORER.exe');
file where opcode=0 and stringContains('ABCDEFGHIexplorer.exeJKLMNOP', file_name)
;
file where opcode=0 and indexOf(file_name, 'plore') == 2 and not indexOf(file_name, '.pf')
;
file where opcode=0 and indexOf(file_name, 'explorer.') and indexOf(file_name, 'plore', 100)
;
file where opcode=0 and indexOf(file_name, 'plorer.', 0) == 2;
file where opcode=0 and indexOf(file_name, 'plorer.', 2);
file where opcode=0 and indexOf(file_name, 'plorer.', 4);
file where opcode=0 and indexOf(file_name, 'thing that never happened');
file where opcode=0 and indexOf(file_name, 'plorer.', 2) == 2;
file where opcode=0 and indexOf(file_name, 'explorer.', 0) == 0;
file where serial_event_id=88 and substring(file_name, 0, 4) == 'expl'
;
file where serial_event_id=88 and substring(file_name, 1, 3) == 'xp'
;
file where serial_event_id=88 and substring(file_name, -4) == '.exe'
;
file where serial_event_id=88 and substring(file_name, -4, -1) == '.ex'
;
process where add(serial_event_id, 0) == 1 and add(0, 1) == serial_event_id;
process where subtract(serial_event_id, -5) == 6;
process where multiply(6, serial_event_id) == 30 and divide(30, 4.0) == 7.5;
process where modulo(11, add(serial_event_id, 1)) == serial_event_id;
process where serial_event_id == number('5');
process where serial_event_id == number('0x32', 16);
process where serial_event_id == number('32', 16);
process where number(serial_event_id) == number(5);
process where concat(serial_event_id, ':', process_name, opcode) == '5:winINIT.exe3'
;
// undocumented function -- removing support
// network where safe(divide(process_name, process_name))
//;
file where serial_event_id == 82 and (true == (process_name in ('svchost.EXE', 'bad.exe', 'bad2.exe')))
;
file where serial_event_id - 1 == 81;
file where serial_event_id + 1 == 83;
file where serial_event_id * 2 == 164;
file where serial_event_id / 2 == 41;
file where serial_event_id % 40 == 2;
process where between(process_name, "s", "e") == "yst"
;
process where between(process_name, "s", "e", false) == "yst"
;
process where between(process_name, "s", "e", false, true) == "yst"
;
process where between(process_name, "s", "e", false, true) == "t"
;
process where between(process_name, "S", "e", false, true) == "yst"
;
process where between(process_name, "s", "e", true) == "ystem Idle Proc"
;
file where between(file_path, "dev", ".json", false) == "\\testlogs\\something"
;
file where between(file_path, "dev", ".json", true) == "\\testlogs\\something"
;
network where cidrMatch(source_address, "10.6.48.157/8")
;
network where cidrMatch(source_address, "192.168.0.0/16")
;
network where cidrMatch(source_address, "192.168.0.0/16", "10.6.48.157/8")
;
network where cidrMatch(source_address, "0.0.0.0/0")
;
process where length(between(process_name, 'g', 'e')) > 0
;
process where length(between(process_name, 'g', 'z')) > 0
;
// additional queries added for the elasticsearch specific implementation
// dots will still be interpreted by ES per usual
something where `my-hyphenated-field` == "value";
something where `my-hyphenated-field.with.nested.dots` == "value";
something where `@timestamp` == "2020-01-01 00:00:00";
something where `some escaped identifier` == "blah";
something where `some escaped identifier` == "blah";
something where `some.escaped.identifier` == "blah";

View File

@ -0,0 +1,700 @@
//
// Pipes
//
process where true | count;
process where true | count process_name;
process where true | count parent_process_name, process_name;
process where true | unique process_name;
process where true | unique process_name, command_line;
network where true
| unique destination_address, destination_port
| filter timestamp_utc >= "2018-05-01";
process where true | unique_count process_name | filter count < 5;
process where process_name == "powershell.exe"
| unique command_line
| head 50
;
security where event_id == 4624
| tail 10
;
file where true | sort file_name
;
network where total_out_bytes > 100000000
| sort total_out_bytes
| tail 5
;
//
// Sequences
//
sequence by user_name
[process where process_name == "whoami"]
[process where process_name == "hostname"]
[process where process_name == "ifconfig"]
;
sequence with maxspan=30s
[network where destination_port==3389 and event_subtype_full="*_accept_event*"]
[security where event_id in (4624, 4625) and logon_type == 10]
;
sequence with maxspan=30s
[network where destination_port==3389 and event_subtype_full="*_accept_event"] by source_address
[security where event_id in (4624, 4625) and logon_type == 10] by ip_address
;
sequence with maxspan=5m
[file where file_name == "*.exe"] by user_name, file_path
[process where true] by user_name, process_path
;
sequence by user_name with maxspan=5m
[file where file_name == "*.exe"] by file_path
[process where true] by process_path
;
//
// Joins
//
join by source_ip, destination_ip
[network where destination_port == 3389] // RDP
[network where destination_port == 135] // RPC
[network where destination_port == 445] // SMB
;
join by pid
[process where true]
[network where true]
[registry where true]
[file where true]
until [process where event_subtype_full == "termination_event"]
;
process where descendant of [process where process_name == "lsass.exe"] and process_name == "cmd.exe";
join [process where process_name == "*"] [file where file_path == "*"
];
join by pid [process where name == "*"] [file where path == "*"] until [process where opcode == 2];
sequence [process where name == "*"] [file where path == "*"] until [process where opcode == 2];
sequence by pid [process where name == "*"] [file where path == "*"] until [process where opcode == 2];
join [process where process_name == "*"] by process_path [file where file_path == "*"] by image_path;
sequence [process where process_name == "*"] by process_path [file where file_path == "*"] by image_path;
sequence by pid [process where process_name == "*"] [file where file_path == "*"];
sequence by pid with maxspan=200 [process where process_name == "*" ] [file where file_path == "*"];
sequence by pid with maxspan=2s [process where process_name == "*" ] [file where file_path == "*"];
sequence by pid with maxspan=2sec [process where process_name == "*" ] [file where file_path == "*"];
sequence by pid with maxspan=2seconds [process where process_name == "*" ] [file where file_path == "*"];
sequence with maxspan=2.5m [process where x == x] by pid [file where file_path == "*"] by ppid;
sequence by pid with maxspan=2.0h [process where process_name == "*"] [file where file_path == "*"];
sequence by pid with maxspan=2.0h [process where process_name == "*"] [file where file_path == "*"];
sequence by pid with maxspan=1.0075d [process where process_name == "*"] [file where file_path == "*"];
dns where pid == 100 | head 100 | tail 50 | unique pid;
network where pid == 100 | unique command_line | count;
security where user_domain == "endgame" | count user_name, a, b | tail 5;
process where 1==1 | count user_name, unique_pid, concat(field2,a,bc);
process where 1==1 | unique user_name, concat(field2,a,bc), field2;
process where true | filter true;
process where 1==1 | filter abc == def;
process where 1==1 | filter abc == def and 1 != 2;
process where 1==1 | count process_name | filter percent > 0.5;
any where true | unique a, b, c | sort a, b, c | count;
any where true | unique a, b, c | sort a, b, c | count;
any where true | unique a, b, c | sort a,b,c | count;
file where child of [registry where true];
file where event of [registry where true];
file where event of [registry where true];
file where descendant of [registry where true];
sequence by field1 [file where true] by f1 [process where true] by f1;
sequence by a,b,c,d [file where true] by f1,f2 [process where true] by f1,f2;
sequence [file where 1] by f1,f2 [process where 1] by f1,f2 until [process where 1] by f1,f2;
sequence by f [file where true] by a,b [process where true] by c,d until [process where 1] by e,f;
//sequence by unique_pid [process where true] [file where true] fork;
sequence by unique_pid [process where true] [file where true] fork=true;
// no longer supported
//sequence by unique_pid [process where true] [file where true] fork=1;
sequence by unique_pid [process where true] [file where true] fork=false;
// no longer supported
// sequence by unique_pid [process where true] [file where true] fork=0 [network where true];
sequence by unique_pid [process where true] [file where true] fork [network where true];
// no longer supported
// sequence by unique_pid [process where true] [file where true] fork=0;
sequence by unique_pid [process where true] [file where true] fork=true;
/* TESTS FROM
* https://raw.githubusercontent.com/endgameinc/eql/master/eql/etc/test_queries.toml
*/
process where true | head 6;
process where bad_field == null | head 5;
process where serial_event_id <= 8 and serial_event_id > 7
| filter serial_event_id == 8;
process where true
| filter serial_event_id <= 10
| filter serial_event_id > 6;
process where true
| filter serial_event_id <= 10
| filter serial_event_id > 6
| head 2;
process where true
| head 1000
| filter serial_event_id <= 10
| filter serial_event_id > 6
| tail 2
;
process where not (exit_code > -1)
and serial_event_id in (58, 64, 69, 74, 80, 85, 90, 93, 94)
| head 10
;
process where not (exit_code > -1) | head 7;
process where not (-1 < exit_code) | head 7;
process where process_name == "VMACTHLP.exe" and unique_pid == 12 | filter true;
process where process_name in ("python.exe", "SMSS.exe", "explorer.exe")
| unique process_name;
process where process_name in ("python.exe", "smss.exe", "Explorer.exe")
| unique length(process_name);
process where process_name in ("python.exe", "smss.exe", "explorer.exe")
| unique length(process_name) == length("python.exe");
process where process_name in ("Python.exe", "smss.exe", "explorer.exe")
| unique process_name != "python.exe";
process where process_name in ("python.exe", "smss.exe", "explorer.exe")
| unique process_name
| head 2
| tail 1;
process where process_name in ("python.exe", "smss.exe", "explorer.exe")
| unique process_name
| tail 2
| head 1;
process where process_name in ("python.exe", "smss.exe")
| unique process_name, parent_process_name;
process where process_name in ("python.exe", "smss.exe")
| unique process_name, parent_process_name;
process where process_name in ("python.exe", "smss.exe")
| head 5
| unique process_name, parent_process_name;
file where file_name == "csrss.exe" and opcode=0
and descendant of [process where opcode in (1,3) and process_name="cmd.exe"]
;
process where opcode=1 and process_name == "csrss.exe"
and descendant of [file where file_name == "csrss.exe" and opcode=0]
;
process where opcode=1 and process_name == "smss.exe"
and descendant of [
file where file_name == "csrss.exe" and opcode=0
and descendant of [
process where opcode in(1,3) and process_name="cmd.exe"
]
]
;
file where true
| tail 3;
file where true
| tail 4
| sort file_path;
process where true
| head 5
| sort md5, event_subtype_full, process_name;
process where true
| head 5
| sort md5, event_subtype_full, null_field, process_name;
process where true
| head 5
| sort md5, event_subtype_full, null_field, process_name;
process where true
| head 5
| sort md5, event_subtype_full, null_field, process_name
| head 2;
process where true
| head 5
| sort md5, event_subtype_full, null_field, process_name
| sort serial_event_id;
sequence
[process where serial_event_id = 1]
[process where serial_event_id = 2]
;
sequence
[process where serial_event_id < 5]
[process where serial_event_id = 5]
;
sequence
[process where serial_event_id=1] by unique_pid
[process where true] by unique_ppid;
sequence
[process where serial_event_id<3] by unique_pid
[process where true] by unique_ppid
;
sequence
[process where serial_event_id<3] by unique_pid * 2
[process where true] by unique_ppid * 2
;
sequence
[process where serial_event_id<3] by unique_pid * 2, length(unique_pid), string(unique_pid)
[process where true] by unique_ppid * 2, length(unique_ppid), string(unique_ppid)
;
sequence
[file where event_subtype_full == "file_create_event"] by file_path
[process where opcode == 1] by process_path
[process where opcode == 2] by process_path
[file where event_subtype_full == "file_delete_event"] by file_path
| head 4
| tail 2;
sequence with maxspan=1d
[file where event_subtype_full == "file_create_event"] by file_path
[process where opcode == 1] by process_path
[process where opcode == 2] by process_path
[file where event_subtype_full == "file_delete_event"] by file_path
| head 4
| tail 2;
sequence with maxspan=1h
[file where event_subtype_full == "file_create_event"] by file_path
[process where opcode == 1] by process_path
[process where opcode == 2] by process_path
[file where event_subtype_full == "file_delete_event"] by file_path
| head 4
| tail 2;
sequence with maxspan=1m
[file where event_subtype_full == "file_create_event"] by file_path
[process where opcode == 1] by process_path
[process where opcode == 2] by process_path
[file where event_subtype_full == "file_delete_event"] by file_path
| head 4
| tail 2;
sequence with maxspan=10s
[file where event_subtype_full == "file_create_event"] by file_path
[process where opcode == 1] by process_path
[process where opcode == 2] by process_path
[file where event_subtype_full == "file_delete_event"] by file_path
| head 4
| tail 2;
sequence with maxspan=0.5s
[file where event_subtype_full == "file_create_event"] by file_path
[process where opcode == 1] by process_path
[process where opcode == 2] by process_path
[file where event_subtype_full == "file_delete_event"] by file_path
| head 4
| tail 2;
sequence
[process where serial_event_id < 5]
[process where serial_event_id < 5]
;
sequence
[file where opcode=0 and file_name="svchost.exe"] by unique_pid
[process where opcode == 1] by unique_ppid
;
sequence
[file where opcode=0] by unique_pid
[file where opcode=0] by unique_pid
| head 1;
sequence
[file where opcode=0] by unique_pid
[file where opcode=0] by unique_pid
| filter events[1].serial_event_id == 92;
sequence
[file where opcode=0 and file_name="*.exe"] by unique_pid
[file where opcode=0 and file_name="*.exe"] by unique_pid
until [process where opcode=5000] by unique_ppid
| head 1;
sequence
[file where opcode=0 and file_name="*.exe"] by unique_pid
[file where opcode=0 and file_name="*.exe"] by unique_pid
until [process where opcode=1] by unique_ppid
| head 1;
join
[file where opcode=0 and file_name="*.exe"] by unique_pid
[file where opcode=2 and file_name="*.exe"] by unique_pid
until [process where opcode=1] by unique_ppid
| head 1;
join by user_name
[process where opcode in (1,3) and process_name="smss.exe"]
[process where opcode in (1,3) and process_name == "python.exe"]
;
join by unique_pid
[process where opcode=1]
[file where opcode=0 and file_name="svchost.exe"]
[file where opcode == 0 and file_name == "lsass.exe"];
join by string(unique_pid)
[process where opcode=1]
[file where opcode=0 and file_name="svchost.exe"]
[file where opcode == 0 and file_name == "lsass.exe"];
join by unique_pid
[process where opcode=1]
[file where opcode=0 and file_name="svchost.exe"]
[file where opcode == 0 and file_name == "lsass.exe"]
until [file where opcode == 2];
join by string(unique_pid), unique_pid, unique_pid * 2
[process where opcode=1]
[file where opcode=0 and file_name="svchost.exe"]
[file where opcode == 0 and file_name == "lsass.exe"]
until [file where opcode == 2];
join
[file where opcode=0 and file_name="svchost.exe"] by unique_pid
[process where opcode == 1] by unique_ppid
;
join by unique_pid
[process where opcode in (1,3) and process_name="python.exe"]
[file where file_name == "*.exe"];
join by user_name
[process where opcode in (1,3) and process_name="python.exe"]
[process where opcode in (1,3) and process_name == "smss.exe"]
;
join
[process where opcode in (1,3) and process_name="python.exe"]
[process where opcode in (1,3) and process_name == "smss.exe"]
;
any where true
| unique event_type_full;
process where opcode=1 and process_name in ("services.exe", "smss.exe", "lsass.exe")
and descendant of [process where process_name == "cmd.exe" ];
process where process_name in ("services.exe", "smss.exe", "lsass.exe")
and descendant of [process where process_name == "cmd.exe" ];
process where opcode=2 and process_name in ("services.exe", "smss.exe", "lsass.exe")
and descendant of [process where process_name == "cmd.exe" ];
process where process_name="svchost.exe"
and child of [file where file_name="svchost.exe" and opcode=0];
process where process_name="svchost.exe"
and not child of [file where file_name="svchost.exe" and opcode=0]
| head 3;
process where process_name="lsass.exe"
and child of [
process where process_name="python.exe"
and child of [process where process_name="cmd.exe"]
]
;
file where child of [
process where child of [
process where child of [process where process_name="*wsmprovhost.exe"]
]
]
| tail 1;
file where process_name = "python.exe"
| unique unique_pid;
file where event of [process where process_name = "python.exe" ]
| unique unique_pid;
process where event of [process where process_name = "python.exe" ];
sequence
[file where file_name="lsass.exe"] by file_path,process_path
[process where true] by process_path,parent_process_path
;
sequence by user_name
[file where file_name="lsass.exe"] by file_path, process_path
[process where true] by process_path, parent_process_path
;
sequence by pid
[file where file_name="lsass.exe"] by file_path,process_path
[process where true] by process_path,parent_process_path
;
sequence by user_name
[file where opcode=0] by file_path
[process where opcode=1] by process_path
[process where opcode=2] by process_path
[file where opcode=2] by file_path
| tail 1;
sequence by user_name
[file where opcode=0] by pid,file_path
[file where opcode=2] by pid,file_path
until [process where opcode=2] by ppid,process_path
;
sequence by user_name
[file where opcode=0] by pid,file_path
[file where opcode=2] by pid,file_path
until [process where opcode=5] by ppid,process_path
| head 2;
sequence by pid
[file where opcode=0] by file_path
[process where opcode=1] by process_path
[process where opcode=2] by process_path
[file where opcode=2] by file_path
| tail 1;
join by user_name
[file where true] by pid,file_path
[process where true] by ppid,process_path
| head 2;
sequence
[process where true] by unique_pid
[file where true] fork=true by unique_pid
[process where true] by unique_ppid
| head 4;
process where 'net.EXE' == original_file_name
| filter process_name="net*.exe"
;
process where process_name == original_file_name
| filter process_name='net*.exe'
;
process where original_file_name == process_name
| filter length(original_file_name) > 0
;
process where process_name != original_file_name
| filter length(original_file_name) > 0;
sequence by unique_pid [process where opcode=1 and process_name == 'msbuild.exe'] [network where true];
process where fake_field != "*"
| head 4;
process where not (fake_field == "*")
| head 4;
any where process_name == "svchost.exe"
| unique_count event_type_full, process_name;
any where process_name == "svchost.exe"
| sort event_type_full, serial_event_id
| unique_count event_type_full, process_name;
any where process_name == "svchost.exe"
| unique_count event_type_full, opcode
| filter count == 7;
any where process_name == "svchost.exe"
| unique_count event_type_full, opcode
| filter percent >= .5
;
// array functions
registry where arrayContains(bytes_written_string_list, 'En-uS');
registry where arrayContains(bytes_written_string_list, 'En');
network where mysterious_field
and arraySearch(mysterious_field.subarray, s, true)
;
registry where arraySearch(bytes_written_string_list, a, a == 'en-us');
registry where arraySearch(bytes_written_string_list, a, endsWith(a, '-us'));
network where mysterious_field and arraySearch(mysterious_field.subarray, s, false)
;
network where mysterious_field and arraySearch(mysterious_field.subarray, s, s.a == 's0-*')
;
network where mysterious_field and arraySearch(mysterious_field.subarray, s, s.a != 's0-*')
;
network where mysterious_field
and arraySearch(mysterious_field.subarray, sub1,
arraySearch(sub1.c, nested, nested.x.y == '*'))
;
network where mysterious_field
and arraySearch(mysterious_field.subarray, sub1,
sub1.a == 's0-a' and arraySearch(sub1.c, nested, nested.z == 's0-c1-x-z'))
;
network where mysterious_field
and arraySearch(mysterious_field.subarray, sub1,
sub1.a == 's0-a' and arraySearch(sub1.c, nested, nested.z == sub1.cross_match))
;
network where mysterious_field
and arraySearch(mysterious_field.subarray, sub1,
arraySearch(sub1.c, nested, nested.x.y == mysterious_field.outer_cross_match))
;
registry where arrayCount(bytes_written_string_list, s, s == '*-us') == 1
;
registry where arrayCount(bytes_written_string_list, s, s == '*en*') == 2
;
registry where arrayContains(bytes_written_string_list, "missing", "en-US")
;
// array fields
registry where length(bytes_written_string_list) == 2 and bytes_written_string_list[1] == "EN";
registry where length(bytes_written_string_list) > 0 and bytes_written_string_list[0] == 'EN-us'
;
registry where bytes_written_string_list[0] == 'EN-us'
;
registry where bytes_written_string_list[1] == 'EN'
;
registry where a[0];
registry where a.b.c[0];
registry where a[0].b;
registry where a[0][1].b;
registry where a[0].b[1];
registry where topField.subField[100].subsubField == 0;

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ql.index;
import org.elasticsearch.xpack.ql.type.EsField;
import java.util.Map;
import java.util.Objects;
public class EsIndex {
@ -33,4 +34,23 @@ public class EsIndex {
public String toString() {
return name;
}
@Override
public int hashCode() {
    // Identity is the index name plus its field mapping, mirroring equals().
    return Objects.hash(name, mapping);
}
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    EsIndex other = (EsIndex) obj;
    // Compare the mapping by value (null-safe) rather than by reference,
    // keeping equals() consistent with hashCode(), which hashes the
    // mapping's contents via Objects.hash(name, mapping).
    return Objects.equals(name, other.name) && Objects.equals(mapping, other.mapping);
}
}

View File

@ -10,7 +10,7 @@ import org.elasticsearch.xpack.ql.tree.Source;
import java.util.Collections;
import java.util.List;
abstract class LeafPlan extends LogicalPlan {
public abstract class LeafPlan extends LogicalPlan {
protected LeafPlan(Source source) {
super(source, Collections.emptyList());

View File

@ -89,11 +89,11 @@ public class UnresolvedRelation extends LeafPlan implements Unresolvable {
}
UnresolvedRelation other = (UnresolvedRelation) obj;
return source().equals(other.source())
&& table.equals(other.table)
return Objects.equals(source(), other.source())
&& Objects.equals(table, other.table)
&& Objects.equals(alias, other.alias)
&& Objects.equals(frozen, other.frozen)
&& unresolvedMsg.equals(other.unresolvedMsg);
&& Objects.equals(unresolvedMsg, other.unresolvedMsg);
}
@Override

View File

@ -9,6 +9,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
@ -195,9 +196,17 @@ public class TypesTests extends ESTestCase {
}
// Loads a mapping from a classpath resource by name and parses it into EsField metadata.
public static Map<String, EsField> loadMapping(DataTypeRegistry registry, String name, Boolean ordered) {
// NOTE(review): this body appears to mix two versions (likely diff residue):
// the second return statement below is unreachable and the 'order' local is
// only used by the first return — confirm against the repository which
// version is intended (presumably the delegation to the stream overload).
boolean order = ordered != null ? ordered.booleanValue() : randomBoolean();
InputStream stream = TypesTests.class.getResourceAsStream("/" + name);
assertNotNull("Could not find mapping resource:" + name, stream);
return Types.fromEs(registry, XContentHelper.convertToMap(JsonXContent.jsonXContent, stream, order));
return loadMapping(registry, stream, ordered);
}
// Parses a JSON mapping read from the given stream into EsField metadata.
// When 'ordered' is null, a random map ordering is used to exercise both paths.
public static Map<String, EsField> loadMapping(DataTypeRegistry registry, InputStream stream, Boolean ordered) {
    boolean preserveOrder;
    if (ordered != null) {
        preserveOrder = ordered.booleanValue();
    } else {
        preserveOrder = randomBoolean();
    }
    // try-with-resources guarantees the stream is closed even if parsing fails.
    try (InputStream in = stream) {
        return Types.fromEs(registry, XContentHelper.convertToMap(JsonXContent.jsonXContent, in, preserveOrder));
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
}