Decouple the ANTLR AST from Painless.

Closes #18286
This commit is contained in:
Jack Conradson 2016-05-12 00:46:48 -07:00
parent 9fbbd73de1
commit 28164ccf2e
94 changed files with 8930 additions and 8255 deletions

View File

@ -7,8 +7,8 @@
<!-- On Windows, Checkstyle matches files using \ path separator -->
<!-- These files are generated by ANTLR so its silly to hold them to our rules. -->
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]PainlessLexer\.java" checks="." />
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]PainlessParser(|BaseVisitor|Visitor)\.java" checks="." />
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessLexer\.java" checks="." />
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessParser(|BaseVisitor|Visitor)\.java" checks="." />
<!-- Hopefully temporary suppression of LineLength on files that don't pass it. We should remove these when we the
files start to pass. -->

View File

@ -49,7 +49,7 @@
<include name="@{grammar}ParserVisitor.java" />
<include name="@{grammar}ParserBaseVisitor.java" />
</patternset>
<property name="output.path" location="src/main/java/org/elasticsearch/painless"/>
<property name="output.path" location="src/main/java/org/elasticsearch/painless/antlr"/>
<!-- delete parser and lexer so files will be generated -->
<delete dir="${output.path}">
<patternset refid="grammar.@{grammar}.patternset"/>
@ -63,7 +63,7 @@
<local name="grammar.path"/>
<local name="output.path"/>
<property name="grammar.path" location="src/main/antlr"/>
<property name="output.path" location="src/main/java/org/elasticsearch/painless"/>
<property name="output.path" location="src/main/java/org/elasticsearch/painless/antlr"/>
<!-- invoke ANTLR4 -->
<java classname="org.antlr.v4.Tool" fork="true" failonerror="true" classpathref="regenerate.classpath" taskname="antlr">
<sysproperty key="file.encoding" value="UTF-8"/>
@ -71,7 +71,7 @@
<sysproperty key="user.country" value="US"/>
<sysproperty key="user.variant" value=""/>
<arg value="-package"/>
<arg value="org.elasticsearch.painless"/>
<arg value="org.elasticsearch.painless.antlr"/>
<arg value="-o"/>
<arg path="${output.path}"/>
<arg path="${grammar.path}/@{grammar}Lexer.g4"/>
@ -85,7 +85,7 @@
<local name="grammar.path"/>
<local name="output.path"/>
<property name="grammar.path" location="src/main/antlr"/>
<property name="output.path" location="src/main/java/org/elasticsearch/painless"/>
<property name="output.path" location="src/main/java/org/elasticsearch/painless/antlr"/>
<!-- invoke ANTLR4 -->
<java classname="org.antlr.v4.Tool" fork="true" failonerror="true" classpathref="regenerate.classpath" taskname="antlr">
<sysproperty key="file.encoding" value="UTF-8"/>
@ -93,7 +93,7 @@
<sysproperty key="user.country" value="US"/>
<sysproperty key="user.variant" value=""/>
<arg value="-package"/>
<arg value="org.elasticsearch.painless"/>
<arg value="org.elasticsearch.painless.antlr"/>
<arg value="-no-listener"/>
<arg value="-visitor"/>
<!-- <arg value="-Xlog"/> -->
@ -110,7 +110,7 @@
<local name="grammar.path"/>
<local name="output.path"/>
<property name="grammar.path" location="src/main/antlr"/>
<property name="output.path" location="src/main/java/org/elasticsearch/painless"/>
<property name="output.path" location="src/main/java/org/elasticsearch/painless/antlr"/>
<patternset id="grammar.@{grammar}.patternset">
<include name="@{grammar}Lexer.java" />
<include name="@{grammar}Parser.java" />

View File

@ -63,7 +63,7 @@ EQR: '===';
NE: '!=';
NER: '!==';
BWAND: '&';
BWXOR: '^';
XOR: '^';
BWOR: '|';
BOOLAND: '&&';
BOOLOR: '||';

View File

@ -92,9 +92,9 @@ expression
| TRUE # true
| FALSE # false
| NULL # null
| <assoc=right> extstart increment # postinc
| <assoc=right> increment extstart # preinc
| extstart # external
| <assoc=right> chain ( INCR | DECR ) # postinc
| <assoc=right> ( INCR | DECR ) chain # preinc
| chain # read
| <assoc=right> ( BOOLNOT | BWNOT | ADD | SUB ) expression # unary
| <assoc=right> LP decltype RP expression # cast
| expression ( MUL | DIV | REM ) expression # binary
@ -103,39 +103,35 @@ expression
| expression ( LT | LTE | GT | GTE ) expression # comp
| expression ( EQ | EQR | NE | NER ) expression # comp
| expression BWAND expression # binary
| expression BWXOR expression # binary
| expression XOR expression # binary
| expression BWOR expression # binary
| expression BOOLAND expression # bool
| expression BOOLOR expression # bool
| <assoc=right> expression COND expression COLON expression # conditional
| <assoc=right> extstart ( ASSIGN | AADD | ASUB | AMUL | ADIV
| <assoc=right> chain ( ASSIGN | AADD | ASUB | AMUL | ADIV
| AREM | AAND | AXOR | AOR
| ALSH | ARSH | AUSH ) expression # assignment
;
extstart
: extprec
| extcast
| extvar
| extnew
| extstring
chain
: linkprec
| linkcast
| linkvar
| linknew
| linkstring
;
extprec: LP ( extprec | extcast | extvar | extnew | extstring ) RP ( extdot | extbrace )?;
extcast: LP decltype RP ( extprec | extcast | extvar | extnew | extstring );
extbrace: LBRACE expression RBRACE ( extdot | extbrace )?;
extdot: DOT ( extcall | extfield );
extcall: EXTID arguments ( extdot | extbrace )?;
extvar: identifier ( extdot | extbrace )?;
extfield: ( EXTID | EXTINTEGER ) ( extdot | extbrace )?;
extnew: NEW identifier ( ( arguments extdot? ) | ( ( LBRACE expression RBRACE )+ extdot? ) );
extstring: STRING (extdot | extbrace )?;
linkprec: LP ( linkprec | linkcast | linkvar | linknew | linkstring ) RP ( linkdot | linkbrace )?;
linkcast: LP decltype RP ( linkprec | linkcast | linkvar | linknew | linkstring );
linkbrace: LBRACE expression RBRACE ( linkdot | linkbrace )?;
linkdot: DOT ( linkcall | linkfield );
linkcall: EXTID arguments ( linkdot | linkbrace )?;
linkvar: identifier ( linkdot | linkbrace )?;
linkfield: ( EXTID | EXTINTEGER ) ( linkdot | linkbrace )?;
linknew: NEW identifier ( ( arguments linkdot? ) | ( ( LBRACE expression RBRACE )+ linkdot? ) );
linkstring: STRING (linkdot | linkbrace )?;
arguments
: ( LP ( expression ( COMMA expression )* )? RP )
;
increment
: INCR
| DECR
;

View File

@ -19,450 +19,20 @@
package org.elasticsearch.painless;
import org.elasticsearch.painless.PainlessParser.AfterthoughtContext;
import org.elasticsearch.painless.PainlessParser.ArgumentsContext;
import org.elasticsearch.painless.PainlessParser.AssignmentContext;
import org.elasticsearch.painless.PainlessParser.BinaryContext;
import org.elasticsearch.painless.PainlessParser.BoolContext;
import org.elasticsearch.painless.PainlessParser.BreakContext;
import org.elasticsearch.painless.PainlessParser.CastContext;
import org.elasticsearch.painless.PainlessParser.CompContext;
import org.elasticsearch.painless.PainlessParser.ConditionalContext;
import org.elasticsearch.painless.PainlessParser.ContinueContext;
import org.elasticsearch.painless.PainlessParser.DeclContext;
import org.elasticsearch.painless.PainlessParser.DeclarationContext;
import org.elasticsearch.painless.PainlessParser.DecltypeContext;
import org.elasticsearch.painless.PainlessParser.DeclvarContext;
import org.elasticsearch.painless.PainlessParser.DoContext;
import org.elasticsearch.painless.PainlessParser.EmptyContext;
import org.elasticsearch.painless.PainlessParser.EmptyscopeContext;
import org.elasticsearch.painless.PainlessParser.ExprContext;
import org.elasticsearch.painless.PainlessParser.ExtbraceContext;
import org.elasticsearch.painless.PainlessParser.ExtcallContext;
import org.elasticsearch.painless.PainlessParser.ExtcastContext;
import org.elasticsearch.painless.PainlessParser.ExtdotContext;
import org.elasticsearch.painless.PainlessParser.ExternalContext;
import org.elasticsearch.painless.PainlessParser.ExtfieldContext;
import org.elasticsearch.painless.PainlessParser.ExtnewContext;
import org.elasticsearch.painless.PainlessParser.ExtprecContext;
import org.elasticsearch.painless.PainlessParser.ExtstartContext;
import org.elasticsearch.painless.PainlessParser.ExtstringContext;
import org.elasticsearch.painless.PainlessParser.ExtvarContext;
import org.elasticsearch.painless.PainlessParser.FalseContext;
import org.elasticsearch.painless.PainlessParser.ForContext;
import org.elasticsearch.painless.PainlessParser.GenericContext;
import org.elasticsearch.painless.PainlessParser.IdentifierContext;
import org.elasticsearch.painless.PainlessParser.IfContext;
import org.elasticsearch.painless.PainlessParser.IncrementContext;
import org.elasticsearch.painless.PainlessParser.InitializerContext;
import org.elasticsearch.painless.PainlessParser.MultipleContext;
import org.elasticsearch.painless.PainlessParser.NullContext;
import org.elasticsearch.painless.PainlessParser.NumericContext;
import org.elasticsearch.painless.PainlessParser.PostincContext;
import org.elasticsearch.painless.PainlessParser.PrecedenceContext;
import org.elasticsearch.painless.PainlessParser.PreincContext;
import org.elasticsearch.painless.PainlessParser.ReturnContext;
import org.elasticsearch.painless.PainlessParser.SingleContext;
import org.elasticsearch.painless.PainlessParser.SourceContext;
import org.elasticsearch.painless.PainlessParser.ThrowContext;
import org.elasticsearch.painless.PainlessParser.TrapContext;
import org.elasticsearch.painless.PainlessParser.TrueContext;
import org.elasticsearch.painless.PainlessParser.TryContext;
import org.elasticsearch.painless.PainlessParser.UnaryContext;
import org.elasticsearch.painless.PainlessParser.WhileContext;
import org.elasticsearch.painless.Variables.Reserved;
import org.elasticsearch.painless.node.SSource;
class Analyzer extends PainlessParserBaseVisitor<Void> {
static void analyze(final Metadata metadata) {
new Analyzer(metadata);
/**
* Runs the analysis phase of compilation using the Painless AST.
*/
final class Analyzer {
static Variables analyze(final CompilerSettings settings, final Definition definition,
final Reserved shortcut, final SSource root) {
final Variables variables = new Variables(settings, definition, shortcut);
root.analyze(settings, definition, variables);
return variables;
}
private final AnalyzerStatement statement;
private final AnalyzerExpression expression;
private final AnalyzerExternal external;
private Analyzer(final Metadata metadata) {
final Definition definition = metadata.definition;
final AnalyzerUtility utility = new AnalyzerUtility(metadata);
final AnalyzerCaster caster = new AnalyzerCaster(definition);
final AnalyzerPromoter promoter = new AnalyzerPromoter(definition);
statement = new AnalyzerStatement(metadata, this, utility, caster);
expression = new AnalyzerExpression(metadata, this, caster, promoter);
external = new AnalyzerExternal(metadata, this, utility, caster, promoter);
utility.incrementScope();
utility.addVariable(null, "#this", definition.execType);
//
// reserved words.
//
// input map of parameters passed to the script.
metadata.paramsValueSlot = utility.addVariable(null, "params", definition.smapType).slot;
// scorer parameter passed to the script. internal use only.
metadata.scorerValueSlot = utility.addVariable(null, "#scorer", definition.objectType).slot;
// doc parameter passed to the script.
// TODO: currently working as a Map<String,Def>, we can do better?
metadata.docValueSlot = utility.addVariable(null, "doc", definition.smapType).slot;
// aggregation _value parameter passed to the script
metadata.aggregationValueSlot = utility.addVariable(null, "_value", definition.defType).slot;
//
// reserved words implemented as local variables
//
// loop counter to catch runaway scripts. internal use only.
metadata.loopCounterSlot = utility.addVariable(null, "#loop", definition.intType).slot;
// document's score as a read-only double.
metadata.scoreValueSlot = utility.addVariable(null, "_score", definition.doubleType).slot;
// ctx map set by executable scripts as a read-only map.
metadata.ctxValueSlot = utility.addVariable(null, "ctx", definition.smapType).slot;
metadata.createStatementMetadata(metadata.root);
visit(metadata.root);
utility.decrementScope();
}
@Override
public Void visitSource(final SourceContext ctx) {
statement.processSource(ctx);
return null;
}
@Override
public Void visitIf(final IfContext ctx) {
statement.processIf(ctx);
return null;
}
@Override
public Void visitWhile(final WhileContext ctx) {
statement.processWhile(ctx);
return null;
}
@Override
public Void visitDo(final DoContext ctx) {
statement.processDo(ctx);
return null;
}
@Override
public Void visitFor(final ForContext ctx) {
statement.processFor(ctx);
return null;
}
@Override
public Void visitDecl(final DeclContext ctx) {
statement.processDecl(ctx);
return null;
}
@Override
public Void visitContinue(final ContinueContext ctx) {
statement.processContinue(ctx);
return null;
}
@Override
public Void visitBreak(final BreakContext ctx) {
statement.processBreak(ctx);
return null;
}
@Override
public Void visitReturn(final ReturnContext ctx) {
statement.processReturn(ctx);
return null;
}
@Override
public Void visitTry(final TryContext ctx) {
statement.processTry(ctx);
return null;
}
@Override
public Void visitThrow(final ThrowContext ctx) {
statement.processThrow(ctx);
return null;
}
@Override
public Void visitExpr(final ExprContext ctx) {
statement.processExpr(ctx);
return null;
}
@Override
public Void visitMultiple(final MultipleContext ctx) {
statement.processMultiple(ctx);
return null;
}
@Override
public Void visitSingle(final SingleContext ctx) {
statement.processSingle(ctx);
return null;
}
@Override
public Void visitEmpty(final EmptyContext ctx) {
throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitEmptyscope(final EmptyscopeContext ctx) {
throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitInitializer(final InitializerContext ctx) {
statement.processInitializer(ctx);
return null;
}
@Override
public Void visitAfterthought(final AfterthoughtContext ctx) {
statement.processAfterthought(ctx);
return null;
}
@Override
public Void visitDeclaration(final DeclarationContext ctx) {
statement.processDeclaration(ctx);
return null;
}
@Override
public Void visitDecltype(final DecltypeContext ctx) {
statement.processDecltype(ctx);
return null;
}
@Override
public Void visitDeclvar(final DeclvarContext ctx) {
statement.processDeclvar(ctx);
return null;
}
@Override
public Void visitTrap(final TrapContext ctx) {
statement.processTrap(ctx);
return null;
}
@Override
public Void visitIdentifier(IdentifierContext ctx) {
throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitGeneric(GenericContext ctx) {
throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitPrecedence(final PrecedenceContext ctx) {
throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitNumeric(final NumericContext ctx) {
expression.processNumeric(ctx);
return null;
}
@Override
public Void visitTrue(final TrueContext ctx) {
expression.processTrue(ctx);
return null;
}
@Override
public Void visitFalse(final FalseContext ctx) {
expression.processFalse(ctx);
return null;
}
@Override
public Void visitNull(final NullContext ctx) {
expression.processNull(ctx);
return null;
}
@Override
public Void visitExternal(final ExternalContext ctx) {
expression.processExternal(ctx);
return null;
}
@Override
public Void visitPostinc(final PostincContext ctx) {
expression.processPostinc(ctx);
return null;
}
@Override
public Void visitPreinc(final PreincContext ctx) {
expression.processPreinc(ctx);
return null;
}
@Override
public Void visitUnary(final UnaryContext ctx) {
expression.processUnary(ctx);
return null;
}
@Override
public Void visitCast(final CastContext ctx) {
expression.processCast(ctx);
return null;
}
@Override
public Void visitBinary(final BinaryContext ctx) {
expression.processBinary(ctx);
return null;
}
@Override
public Void visitComp(final CompContext ctx) {
expression.processComp(ctx);
return null;
}
@Override
public Void visitBool(final BoolContext ctx) {
expression.processBool(ctx);
return null;
}
@Override
public Void visitConditional(final ConditionalContext ctx) {
expression.processConditional(ctx);
return null;
}
@Override
public Void visitAssignment(final AssignmentContext ctx) {
expression.processAssignment(ctx);
return null;
}
@Override
public Void visitExtstart(final ExtstartContext ctx) {
external.processExtstart(ctx);
return null;
}
@Override
public Void visitExtprec(final ExtprecContext ctx) {
external.processExtprec(ctx);
return null;
}
@Override
public Void visitExtcast(final ExtcastContext ctx) {
external.processExtcast(ctx);
return null;
}
@Override
public Void visitExtbrace(final ExtbraceContext ctx) {
external.processExtbrace(ctx);
return null;
}
@Override
public Void visitExtdot(final ExtdotContext ctx) {
external.processExtdot(ctx);
return null;
}
@Override
public Void visitExtcall(final ExtcallContext ctx) {
external.processExtcall(ctx);
return null;
}
@Override
public Void visitExtvar(final ExtvarContext ctx) {
external.processExtvar(ctx);
return null;
}
@Override
public Void visitExtfield(final ExtfieldContext ctx) {
external.processExtfield(ctx);
return null;
}
@Override
public Void visitExtnew(final ExtnewContext ctx) {
external.processExtnew(ctx);
return null;
}
@Override
public Void visitExtstring(final ExtstringContext ctx) {
external.processExtstring(ctx);
return null;
}
@Override
public Void visitArguments(final ArgumentsContext ctx) {
throw new UnsupportedOperationException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitIncrement(final IncrementContext ctx) {
expression.processIncrement(ctx);
return null;
}
private Analyzer() {}
}

View File

@ -19,46 +19,30 @@
package org.elasticsearch.painless;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Transform;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Metadata.ExpressionMetadata;
class AnalyzerCaster {
private final Definition definition;
import java.lang.reflect.InvocationTargetException;
AnalyzerCaster(final Definition definition) {
this.definition = definition;
}
/**
* Used during the analysis phase to collect legal type casts and promotions
* for type-checking and later to write necessary casts in the bytecode.
*/
public final class AnalyzerCaster {
void markCast(final ExpressionMetadata emd) {
if (emd.from == null) {
throw new IllegalStateException(AnalyzerUtility.error(emd.source) + "From cast type should never be null.");
public static Cast getLegalCast(final Definition definition,
final String location, final Type actual, final Type expected, final boolean explicit) {
final Cast cast = new Cast(actual, expected);
if (actual.equals(expected)) {
return null;
}
if (emd.to != null) {
emd.cast = getLegalCast(emd.source, emd.from, emd.to, emd.explicit || !emd.typesafe);
if (emd.preConst != null && emd.to.sort.constant) {
emd.postConst = constCast(emd.source, emd.preConst, emd.cast);
}
} else {
throw new IllegalStateException(AnalyzerUtility.error(emd.source) + "To cast type should never be null.");
}
}
Cast getLegalCast(final ParserRuleContext source, final Type from, final Type to, final boolean explicit) {
final Cast cast = new Cast(from, to);
if (from.equals(to)) {
return cast;
}
if (from.sort == Sort.DEF && to.sort != Sort.VOID || from.sort != Sort.VOID && to.sort == Sort.DEF) {
final Transform transform = definition.transforms.get(cast);
if (actual.sort == Sort.DEF && expected.sort != Sort.VOID || actual.sort != Sort.VOID && expected.sort == Sort.DEF) {
final Transform transform = definition.transformsMap.get(cast);
if (transform != null) {
return transform;
@ -67,17 +51,17 @@ class AnalyzerCaster {
return cast;
}
switch (from.sort) {
switch (actual.sort) {
case BOOL:
switch (to.sort) {
switch (expected.sort) {
case OBJECT:
case BOOL_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
}
break;
case BYTE:
switch (to.sort) {
switch (expected.sort) {
case SHORT:
case INT:
case LONG:
@ -97,17 +81,17 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case CHAR_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case SHORT:
switch (to.sort) {
switch (expected.sort) {
case INT:
case LONG:
case FLOAT:
@ -126,18 +110,18 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE_OBJ:
case CHAR_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case CHAR:
switch (to.sort) {
switch (expected.sort) {
case INT:
case LONG:
case FLOAT:
@ -156,19 +140,19 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE_OBJ:
case SHORT_OBJ:
case STRING:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case INT:
switch (to.sort) {
switch (expected.sort) {
case LONG:
case FLOAT:
case DOUBLE:
@ -186,19 +170,19 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE_OBJ:
case SHORT_OBJ:
case CHAR_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case LONG:
switch (to.sort) {
switch (expected.sort) {
case FLOAT:
case DOUBLE:
return cast;
@ -215,20 +199,20 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE_OBJ:
case SHORT_OBJ:
case CHAR_OBJ:
case INT_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case FLOAT:
switch (to.sort) {
switch (expected.sort) {
case DOUBLE:
return cast;
case BYTE:
@ -244,21 +228,21 @@ class AnalyzerCaster {
case NUMBER:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE_OBJ:
case SHORT_OBJ:
case CHAR_OBJ:
case INT_OBJ:
case LONG_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case DOUBLE:
switch (to.sort) {
switch (expected.sort) {
case BYTE:
case SHORT:
case CHAR:
@ -272,7 +256,7 @@ class AnalyzerCaster {
case OBJECT:
case NUMBER:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE_OBJ:
case SHORT_OBJ:
case CHAR_OBJ:
@ -280,7 +264,7 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
@ -288,7 +272,7 @@ class AnalyzerCaster {
break;
case OBJECT:
case NUMBER:
switch (to.sort) {
switch (expected.sort) {
case BYTE:
case SHORT:
case CHAR:
@ -297,21 +281,21 @@ class AnalyzerCaster {
case FLOAT:
case DOUBLE:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case BOOL_OBJ:
switch (to.sort) {
switch (expected.sort) {
case BOOL:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
}
break;
case BYTE_OBJ:
switch (to.sort) {
switch (expected.sort) {
case BYTE:
case SHORT:
case INT:
@ -323,18 +307,18 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case CHAR:
case CHAR_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case SHORT_OBJ:
switch (to.sort) {
switch (expected.sort) {
case SHORT:
case INT:
case LONG:
@ -344,20 +328,20 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE:
case CHAR:
case BYTE_OBJ:
case CHAR_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case CHAR_OBJ:
switch (to.sort) {
switch (expected.sort) {
case CHAR:
case INT:
case LONG:
@ -367,21 +351,21 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE:
case SHORT:
case BYTE_OBJ:
case SHORT_OBJ:
case STRING:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case INT_OBJ:
switch (to.sort) {
switch (expected.sort) {
case INT:
case LONG:
case FLOAT:
@ -389,7 +373,7 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE:
case SHORT:
case CHAR:
@ -397,20 +381,20 @@ class AnalyzerCaster {
case SHORT_OBJ:
case CHAR_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case LONG_OBJ:
switch (to.sort) {
switch (expected.sort) {
case LONG:
case FLOAT:
case DOUBLE:
case FLOAT_OBJ:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE:
case SHORT:
case CHAR:
@ -420,18 +404,18 @@ class AnalyzerCaster {
case CHAR_OBJ:
case INT_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case FLOAT_OBJ:
switch (to.sort) {
switch (expected.sort) {
case FLOAT:
case DOUBLE:
case DOUBLE_OBJ:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE:
case SHORT:
case CHAR:
@ -443,16 +427,16 @@ class AnalyzerCaster {
case INT_OBJ:
case LONG_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case DOUBLE_OBJ:
switch (to.sort) {
switch (expected.sort) {
case DOUBLE:
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
case BYTE:
case SHORT:
case CHAR:
@ -466,59 +450,57 @@ class AnalyzerCaster {
case LONG_OBJ:
case FLOAT_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
break;
case STRING:
switch (to.sort) {
switch (expected.sort) {
case CHAR:
case CHAR_OBJ:
if (explicit)
return checkTransform(source, cast);
return checkTransform(definition, location, cast);
break;
}
}
try {
from.clazz.asSubclass(to.clazz);
actual.clazz.asSubclass(expected.clazz);
return cast;
} catch (final ClassCastException cce0) {
try {
if (explicit) {
to.clazz.asSubclass(from.clazz);
expected.clazz.asSubclass(actual.clazz);
return cast;
} else {
throw new ClassCastException(
AnalyzerUtility.error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "].");
"Error" + location + ": Cannot cast from [" + actual.name + "] to [" + expected.name + "].");
}
} catch (final ClassCastException cce1) {
throw new ClassCastException(
AnalyzerUtility.error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "].");
throw new ClassCastException("Error" + location + ": Cannot cast from [" + actual.name + "] to [" + expected.name + "].");
}
}
}
private Transform checkTransform(final ParserRuleContext source, final Cast cast) {
final Transform transform = definition.transforms.get(cast);
private static Transform checkTransform(final Definition definition, final String location, final Cast cast) {
final Transform transform = definition.transformsMap.get(cast);
if (transform == null) {
throw new ClassCastException(
AnalyzerUtility.error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "].");
throw new ClassCastException("Error" + location + ": Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "].");
}
return transform;
}
private Object constCast(final ParserRuleContext source, final Object constant, final Cast cast) {
public static Object constCast(final String location, final Object constant, final Cast cast) {
if (cast instanceof Transform) {
final Transform transform = (Transform)cast;
return invokeTransform(source, transform, constant);
return invokeTransform(location, transform, constant);
} else {
final Sort fsort = cast.from.sort;
final Sort tsort = cast.to.sort;
@ -543,17 +525,17 @@ class AnalyzerCaster {
case FLOAT: return number.floatValue();
case DOUBLE: return number.doubleValue();
default:
throw new IllegalStateException(AnalyzerUtility.error(source) + "Expected numeric type for cast.");
throw new IllegalStateException("Error" + location + ": Cannot cast from " +
"[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "].");
}
} else {
throw new IllegalStateException(AnalyzerUtility.error(source) + "No valid constant cast from " +
"[" + cast.from.clazz.getCanonicalName() + "] to " +
"[" + cast.to.clazz.getCanonicalName() + "].");
throw new IllegalStateException("Error" + location + ": Cannot cast from " +
"[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "].");
}
}
}
private Object invokeTransform(final ParserRuleContext source, final Transform transform, final Object object) {
private static Object invokeTransform(final String location, final Transform transform, final Object object) {
final Method method = transform.method;
final java.lang.reflect.Method jmethod = method.reflect;
final int modifiers = jmethod.getModifiers();
@ -564,11 +546,274 @@ class AnalyzerCaster {
} else {
return jmethod.invoke(object);
}
} catch (IllegalAccessException | IllegalArgumentException |
java.lang.reflect.InvocationTargetException | NullPointerException |
ExceptionInInitializerError exception) {
throw new IllegalArgumentException(AnalyzerUtility.error(source) +
"Cannot cast constant from [" + transform.from.name + "] to [" + transform.to.name + "].");
} catch (final IllegalAccessException | IllegalArgumentException |
InvocationTargetException | NullPointerException | ExceptionInInitializerError exception) {
throw new ClassCastException(
"Error" + location + ": Cannot cast from [" + transform.from.name + "] to [" + transform.to.name + "].");
}
}
public static Type promoteNumeric(final Definition definition, final Type from, final boolean decimal, final boolean primitive) {
final Sort sort = from.sort;
if (sort == Sort.DEF) {
return definition.defType;
} else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ) && decimal) {
return primitive ? definition.doubleType : definition.doubleobjType;
} else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) {
return primitive ? definition.floatType : definition.floatobjType;
} else if (sort == Sort.LONG || sort == Sort.LONG_OBJ) {
return primitive ? definition.longType : definition.longobjType;
} else if (sort == Sort.INT || sort == Sort.INT_OBJ ||
sort == Sort.CHAR || sort == Sort.CHAR_OBJ ||
sort == Sort.SHORT || sort == Sort.SHORT_OBJ ||
sort == Sort.BYTE || sort == Sort.BYTE_OBJ) {
return primitive ? definition.intType : definition.intobjType;
}
return null;
}
/**
 * Promotes two numeric operand types to the single type a binary numeric
 * operation should be performed in, widening to the larger of the two.
 *
 * @param definition the type table used to look up the promoted type
 * @param from0      the left operand type
 * @param from1      the right operand type
 * @param decimal    whether floating-point types are legal for this operation
 * @param primitive  whether to return the primitive or the boxed promoted type
 * @return the promoted type, or {@code null} if no promotion is possible
 */
public static Type promoteNumeric(final Definition definition,
                                  final Type from0, final Type from1, final boolean decimal, final boolean primitive) {
    final Sort sort0 = from0.sort;
    final Sort sort1 = from1.sort;

    // def "infects" any arithmetic it participates in.
    if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
        return definition.defType;
    }

    final boolean anyDouble = sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ ||
        sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ;
    final boolean anyFloat = sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ ||
        sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ;
    final boolean anyLong = sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ ||
        sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ;
    final boolean anyIntegral =
        sort0 == Sort.INT || sort0 == Sort.INT_OBJ || sort1 == Sort.INT || sort1 == Sort.INT_OBJ ||
        sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ || sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ ||
        sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ || sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ ||
        sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ || sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ;

    // Widest wins: double > float > long > int.  Floating point only applies
    // when decimal operations are allowed.
    if (decimal && anyDouble) {
        return primitive ? definition.doubleType : definition.doubleobjType;
    }

    if (decimal && anyFloat) {
        return primitive ? definition.floatType : definition.floatobjType;
    }

    if (anyLong) {
        return primitive ? definition.longType : definition.longobjType;
    }

    if (anyIntegral) {
        return primitive ? definition.intType : definition.intobjType;
    }

    return null;
}
/**
 * Promotes the operand types of the addition operator ('+').  If either side
 * is a String the operation is string concatenation; otherwise this is
 * ordinary binary numeric promotion with decimal types allowed.
 */
public static Type promoteAdd(final Definition definition, final Type from0, final Type from1) {
    if (from0.sort == Sort.STRING || from1.sort == Sort.STRING) {
        return definition.stringType;
    }

    return promoteNumeric(definition, from0, from1, true, true);
}
/**
 * Promotes the operand types of the xor operator ('^').  If either side is a
 * boolean the operation is logical xor; otherwise this is binary numeric
 * promotion restricted to integral types (no decimals).
 */
public static Type promoteXor(final Definition definition, final Type from0, final Type from1) {
    if (from0.sort.bool || from1.sort.bool) {
        return definition.booleanType;
    }

    return promoteNumeric(definition, from0, from1, false, true);
}
/**
 * Promotes the operand types of the equality operators ('==' / '!=') to the
 * type the comparison should be performed in.  def infects the result;
 * booleans compare as booleans, numerics via numeric promotion, and anything
 * else as plain Object references.
 */
public static Type promoteEquality(final Definition definition, final Type from0, final Type from1) {
    final Sort sort0 = from0.sort;
    final Sort sort1 = from1.sort;

    if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
        return definition.defType;
    }

    if (sort0.bool && sort1.bool) {
        // Only use the primitive comparison when neither side is boxed.
        return sort0.primitive && sort1.primitive ? definition.booleanType : definition.booleanobjType;
    }

    if (sort0.numeric && sort1.numeric) {
        return promoteNumeric(definition, from0, from1, true, sort0.primitive && sort1.primitive);
    }

    return definition.objectType;
}
/**
 * Promotes the operand types of the reference-equality operators ('===' /
 * '!==').  Primitive pairs are compared by value (boolean or promoted
 * numeric); any other combination is compared as Object references.
 */
public static Type promoteReference(final Definition definition, final Type from0, final Type from1) {
    final Sort sort0 = from0.sort;
    final Sort sort1 = from1.sort;

    if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
        return definition.defType;
    }

    final boolean bothPrimitive = sort0.primitive && sort1.primitive;

    if (bothPrimitive && sort0.bool && sort1.bool) {
        return definition.booleanType;
    }

    if (bothPrimitive && sort0.numeric && sort1.numeric) {
        return promoteNumeric(definition, from0, from1, true, true);
    }

    return definition.objectType;
}
/**
 * Promotes the two branch types of the conditional operator (?:) to a single
 * result type, mirroring the typing rules of JLS 15.25.  Identical types need
 * no promotion; def infects the result; booleans stay boolean; numerics widen
 * to the larger type, except that when one branch is a compile-time constant
 * (const0/const1) that fits in the other branch's narrower type, the result
 * narrows accordingly.
 * <p>
 * Bug fixes versus the previous version: the constant-narrowing cases for
 * char/int, int/short, and int/char returned byteType by copy-paste error;
 * they now return charType, shortType, and charType respectively, matching
 * the symmetric cases above them.
 *
 * @param definition the type table used to look up the promoted type
 * @param from0      the first branch's type
 * @param from1      the second branch's type
 * @param const0     the first branch's compile-time constant, or null
 * @param const1     the second branch's compile-time constant, or null
 * @return the promoted type (never null; falls back to Object)
 */
public static Type promoteConditional(final Definition definition,
                                      final Type from0, final Type from1, final Object const0, final Object const1) {
    // Identical branch types need no promotion at all.
    if (from0.equals(from1)) {
        return from0;
    }

    final Sort sort0 = from0.sort;
    final Sort sort1 = from1.sort;

    if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
        return definition.defType;
    }

    final boolean primitive = sort0.primitive && sort1.primitive;

    if (sort0.bool && sort1.bool) {
        return primitive ? definition.booleanType : definition.booleanobjType;
    }

    if (sort0.numeric && sort1.numeric) {
        if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) {
            return primitive ? definition.doubleType : definition.doubleobjType;
        } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) {
            return primitive ? definition.floatType : definition.floatobjType;
        } else if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) {
            // Uses the precomputed flag (was an inline duplicate of the same test).
            return primitive ? definition.longType : definition.longobjType;
        } else {
            // Both branches are int-sized or smaller.  When one branch is a
            // constant that fits in the other branch's type, narrow to it.
            if (sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ) {
                if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) {
                    return primitive ? definition.byteType : definition.byteobjType;
                } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) {
                    if (const1 != null) {
                        final short constant = (short)const1;

                        if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) {
                            return primitive ? definition.byteType : definition.byteobjType;
                        }
                    }

                    return primitive ? definition.shortType : definition.shortobjType;
                } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) {
                    // byte and char have disjoint ranges; widen to int.
                    return primitive ? definition.intType : definition.intobjType;
                } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) {
                    if (const1 != null) {
                        final int constant = (int)const1;

                        if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) {
                            return primitive ? definition.byteType : definition.byteobjType;
                        }
                    }

                    return primitive ? definition.intType : definition.intobjType;
                }
            } else if (sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ) {
                if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) {
                    if (const0 != null) {
                        final short constant = (short)const0;

                        if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) {
                            return primitive ? definition.byteType : definition.byteobjType;
                        }
                    }

                    return primitive ? definition.shortType : definition.shortobjType;
                } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) {
                    return primitive ? definition.shortType : definition.shortobjType;
                } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) {
                    return primitive ? definition.intType : definition.intobjType;
                } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) {
                    if (const1 != null) {
                        final int constant = (int)const1;

                        if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) {
                            return primitive ? definition.shortType : definition.shortobjType;
                        }
                    }

                    return primitive ? definition.intType : definition.intobjType;
                }
            } else if (sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ) {
                if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) {
                    return primitive ? definition.intType : definition.intobjType;
                } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) {
                    return primitive ? definition.intType : definition.intobjType;
                } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) {
                    return primitive ? definition.charType : definition.charobjType;
                } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) {
                    if (const1 != null) {
                        final int constant = (int)const1;

                        if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) {
                            // BUG FIX: an int constant that fits in char narrows
                            // to char (previously returned byteType).
                            return primitive ? definition.charType : definition.charobjType;
                        }
                    }

                    return primitive ? definition.intType : definition.intobjType;
                }
            } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ) {
                if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) {
                    if (const0 != null) {
                        final int constant = (int)const0;

                        if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) {
                            return primitive ? definition.byteType : definition.byteobjType;
                        }
                    }

                    return primitive ? definition.intType : definition.intobjType;
                } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) {
                    if (const0 != null) {
                        final int constant = (int)const0;

                        if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) {
                            // BUG FIX: an int constant that fits in short narrows
                            // to short (previously returned byteType).
                            return primitive ? definition.shortType : definition.shortobjType;
                        }
                    }

                    return primitive ? definition.intType : definition.intobjType;
                } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) {
                    if (const0 != null) {
                        final int constant = (int)const0;

                        if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) {
                            // BUG FIX: an int constant that fits in char narrows
                            // to char (previously returned byteType).
                            return primitive ? definition.charType : definition.charobjType;
                        }
                    }

                    return primitive ? definition.intType : definition.intobjType;
                } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) {
                    return primitive ? definition.intType : definition.intobjType;
                }
            }
        }
    }

    // TODO: In the rare case we still haven't reached a correct promotion we need
    // to calculate the highest upper bound for the two types and return that.
    // However, for now we just return objectType that may require an extra cast.
    return definition.objectType;
}
// Utility class: all members are static, so prevent instantiation.
private AnalyzerCaster() {}
}

View File

@ -1,856 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Metadata.ExpressionMetadata;
import org.elasticsearch.painless.Metadata.ExternalMetadata;
import org.elasticsearch.painless.PainlessParser.AssignmentContext;
import org.elasticsearch.painless.PainlessParser.BinaryContext;
import org.elasticsearch.painless.PainlessParser.BoolContext;
import org.elasticsearch.painless.PainlessParser.CastContext;
import org.elasticsearch.painless.PainlessParser.CompContext;
import org.elasticsearch.painless.PainlessParser.ConditionalContext;
import org.elasticsearch.painless.PainlessParser.DecltypeContext;
import org.elasticsearch.painless.PainlessParser.ExpressionContext;
import org.elasticsearch.painless.PainlessParser.ExternalContext;
import org.elasticsearch.painless.PainlessParser.ExtstartContext;
import org.elasticsearch.painless.PainlessParser.FalseContext;
import org.elasticsearch.painless.PainlessParser.IncrementContext;
import org.elasticsearch.painless.PainlessParser.NullContext;
import org.elasticsearch.painless.PainlessParser.NumericContext;
import org.elasticsearch.painless.PainlessParser.PostincContext;
import org.elasticsearch.painless.PainlessParser.PreincContext;
import org.elasticsearch.painless.PainlessParser.TrueContext;
import org.elasticsearch.painless.PainlessParser.UnaryContext;
import static org.elasticsearch.painless.PainlessParser.ADD;
import static org.elasticsearch.painless.PainlessParser.BWAND;
import static org.elasticsearch.painless.PainlessParser.BWOR;
import static org.elasticsearch.painless.PainlessParser.BWXOR;
import static org.elasticsearch.painless.PainlessParser.DIV;
import static org.elasticsearch.painless.PainlessParser.LSH;
import static org.elasticsearch.painless.PainlessParser.MUL;
import static org.elasticsearch.painless.PainlessParser.REM;
import static org.elasticsearch.painless.PainlessParser.RSH;
import static org.elasticsearch.painless.PainlessParser.SUB;
import static org.elasticsearch.painless.PainlessParser.USH;
class AnalyzerExpression {
// Per-compilation metadata shared between the analyzer passes.
private final Metadata metadata;
// Type/method whitelist definitions, taken from metadata.
private final Definition definition;
// Compiler settings (e.g. numeric overflow behavior), taken from metadata.
private final CompilerSettings settings;

// Sibling analyzer components: the parent analyzer is used to recurse into
// child parse-tree nodes; caster records casts on expression metadata;
// promoter computes promoted types for operators.
private final Analyzer analyzer;
private final AnalyzerCaster caster;
private final AnalyzerPromoter promoter;

/**
 * Creates the expression-analysis pass.
 *
 * @param metadata per-compilation metadata; also supplies definition and settings
 * @param analyzer the parent analyzer used to visit child parse-tree nodes
 * @param caster   helper that marks casts on expression metadata
 * @param promoter helper that computes promoted types for operators
 */
AnalyzerExpression(final Metadata metadata, final Analyzer analyzer,
                   final AnalyzerCaster caster, final AnalyzerPromoter promoter) {
    this.metadata = metadata;
    this.definition = metadata.definition;
    this.settings = metadata.settings;
    this.analyzer = analyzer;
    this.caster = caster;
    this.promoter = promoter;
}
/**
 * Analyzes a numeric literal (decimal, octal, integer, or hex token) and
 * records its constant value and type on this node's expression metadata.
 * <p>
 * If the literal's direct parent is a unary minus, the sign is folded into
 * the parsed constant here (so e.g. the most negative values are parseable).
 * Suffixes select the type: f/F float, d/D double, l/L long.  Unsuffixed
 * integers narrow to byte/char/short when the expected type requires it and
 * the value fits; otherwise they are int.
 *
 * @throws IllegalArgumentException if the token cannot be parsed as the
 *         selected numeric type
 */
void processNumeric(final NumericContext ctx) {
    final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx);
    // Fold a parent unary minus into the literal itself.
    final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null;

    if (ctx.DECIMAL() != null) {
        final String svalue = (negate ? "-" : "") + ctx.DECIMAL().getText();

        if (svalue.endsWith("f") || svalue.endsWith("F")) {
            try {
                numericemd.from = definition.floatType;
                numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1));
            } catch (NumberFormatException exception) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid float constant [" + svalue + "].");
            }
        } else {
            try {
                numericemd.from = definition.doubleType;
                numericemd.preConst = Double.parseDouble(svalue);
            } catch (NumberFormatException exception) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid double constant [" + svalue + "].");
            }
        }
    } else {
        String svalue = negate ? "-" : "";
        int radix;

        if (ctx.OCTAL() != null) {
            svalue += ctx.OCTAL().getText();
            radix = 8;
        } else if (ctx.INTEGER() != null) {
            svalue += ctx.INTEGER().getText();
            radix = 10;
        } else if (ctx.HEX() != null) {
            svalue += ctx.HEX().getText();
            radix = 16;
        } else {
            throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
        }

        if (svalue.endsWith("d") || svalue.endsWith("D")) {
            try {
                numericemd.from = definition.doubleType;
                numericemd.preConst = Double.parseDouble(svalue.substring(0, svalue.length() - 1));
            } catch (NumberFormatException exception) {
                // BUG FIX: this branch parses a double; the message previously
                // (and incorrectly) reported "Invalid float constant".
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid double constant [" + svalue + "].");
            }
        } else if (svalue.endsWith("f") || svalue.endsWith("F")) {
            try {
                numericemd.from = definition.floatType;
                numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1));
            } catch (NumberFormatException exception) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid float constant [" + svalue + "].");
            }
        } else if (svalue.endsWith("l") || svalue.endsWith("L")) {
            try {
                numericemd.from = definition.longType;
                numericemd.preConst = Long.parseLong(svalue.substring(0, svalue.length() - 1), radix);
            } catch (NumberFormatException exception) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid long constant [" + svalue + "].");
            }
        } else {
            try {
                // Narrow to byte/char/short only when the expected type asks
                // for it and the parsed value fits that range.
                final Type type = numericemd.to;
                final Sort sort = type == null ? Sort.INT : type.sort;
                final int value = Integer.parseInt(svalue, radix);

                if (sort == Sort.BYTE && value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) {
                    numericemd.from = definition.byteType;
                    numericemd.preConst = (byte)value;
                } else if (sort == Sort.CHAR && value >= Character.MIN_VALUE && value <= Character.MAX_VALUE) {
                    numericemd.from = definition.charType;
                    numericemd.preConst = (char)value;
                } else if (sort == Sort.SHORT && value >= Short.MIN_VALUE && value <= Short.MAX_VALUE) {
                    numericemd.from = definition.shortType;
                    numericemd.preConst = (short)value;
                } else {
                    numericemd.from = definition.intType;
                    numericemd.preConst = value;
                }
            } catch (NumberFormatException exception) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid int constant [" + svalue + "].");
            }
        }
    }
}
/**
 * Analyzes the literal {@code true}: a constant boolean expression.
 */
void processTrue(final TrueContext ctx) {
    final ExpressionMetadata emd = metadata.getExpressionMetadata(ctx);

    // The grammar guarantees a TRUE token here; anything else is a parser bug.
    if (ctx.TRUE() == null) {
        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
    }

    emd.preConst = true;
    emd.from = definition.booleanType;
}
/**
 * Analyzes the literal {@code false}: a constant boolean expression.
 */
void processFalse(final FalseContext ctx) {
    final ExpressionMetadata emd = metadata.getExpressionMetadata(ctx);

    // The grammar guarantees a FALSE token here; anything else is a parser bug.
    if (ctx.FALSE() == null) {
        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
    }

    emd.preConst = false;
    emd.from = definition.booleanType;
}
/**
 * Analyzes the literal {@code null}.  Null may take on any expected reference
 * type; without an expected type it is treated as a plain Object.  Casting
 * null to a primitive is an error.
 */
void processNull(final NullContext ctx) {
    final ExpressionMetadata emd = metadata.getExpressionMetadata(ctx);

    if (ctx.NULL() == null) {
        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
    }

    emd.isNull = true;

    if (emd.to == null) {
        // No expected type: default to Object.
        emd.from = definition.objectType;
    } else if (emd.to.sort.primitive) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
            "Cannot cast null to a primitive type [" + emd.to.name + "].");
    } else {
        // Adopt the expected reference type directly.
        emd.from = emd.to;
    }
}
/**
 * Analyzes an external chain expression (variable/field/method access) by
 * delegating to its extstart sub-tree and copying the results back onto this
 * expression's metadata.
 */
void processExternal(final ExternalContext ctx) {
    final ExpressionMetadata emd = metadata.getExpressionMetadata(ctx);

    final ExtstartContext startctx = ctx.extstart();
    final ExternalMetadata startemd = metadata.createExternalMetadata(startctx);

    // Propagate whether the chain's value is actually consumed, then analyze.
    startemd.read = emd.read;
    analyzer.visit(startctx);

    // Copy the analyzed results back onto this expression node.
    emd.statement = startemd.statement;
    emd.preConst = startemd.constant;
    emd.from = startemd.current;
    emd.typesafe = startemd.current.sort != Sort.DEF;
}
/**
 * Analyzes a post-increment/decrement expression by treating it as a combined
 * read+store on the underlying chain, with the original value produced after
 * the store.
 */
void processPostinc(final PostincContext ctx) {
    final ExpressionMetadata emd = metadata.getExpressionMetadata(ctx);

    final ExtstartContext startctx = ctx.extstart();
    final ExternalMetadata startemd = metadata.createExternalMetadata(startctx);

    // Configure the chain as a compound store: value op= increment, post-form.
    startemd.read = emd.read;
    startemd.storeExpr = ctx.increment();
    startemd.token = ADD;
    startemd.post = true;
    analyzer.visit(startctx);

    // A post-increment is always a valid statement; its value is only
    // produced when it is actually read.
    emd.statement = true;
    emd.from = startemd.read ? startemd.current : definition.voidType;
    emd.typesafe = startemd.current.sort != Sort.DEF;
}
/**
 * Analyzes a pre-increment/decrement expression by treating it as a combined
 * read+store on the underlying chain, with the new value produced by the
 * store.
 */
void processPreinc(final PreincContext ctx) {
    final ExpressionMetadata emd = metadata.getExpressionMetadata(ctx);

    final ExtstartContext startctx = ctx.extstart();
    final ExternalMetadata startemd = metadata.createExternalMetadata(startctx);

    // Configure the chain as a compound store: value op= increment, pre-form.
    startemd.read = emd.read;
    startemd.storeExpr = ctx.increment();
    startemd.token = ADD;
    startemd.pre = true;
    analyzer.visit(startctx);

    // A pre-increment is always a valid statement; its value is only
    // produced when it is actually read.
    emd.statement = true;
    emd.from = startemd.read ? startemd.current : definition.voidType;
    emd.typesafe = startemd.current.sort != Sort.DEF;
}
/**
 * Analyzes a unary operation: boolean not (!), bitwise not (~), unary plus
 * (+), and unary minus (-).  Folds the operation at compile time when the
 * operand is itself a constant, honoring the numeric-overflow setting for
 * negation of int/long.
 */
void processUnary(final UnaryContext ctx) {
    final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx);
    final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);

    if (ctx.BOOLNOT() != null) {
        // ! operator: the operand must be (castable to) boolean.
        expremd.to = definition.booleanType;
        analyzer.visit(exprctx);
        caster.markCast(expremd);

        // Fold !constant at compile time.
        if (expremd.postConst != null) {
            unaryemd.preConst = !(boolean)expremd.postConst;
        }

        unaryemd.from = definition.booleanType;
    } else if (ctx.BWNOT() != null || ctx.ADD() != null || ctx.SUB() != null) {
        analyzer.visit(exprctx);

        // ~ is integral-only (decimal flag false); + and - also allow decimals.
        final Type promote = promoter.promoteNumeric(expremd.from, ctx.BWNOT() == null, true);

        if (promote == null) {
            throw new ClassCastException(AnalyzerUtility.error(ctx) + "Cannot apply [" + ctx.getChild(0).getText() + "] " +
                "operation to type [" + expremd.from.name + "].");
        }

        expremd.to = promote;
        caster.markCast(expremd);

        // Constant folding for ~, -, + on a constant operand.
        if (expremd.postConst != null) {
            final Sort sort = promote.sort;

            if (ctx.BWNOT() != null) {
                if (sort == Sort.INT) {
                    unaryemd.preConst = ~(int)expremd.postConst;
                } else if (sort == Sort.LONG) {
                    unaryemd.preConst = ~(long)expremd.postConst;
                } else {
                    throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
                }
            } else if (ctx.SUB() != null) {
                // A negated numeric literal already had its sign folded in by
                // processNumeric, so pass the constant through unchanged.
                if (exprctx instanceof NumericContext) {
                    unaryemd.preConst = expremd.postConst;
                } else {
                    if (sort == Sort.INT) {
                        if (settings.getNumericOverflow()) {
                            unaryemd.preConst = -(int)expremd.postConst;
                        } else {
                            // Overflow disallowed: negateExact throws on MIN_VALUE.
                            unaryemd.preConst = Math.negateExact((int)expremd.postConst);
                        }
                    } else if (sort == Sort.LONG) {
                        if (settings.getNumericOverflow()) {
                            unaryemd.preConst = -(long)expremd.postConst;
                        } else {
                            unaryemd.preConst = Math.negateExact((long)expremd.postConst);
                        }
                    } else if (sort == Sort.FLOAT) {
                        unaryemd.preConst = -(float)expremd.postConst;
                    } else if (sort == Sort.DOUBLE) {
                        unaryemd.preConst = -(double)expremd.postConst;
                    } else {
                        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
                    }
                }
            } else if (ctx.ADD() != null) {
                // Unary plus leaves the value unchanged.
                if (sort == Sort.INT) {
                    unaryemd.preConst = +(int)expremd.postConst;
                } else if (sort == Sort.LONG) {
                    unaryemd.preConst = +(long)expremd.postConst;
                } else if (sort == Sort.FLOAT) {
                    unaryemd.preConst = +(float)expremd.postConst;
                } else if (sort == Sort.DOUBLE) {
                    unaryemd.preConst = +(double)expremd.postConst;
                } else {
                    throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
                }
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        }

        unaryemd.from = promote;
        unaryemd.typesafe = expremd.typesafe;
    } else {
        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
    }
}
/**
 * Analyzes an explicit cast expression: analyzes the declared target type,
 * then analyzes the operand with that type as its expected (explicit) cast
 * target, propagating any folded constant through the cast.
 */
void processCast(final CastContext ctx) {
    final ExpressionMetadata emd = metadata.getExpressionMetadata(ctx);

    // Resolve the declared target type first.
    final DecltypeContext typectx = ctx.decltype();
    final ExpressionMetadata typemd = metadata.createExpressionMetadata(typectx);
    analyzer.visit(typectx);
    final Type type = typemd.from;
    emd.from = type;

    // Analyze the operand with the target type as an explicit cast.
    final ExpressionContext valuectx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata valuemd = metadata.createExpressionMetadata(valuectx);
    valuemd.to = type;
    valuemd.explicit = true;
    analyzer.visit(valuectx);
    caster.markCast(valuemd);

    // A constant operand stays constant through the cast.
    if (valuemd.postConst != null) {
        emd.preConst = valuemd.postConst;
    }

    emd.typesafe = valuemd.typesafe && emd.from.sort != Sort.DEF;
}
/**
 * Analyzes a binary operation (* / % + - << >> >>> & ^ |): analyzes both
 * operands, promotes them to a common type, marks the required casts, and
 * folds the operation at compile time when both operands are constants —
 * honoring the numeric-overflow compiler setting where applicable.
 */
void processBinary(final BinaryContext ctx) {
    final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx);

    final ExpressionContext exprctx0 = AnalyzerUtility.updateExpressionTree(ctx.expression(0));
    final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0);
    analyzer.visit(exprctx0);

    final ExpressionContext exprctx1 = AnalyzerUtility.updateExpressionTree(ctx.expression(1));
    final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1);
    analyzer.visit(exprctx1);

    // Operator class determines the promotion rules: * / % - allow decimals,
    // + additionally allows String concatenation, ^ additionally allows booleans.
    final boolean decimal = ctx.MUL() != null || ctx.DIV() != null || ctx.REM() != null || ctx.SUB() != null;
    final boolean add = ctx.ADD() != null;
    final boolean xor = ctx.BWXOR() != null;
    final Type promote = add ? promoter.promoteAdd(expremd0.from, expremd1.from) :
        xor ? promoter.promoteXor(expremd0.from, expremd1.from) :
        promoter.promoteNumeric(expremd0.from, expremd1.from, decimal, true);

    if (promote == null) {
        throw new ClassCastException(AnalyzerUtility.error(ctx) + "Cannot apply [" + ctx.getChild(1).getText() + "] " +
            "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "].");
    }

    final Sort sort = promote.sort;
    // String concatenation keeps each operand's own type; everything else is
    // cast to the promoted type.
    expremd0.to = add && sort == Sort.STRING ? expremd0.from : promote;
    expremd1.to = add && sort == Sort.STRING ? expremd1.from : promote;
    caster.markCast(expremd0);
    caster.markCast(expremd1);

    // Constant folding when both operands are compile-time constants.
    if (expremd0.postConst != null && expremd1.postConst != null) {
        if (ctx.MUL() != null) {
            // Multiplication: exact/overflow-checked variants when overflow is disallowed.
            if (sort == Sort.INT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (int)expremd0.postConst * (int)expremd1.postConst;
                } else {
                    binaryemd.preConst = Math.multiplyExact((int)expremd0.postConst, (int)expremd1.postConst);
                }
            } else if (sort == Sort.LONG) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (long)expremd0.postConst * (long)expremd1.postConst;
                } else {
                    binaryemd.preConst = Math.multiplyExact((long)expremd0.postConst, (long)expremd1.postConst);
                }
            } else if (sort == Sort.FLOAT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (float)expremd0.postConst * (float)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.multiplyWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst);
                }
            } else if (sort == Sort.DOUBLE) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (double)expremd0.postConst * (double)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.multiplyWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst);
                }
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.DIV() != null) {
            // Division.
            if (sort == Sort.INT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (int)expremd0.postConst / (int)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.divideWithoutOverflow((int)expremd0.postConst, (int)expremd1.postConst);
                }
            } else if (sort == Sort.LONG) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (long)expremd0.postConst / (long)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.divideWithoutOverflow((long)expremd0.postConst, (long)expremd1.postConst);
                }
            } else if (sort == Sort.FLOAT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (float)expremd0.postConst / (float)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.divideWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst);
                }
            } else if (sort == Sort.DOUBLE) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (double)expremd0.postConst / (double)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.divideWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst);
                }
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.REM() != null) {
            // Remainder: int/long remainder cannot overflow, so no checked variant.
            if (sort == Sort.INT) {
                binaryemd.preConst = (int)expremd0.postConst % (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                binaryemd.preConst = (long)expremd0.postConst % (long)expremd1.postConst;
            } else if (sort == Sort.FLOAT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (float)expremd0.postConst % (float)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.remainderWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst);
                }
            } else if (sort == Sort.DOUBLE) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (double)expremd0.postConst % (double)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.remainderWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst);
                }
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.ADD() != null) {
            // Addition, or String concatenation when promoted to String.
            if (sort == Sort.INT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (int)expremd0.postConst + (int)expremd1.postConst;
                } else {
                    binaryemd.preConst = Math.addExact((int)expremd0.postConst, (int)expremd1.postConst);
                }
            } else if (sort == Sort.LONG) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (long)expremd0.postConst + (long)expremd1.postConst;
                } else {
                    binaryemd.preConst = Math.addExact((long)expremd0.postConst, (long)expremd1.postConst);
                }
            } else if (sort == Sort.FLOAT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (float)expremd0.postConst + (float)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.addWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst);
                }
            } else if (sort == Sort.DOUBLE) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (double)expremd0.postConst + (double)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.addWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst);
                }
            } else if (sort == Sort.STRING) {
                binaryemd.preConst = "" + expremd0.postConst + expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.SUB() != null) {
            // Subtraction.
            if (sort == Sort.INT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (int)expremd0.postConst - (int)expremd1.postConst;
                } else {
                    binaryemd.preConst = Math.subtractExact((int)expremd0.postConst, (int)expremd1.postConst);
                }
            } else if (sort == Sort.LONG) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (long)expremd0.postConst - (long)expremd1.postConst;
                } else {
                    binaryemd.preConst = Math.subtractExact((long)expremd0.postConst, (long)expremd1.postConst);
                }
            } else if (sort == Sort.FLOAT) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (float)expremd0.postConst - (float)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.subtractWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst);
                }
            } else if (sort == Sort.DOUBLE) {
                if (settings.getNumericOverflow()) {
                    binaryemd.preConst = (double)expremd0.postConst - (double)expremd1.postConst;
                } else {
                    binaryemd.preConst = Utility.subtractWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst);
                }
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.LSH() != null) {
            // Shifts and bitwise ops are integral-only (xor also allows boolean below).
            if (sort == Sort.INT) {
                binaryemd.preConst = (int)expremd0.postConst << (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                binaryemd.preConst = (long)expremd0.postConst << (long)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.RSH() != null) {
            if (sort == Sort.INT) {
                binaryemd.preConst = (int)expremd0.postConst >> (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                binaryemd.preConst = (long)expremd0.postConst >> (long)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.USH() != null) {
            if (sort == Sort.INT) {
                binaryemd.preConst = (int)expremd0.postConst >>> (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                binaryemd.preConst = (long)expremd0.postConst >>> (long)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.BWAND() != null) {
            if (sort == Sort.INT) {
                binaryemd.preConst = (int)expremd0.postConst & (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                binaryemd.preConst = (long)expremd0.postConst & (long)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.BWXOR() != null) {
            // Xor additionally folds boolean ^ boolean.
            if (sort == Sort.BOOL) {
                binaryemd.preConst = (boolean)expremd0.postConst ^ (boolean)expremd1.postConst;
            } else if (sort == Sort.INT) {
                binaryemd.preConst = (int)expremd0.postConst ^ (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                binaryemd.preConst = (long)expremd0.postConst ^ (long)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.BWOR() != null) {
            if (sort == Sort.INT) {
                binaryemd.preConst = (int)expremd0.postConst | (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                binaryemd.preConst = (long)expremd0.postConst | (long)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else {
            throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
        }
    }

    binaryemd.from = promote;
    binaryemd.typesafe = expremd0.typesafe && expremd1.typesafe;
}
/**
 * Analyzes a comparison expression (==, !=, ===, !==, >=, >, <=, <): visits both
 * operands, promotes their types (equality, reference, or numeric promotion depending
 * on the operator), marks the required casts, and constant-folds the result when both
 * operands are compile-time constants. The expression's own type is always boolean.
 */
void processComp(final CompContext ctx) {
    final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx);
    final boolean equality = ctx.EQ() != null || ctx.NE() != null;
    final boolean reference = ctx.EQR() != null || ctx.NER() != null;

    final ExpressionContext exprctx0 = AnalyzerUtility.updateExpressionTree(ctx.expression(0));
    final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0);
    analyzer.visit(exprctx0);

    final ExpressionContext exprctx1 = AnalyzerUtility.updateExpressionTree(ctx.expression(1));
    final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1);
    analyzer.visit(exprctx1);

    if (expremd0.isNull && expremd1.isNull) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unnecessary comparison of null constants.");
    }

    // Equality (==/!=) and reference (===/!==) comparisons use their own promotions;
    // the relational operators are numeric-only.
    final Type promote = equality ? promoter.promoteEquality(expremd0.from, expremd1.from) :
        reference ? promoter.promoteReference(expremd0.from, expremd1.from) :
        promoter.promoteNumeric(expremd0.from, expremd1.from, true, true);

    if (promote == null) {
        throw new ClassCastException(AnalyzerUtility.error(ctx) + "Cannot apply [" + ctx.getChild(1).getText() + "] " +
            "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "].");
    }

    expremd0.to = promote;
    expremd1.to = promote;
    caster.markCast(expremd0);
    caster.markCast(expremd1);

    // Constant folding: only possible when both operands folded to constants themselves.
    if (expremd0.postConst != null && expremd1.postConst != null) {
        final Sort sort = promote.sort;

        if (ctx.EQ() != null || ctx.EQR() != null) {
            if (sort == Sort.BOOL) {
                compemd.preConst = (boolean)expremd0.postConst == (boolean)expremd1.postConst;
            } else if (sort == Sort.INT) {
                compemd.preConst = (int)expremd0.postConst == (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                compemd.preConst = (long)expremd0.postConst == (long)expremd1.postConst;
            } else if (sort == Sort.FLOAT) {
                compemd.preConst = (float)expremd0.postConst == (float)expremd1.postConst;
            } else if (sort == Sort.DOUBLE) {
                compemd.preConst = (double)expremd0.postConst == (double)expremd1.postConst;
            } else {
                if (ctx.EQ() != null && !expremd0.isNull && !expremd1.isNull) {
                    compemd.preConst = expremd0.postConst.equals(expremd1.postConst);
                } else if (ctx.EQR() != null) {
                    compemd.preConst = expremd0.postConst == expremd1.postConst;
                } else {
                    throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
                }
            }
        } else if (ctx.NE() != null || ctx.NER() != null) {
            if (sort == Sort.BOOL) {
                compemd.preConst = (boolean)expremd0.postConst != (boolean)expremd1.postConst;
            } else if (sort == Sort.INT) {
                compemd.preConst = (int)expremd0.postConst != (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                compemd.preConst = (long)expremd0.postConst != (long)expremd1.postConst;
            } else if (sort == Sort.FLOAT) {
                compemd.preConst = (float)expremd0.postConst != (float)expremd1.postConst;
            } else if (sort == Sort.DOUBLE) {
                compemd.preConst = (double)expremd0.postConst != (double)expremd1.postConst;
            } else {
                if (ctx.NE() != null && !expremd0.isNull && !expremd1.isNull) {
                    // BUG FIX: != on object constants must fold to the NEGATION of
                    // equals(); the previous code folded to equals(), inverting the
                    // result (mirror the primitive branches above which use !=).
                    compemd.preConst = !expremd0.postConst.equals(expremd1.postConst);
                } else if (ctx.NER() != null) {
                    // BUG FIX: !== must fold using reference INEQUALITY (!=), not ==.
                    compemd.preConst = expremd0.postConst != expremd1.postConst;
                } else {
                    throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
                }
            }
        } else if (ctx.GTE() != null) {
            if (sort == Sort.INT) {
                compemd.preConst = (int)expremd0.postConst >= (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                compemd.preConst = (long)expremd0.postConst >= (long)expremd1.postConst;
            } else if (sort == Sort.FLOAT) {
                compemd.preConst = (float)expremd0.postConst >= (float)expremd1.postConst;
            } else if (sort == Sort.DOUBLE) {
                compemd.preConst = (double)expremd0.postConst >= (double)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.GT() != null) {
            if (sort == Sort.INT) {
                compemd.preConst = (int)expremd0.postConst > (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                compemd.preConst = (long)expremd0.postConst > (long)expremd1.postConst;
            } else if (sort == Sort.FLOAT) {
                compemd.preConst = (float)expremd0.postConst > (float)expremd1.postConst;
            } else if (sort == Sort.DOUBLE) {
                compemd.preConst = (double)expremd0.postConst > (double)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.LTE() != null) {
            if (sort == Sort.INT) {
                compemd.preConst = (int)expremd0.postConst <= (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                compemd.preConst = (long)expremd0.postConst <= (long)expremd1.postConst;
            } else if (sort == Sort.FLOAT) {
                compemd.preConst = (float)expremd0.postConst <= (float)expremd1.postConst;
            } else if (sort == Sort.DOUBLE) {
                compemd.preConst = (double)expremd0.postConst <= (double)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else if (ctx.LT() != null) {
            if (sort == Sort.INT) {
                compemd.preConst = (int)expremd0.postConst < (int)expremd1.postConst;
            } else if (sort == Sort.LONG) {
                compemd.preConst = (long)expremd0.postConst < (long)expremd1.postConst;
            } else if (sort == Sort.FLOAT) {
                compemd.preConst = (float)expremd0.postConst < (float)expremd1.postConst;
            } else if (sort == Sort.DOUBLE) {
                compemd.preConst = (double)expremd0.postConst < (double)expremd1.postConst;
            } else {
                throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
            }
        } else {
            throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
        }
    }

    compemd.from = definition.booleanType;
    compemd.typesafe = expremd0.typesafe && expremd1.typesafe;
}
/**
 * Analyzes a boolean binary expression (&& or ||): both operands are required to be
 * boolean (casts are marked), and the result is constant-folded when both operands
 * are compile-time constants. The expression's type is always boolean.
 */
void processBool(final BoolContext ctx) {
    final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx);

    // Left operand: must be boolean.
    final ExpressionContext leftctx = AnalyzerUtility.updateExpressionTree(ctx.expression(0));
    final ExpressionMetadata leftemd = metadata.createExpressionMetadata(leftctx);
    leftemd.to = definition.booleanType;
    analyzer.visit(leftctx);
    caster.markCast(leftemd);

    // Right operand: must be boolean.
    final ExpressionContext rightctx = AnalyzerUtility.updateExpressionTree(ctx.expression(1));
    final ExpressionMetadata rightemd = metadata.createExpressionMetadata(rightctx);
    rightemd.to = definition.booleanType;
    analyzer.visit(rightctx);
    caster.markCast(rightemd);

    // Fold only when both operands folded to constants themselves.
    final boolean foldable = leftemd.postConst != null && rightemd.postConst != null;

    if (foldable) {
        if (ctx.BOOLAND() != null) {
            boolemd.preConst = (boolean)leftemd.postConst && (boolean)rightemd.postConst;
        } else if (ctx.BOOLOR() != null) {
            boolemd.preConst = (boolean)leftemd.postConst || (boolean)rightemd.postConst;
        } else {
            throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
        }
    }

    boolemd.from = definition.booleanType;
    boolemd.typesafe = leftemd.typesafe && rightemd.typesafe;
}
/**
 * Analyzes a ternary conditional (test ? left : right). The test must be boolean and
 * must not be a constant. When no target type is expected, the two branch types are
 * promoted to a common type; otherwise the expected type is pushed down to both
 * branches.
 */
void processConditional(final ConditionalContext ctx) {
    final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx);

    // Test expression: forced to boolean; a constant test makes the conditional pointless.
    final ExpressionContext testctx = AnalyzerUtility.updateExpressionTree(ctx.expression(0));
    final ExpressionMetadata testemd = metadata.createExpressionMetadata(testctx);
    testemd.to = definition.booleanType;
    analyzer.visit(testctx);
    caster.markCast(testemd);

    if (testemd.postConst != null) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unnecessary conditional statement.");
    }

    // Both branches inherit the expected type (and explicit-cast flag) before visiting.
    final ExpressionContext leftctx = AnalyzerUtility.updateExpressionTree(ctx.expression(1));
    final ExpressionMetadata leftemd = metadata.createExpressionMetadata(leftctx);
    leftemd.to = condemd.to;
    leftemd.explicit = condemd.explicit;
    analyzer.visit(leftctx);

    final ExpressionContext rightctx = AnalyzerUtility.updateExpressionTree(ctx.expression(2));
    final ExpressionMetadata rightemd = metadata.createExpressionMetadata(rightctx);
    rightemd.to = condemd.to;
    rightemd.explicit = condemd.explicit;
    analyzer.visit(rightctx);

    if (condemd.to != null) {
        condemd.from = condemd.to;
    } else {
        // No expected type: promote the branch types to a common conditional type.
        final Type promote = promoter.promoteConditional(leftemd.from, rightemd.from, leftemd.preConst, rightemd.preConst);

        leftemd.to = promote;
        rightemd.to = promote;
        condemd.from = promote;
    }

    caster.markCast(leftemd);
    caster.markCast(rightemd);

    // NOTE(review): typesafe considers only the test and the first branch
    // (expression(0) and expression(1)); the second branch's typesafe flag is
    // ignored -- confirm whether that is intentional.
    condemd.typesafe = testemd.typesafe && leftemd.typesafe;
}
/**
 * Analyzes an assignment (or compound assignment) expression by delegating to the
 * external-chain start node: the store expression and (for compound assignments) the
 * operator token are recorded on the chain's metadata before it is visited.
 */
void processAssignment(final AssignmentContext ctx) {
    final ExpressionMetadata assignemd = metadata.getExpressionMetadata(ctx);

    final ExtstartContext extstartctx = ctx.extstart();
    final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx);

    extstartemd.read = assignemd.read;
    extstartemd.storeExpr = AnalyzerUtility.updateExpressionTree(ctx.expression());

    // For compound assignments (*=, /=, ...), record the underlying binary operator.
    final int token = compoundToken(ctx);

    if (token != 0) {
        extstartemd.token = token;
    }

    analyzer.visit(extstartctx);

    assignemd.statement = true;
    assignemd.from = extstartemd.read ? extstartemd.current : definition.voidType;
    assignemd.typesafe = extstartemd.current.sort != Sort.DEF;
}

/**
 * Maps a compound-assignment operator context to the corresponding binary operator
 * token, or returns 0 when the assignment is a plain (non-compound) assignment.
 */
private int compoundToken(final AssignmentContext ctx) {
    if (ctx.AMUL() != null) {
        return MUL;
    } else if (ctx.ADIV() != null) {
        return DIV;
    } else if (ctx.AREM() != null) {
        return REM;
    } else if (ctx.AADD() != null) {
        return ADD;
    } else if (ctx.ASUB() != null) {
        return SUB;
    } else if (ctx.ALSH() != null) {
        return LSH;
    } else if (ctx.AUSH() != null) {
        return USH;
    } else if (ctx.ARSH() != null) {
        return RSH;
    } else if (ctx.AAND() != null) {
        return BWAND;
    } else if (ctx.AXOR() != null) {
        return BWXOR;
    } else if (ctx.AOR() != null) {
        return BWOR;
    }

    return 0;
}
/**
 * Analyzes an increment/decrement (++/--) used as part of an external chain: produces
 * the constant +1/-1 in the numeric type expected by the target (defaulting to int
 * when no target type is known).
 */
void processIncrement(final IncrementContext ctx) {
    final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx);
    final boolean positive = ctx.INCR() != null;
    final Type target = incremd.to;

    if (target == null) {
        // No expected type yet: default to an int constant.
        incremd.preConst = positive ? 1 : -1;
        incremd.from = definition.intType;
    } else if (target.sort == Sort.LONG) {
        incremd.preConst = positive ? 1L : -1L;
        incremd.from = definition.longType;
    } else if (target.sort == Sort.FLOAT) {
        incremd.preConst = positive ? 1.0F : -1.0F;
        incremd.from = definition.floatType;
    } else if (target.sort == Sort.DOUBLE) {
        incremd.preConst = positive ? 1.0 : -1.0;
        incremd.from = definition.doubleType;
    } else {
        // Any other sort (including int and smaller integral types) uses an int constant.
        incremd.preConst = positive ? 1 : -1;
        incremd.from = definition.intType;
    }
}
}

View File

@ -1,813 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.painless.AnalyzerUtility.Variable;
import org.elasticsearch.painless.Definition.Constructor;
import org.elasticsearch.painless.Definition.Field;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Struct;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Metadata.ExpressionMetadata;
import org.elasticsearch.painless.Metadata.ExtNodeMetadata;
import org.elasticsearch.painless.Metadata.ExternalMetadata;
import org.elasticsearch.painless.PainlessParser.DecltypeContext;
import org.elasticsearch.painless.PainlessParser.ExpressionContext;
import org.elasticsearch.painless.PainlessParser.ExtbraceContext;
import org.elasticsearch.painless.PainlessParser.ExtcallContext;
import org.elasticsearch.painless.PainlessParser.ExtcastContext;
import org.elasticsearch.painless.PainlessParser.ExtdotContext;
import org.elasticsearch.painless.PainlessParser.ExtfieldContext;
import org.elasticsearch.painless.PainlessParser.ExtnewContext;
import org.elasticsearch.painless.PainlessParser.ExtprecContext;
import org.elasticsearch.painless.PainlessParser.ExtstartContext;
import org.elasticsearch.painless.PainlessParser.ExtstringContext;
import org.elasticsearch.painless.PainlessParser.ExtvarContext;
import org.elasticsearch.painless.PainlessParser.IdentifierContext;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.painless.PainlessParser.ADD;
import static org.elasticsearch.painless.PainlessParser.BWAND;
import static org.elasticsearch.painless.PainlessParser.BWOR;
import static org.elasticsearch.painless.PainlessParser.BWXOR;
import static org.elasticsearch.painless.PainlessParser.DIV;
import static org.elasticsearch.painless.PainlessParser.MUL;
import static org.elasticsearch.painless.PainlessParser.REM;
import static org.elasticsearch.painless.PainlessParser.SUB;
class AnalyzerExternal {
private final Metadata metadata;
private final Definition definition;
private final Analyzer analyzer;
private final AnalyzerUtility utility;
private final AnalyzerCaster caster;
private final AnalyzerPromoter promoter;
/**
 * Wires the external-chain analyzer to its shared collaborators. All references are
 * stored as-is (no copies); {@code definition} is a convenience alias for the
 * whitelist definition carried by the metadata.
 */
AnalyzerExternal(final Metadata metadata, final Analyzer analyzer, final AnalyzerUtility utility,
                 final AnalyzerCaster caster, final AnalyzerPromoter promoter) {
    this.promoter = promoter;
    this.caster = caster;
    this.utility = utility;
    this.analyzer = analyzer;
    this.definition = metadata.definition;
    this.metadata = metadata;
}
/**
 * Analyzes the start of an external chain by dispatching to whichever single child
 * production is present (precedence, cast, variable, new, or string), linking the
 * child's node metadata back to this start node before visiting it.
 */
void processExtstart(final ExtstartContext ctx) {
    final ExtprecContext prec = ctx.extprec();

    if (prec != null) {
        metadata.createExtNodeMetadata(ctx, prec);
        analyzer.visit(prec);
        return;
    }

    final ExtcastContext cast = ctx.extcast();

    if (cast != null) {
        metadata.createExtNodeMetadata(ctx, cast);
        analyzer.visit(cast);
        return;
    }

    final ExtvarContext variable = ctx.extvar();

    if (variable != null) {
        metadata.createExtNodeMetadata(ctx, variable);
        analyzer.visit(variable);
        return;
    }

    final ExtnewContext anew = ctx.extnew();

    if (anew != null) {
        metadata.createExtNodeMetadata(ctx, anew);
        analyzer.visit(anew);
        return;
    }

    final ExtstringContext string = ctx.extstring();

    if (string != null) {
        metadata.createExtNodeMetadata(ctx, string);
        analyzer.visit(string);
        return;
    }

    // The grammar guarantees exactly one child; reaching here is a parser bug.
    throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
/**
 * Analyzes a parenthesized (precedence) segment of an external chain. The inner
 * production is visited first; an optional trailing dot/brace continuation is then
 * visited with the scope counter restored. The scope counter is incremented around
 * the inner visit so nested nodes know they are not the last link in the chain.
 */
void processExtprec(final ExtprecContext ctx) {
    final ExtNodeMetadata precenmd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext parent = precenmd.parent;
    final ExternalMetadata parentemd = metadata.getExternalMetadata(parent);

    final ExtprecContext precctx = ctx.extprec();
    final ExtcastContext castctx = ctx.extcast();
    final ExtvarContext varctx = ctx.extvar();
    final ExtnewContext newctx = ctx.extnew();
    final ExtstringContext stringctx = ctx.extstring();

    final ExtdotContext dotctx = ctx.extdot();
    final ExtbraceContext bracectx = ctx.extbrace();

    // A trailing continuation means the inner nodes are not last; bump the scope
    // counter so their "last" checks (scope == 0) fail, and restore it below.
    if (dotctx != null || bracectx != null) {
        ++parentemd.scope;
    }

    if (precctx != null) {
        metadata.createExtNodeMetadata(parent, precctx);
        analyzer.visit(precctx);
    } else if (castctx != null) {
        metadata.createExtNodeMetadata(parent, castctx);
        analyzer.visit(castctx);
    } else if (varctx != null) {
        metadata.createExtNodeMetadata(parent, varctx);
        analyzer.visit(varctx);
    } else if (newctx != null) {
        metadata.createExtNodeMetadata(parent, newctx);
        analyzer.visit(newctx);
    } else if (stringctx != null) {
        // NOTE(review): this branch links the string node to ctx while every sibling
        // branch links to parent -- confirm whether this asymmetry is intentional.
        metadata.createExtNodeMetadata(ctx, stringctx);
        analyzer.visit(stringctx);
    } else {
        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
    }

    // A parenthesized expression by itself is never a standalone statement.
    parentemd.statement = false;

    if (dotctx != null) {
        --parentemd.scope;
        metadata.createExtNodeMetadata(parent, dotctx);
        analyzer.visit(dotctx);
    } else if (bracectx != null) {
        --parentemd.scope;
        metadata.createExtNodeMetadata(parent, bracectx);
        analyzer.visit(bracectx);
    }
}
/**
 * Analyzes an explicit cast segment of an external chain: the inner production is
 * visited first to establish the current type, then the declared target type is
 * resolved and a legal (explicit) cast from the current type is recorded on this
 * node's metadata. The chain's current type becomes the declared type.
 */
void processExtcast(final ExtcastContext ctx) {
    final ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext parent = castenmd.parent;
    final ExternalMetadata parentemd = metadata.getExternalMetadata(parent);

    final ExtprecContext precctx = ctx.extprec();
    final ExtcastContext castctx = ctx.extcast();
    final ExtvarContext varctx = ctx.extvar();
    final ExtnewContext newctx = ctx.extnew();
    final ExtstringContext stringctx = ctx.extstring();

    if (precctx != null) {
        metadata.createExtNodeMetadata(parent, precctx);
        analyzer.visit(precctx);
    } else if (castctx != null) {
        metadata.createExtNodeMetadata(parent, castctx);
        analyzer.visit(castctx);
    } else if (varctx != null) {
        metadata.createExtNodeMetadata(parent, varctx);
        analyzer.visit(varctx);
    } else if (newctx != null) {
        metadata.createExtNodeMetadata(parent, newctx);
        analyzer.visit(newctx);
    } else if (stringctx != null) {
        // NOTE(review): links the string node to ctx while sibling branches link to
        // parent (same asymmetry as processExtprec) -- confirm intentional.
        metadata.createExtNodeMetadata(ctx, stringctx);
        analyzer.visit(stringctx);
    } else {
        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
    }

    // Resolve the declared target type, then validate the cast (true = explicit).
    final DecltypeContext declctx = ctx.decltype();
    final ExpressionMetadata declemd = metadata.createExpressionMetadata(declctx);
    analyzer.visit(declctx);

    castenmd.castTo = caster.getLegalCast(ctx, parentemd.current, declemd.from, true);
    castenmd.type = declemd.from;
    parentemd.current = declemd.from;
    // A cast by itself is never a standalone statement.
    parentemd.statement = false;
}
/**
 * Analyzes a brace (index) access in an external chain. Three cases are handled:
 * real array access (int index), def access (object index), and the Map/List
 * "shortcut" forms, where [key] is rewritten to get/put (maps) or get/set (lists)
 * calls after validating the shortcut methods' signatures. Chains to a trailing
 * dot/brace continuation if present.
 */
void processExtbrace(final ExtbraceContext ctx) {
    final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext parent = braceenmd.parent;
    final ExternalMetadata parentemd = metadata.getExternalMetadata(parent);

    final boolean array = parentemd.current.sort == Sort.ARRAY;
    final boolean def = parentemd.current.sort == Sort.DEF;
    boolean map = false;
    boolean list = false;

    // Detect Map/List shortcut eligibility by attempting a subclass check; the
    // ClassCastException is the expected "not a Map/List" signal.
    try {
        parentemd.current.clazz.asSubclass(Map.class);
        map = true;
    } catch (final ClassCastException exception) {
        // Do nothing.
    }

    try {
        parentemd.current.clazz.asSubclass(List.class);
        list = true;
    } catch (final ClassCastException exception) {
        // Do nothing.
    }

    final ExtdotContext dotctx = ctx.extdot();
    final ExtbraceContext bracectx = ctx.extbrace();

    // This node is the last link only when nothing follows and no enclosing
    // precedence node bumped the scope counter.
    braceenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null;

    final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);

    if (array || def) {
        // Arrays index with int; def defers the index type to runtime (object).
        expremd.to = array ? definition.intType : definition.objectType;
        analyzer.visit(exprctx);
        caster.markCast(expremd);

        braceenmd.target = "#brace";
        braceenmd.type = def ? definition.defType :
            definition.getType(parentemd.current.struct, parentemd.current.type.getDimensions() - 1);
        analyzeLoadStoreExternal(ctx);
        parentemd.current = braceenmd.type;
    } else {
        final boolean store = braceenmd.last && parentemd.storeExpr != null;
        final boolean get = parentemd.read || parentemd.token > 0 || !braceenmd.last;
        final boolean set = braceenmd.last && store;

        Method getter;
        Method setter;
        Type valuetype;
        Type settype;

        if (map) {
            // Map shortcut: [key] -> get(key) / put(key, value); validate arity,
            // non-void getter return, and matching key/value types between the two.
            getter = parentemd.current.struct.methods.get("get");
            setter = parentemd.current.struct.methods.get("put");

            if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1)) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                    "Illegal map get shortcut for type [" + parentemd.current.name + "].");
            }

            if (setter != null && setter.arguments.size() != 2) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                    "Illegal map set shortcut for type [" + parentemd.current.name + "].");
            }

            if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0))
                || !getter.rtn.equals(setter.arguments.get(1)))) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Shortcut argument types must match.");
            }

            valuetype = setter != null ? setter.arguments.get(0) : getter != null ? getter.arguments.get(0) : null;
            settype = setter == null ? null : setter.arguments.get(1);
        } else if (list) {
            // List shortcut: [index] -> get(int) / set(int, value); index must be int.
            getter = parentemd.current.struct.methods.get("get");
            setter = parentemd.current.struct.methods.get("set");

            if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 ||
                getter.arguments.get(0).sort != Sort.INT)) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                    "Illegal list get shortcut for type [" + parentemd.current.name + "].");
            }

            if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                    "Illegal list set shortcut for type [" + parentemd.current.name + "].");
            }

            if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0))
                || !getter.rtn.equals(setter.arguments.get(1)))) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Shortcut argument types must match.");
            }

            valuetype = definition.intType;
            settype = setter == null ? null : setter.arguments.get(1);
        } else {
            throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
        }

        // Only wire the shortcut when every required accessor actually exists.
        if ((get || set) && (!get || getter != null) && (!set || setter != null)) {
            expremd.to = valuetype;
            analyzer.visit(exprctx);
            caster.markCast(expremd);

            braceenmd.target = new Object[] {getter, setter, true, null};
            braceenmd.type = get ? getter.rtn : settype;
            analyzeLoadStoreExternal(ctx);
            parentemd.current = get ? getter.rtn : setter.rtn;
        }
    }

    // target is still null when the type is neither array/def nor a valid shortcut.
    if (braceenmd.target == null) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
            "Attempting to address a non-array type [" + parentemd.current.name + "] as an array.");
    }

    if (dotctx != null) {
        metadata.createExtNodeMetadata(parent, dotctx);
        analyzer.visit(dotctx);
    } else if (bracectx != null) {
        metadata.createExtNodeMetadata(parent, bracectx);
        analyzer.visit(bracectx);
    }
}
/**
 * Analyzes a dot segment of an external chain by dispatching to the method-call or
 * field-access child, linking its node metadata to this node's chain parent.
 */
void processExtdot(final ExtdotContext ctx) {
    final ExtNodeMetadata dotemnd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext parent = dotemnd.parent;

    final ExtcallContext call = ctx.extcall();

    if (call != null) {
        metadata.createExtNodeMetadata(parent, call);
        analyzer.visit(call);
        return;
    }

    final ExtfieldContext field = ctx.extfield();

    if (field != null) {
        metadata.createExtNodeMetadata(parent, field);
        analyzer.visit(field);
    }
}
/**
 * Analyzes a method call in an external chain. For known types the whitelisted
 * method (or static function) is resolved and argument counts checked; for the def
 * type the call is deferred to runtime with all arguments typed def. Each argument
 * is visited and cast to the resolved parameter type. Chains to a trailing
 * dot/brace continuation if present.
 */
void processExtcall(final ExtcallContext ctx) {
    final ExtNodeMetadata callenmd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext parent = callenmd.parent;
    final ExternalMetadata parentemd = metadata.getExternalMetadata(parent);

    final ExtdotContext dotctx = ctx.extdot();
    final ExtbraceContext bracectx = ctx.extbrace();

    // Last link only when nothing follows and no enclosing node bumped the scope.
    callenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null;

    final String name = ctx.EXTID().getText();

    // Calls are illegal on arrays, and a call result can never be a store target.
    if (parentemd.current.sort == Sort.ARRAY) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected call [" + name + "] on an array.");
    } else if (callenmd.last && parentemd.storeExpr != null) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot assign a value to a call [" + name + "].");
    }

    final Struct struct = parentemd.current.struct;
    final List<ExpressionContext> arguments = ctx.arguments().expression();
    final int size = arguments.size();
    Type[] types;

    // Static context resolves through functions; instance context through methods.
    final Method method = parentemd.statik ? struct.functions.get(name) : struct.methods.get(name);
    final boolean def = parentemd.current.sort == Sort.DEF;

    if (method == null && !def) {
        throw new IllegalArgumentException(
            AnalyzerUtility.error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "].");
    } else if (method != null) {
        types = new Type[method.arguments.size()];
        method.arguments.toArray(types);

        callenmd.target = method;
        callenmd.type = method.rtn;
        // A trailing call whose result is unread counts as a standalone statement.
        parentemd.statement = !parentemd.read && callenmd.last;
        parentemd.current = method.rtn;

        if (size != types.length) {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "When calling [" + name + "] on type " +
                "[" + struct.name + "] expected [" + types.length + "] arguments," +
                " but found [" + arguments.size() + "].");
        }
    } else {
        // def receiver: defer dispatch to runtime; every argument is typed def.
        types = new Type[arguments.size()];
        Arrays.fill(types, definition.defType);

        callenmd.target = name;
        callenmd.type = definition.defType;
        parentemd.statement = !parentemd.read && callenmd.last;
        parentemd.current = callenmd.type;
    }

    // Visit each argument with its expected parameter type and mark the cast.
    for (int argument = 0; argument < size; ++argument) {
        final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(arguments.get(argument));
        final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);
        expremd.to = types[argument];
        analyzer.visit(exprctx);
        caster.markCast(expremd);
    }

    // Anything after a call is an instance access, even if the chain began static.
    parentemd.statik = false;

    if (dotctx != null) {
        metadata.createExtNodeMetadata(parent, dotctx);
        analyzer.visit(dotctx);
    } else if (bracectx != null) {
        metadata.createExtNodeMetadata(parent, bracectx);
        analyzer.visit(bracectx);
    }
}
/**
 * Analyzes an identifier at the head of an external chain. The identifier is either
 * a type name (beginning a static access, which must be followed by a dot) or a
 * local variable load/store. Reserved script variables (_score, _value, doc, ctx)
 * are read-only, and usage of _score/ctx is tracked so their lookups can be done
 * once per script execution.
 */
void processExtvar(final ExtvarContext ctx) {
    final ExtNodeMetadata varenmd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext parent = varenmd.parent;
    final ExternalMetadata parentemd = metadata.getExternalMetadata(parent);

    final IdentifierContext idctx = ctx.identifier();
    final String id = idctx.getText();

    final ExtdotContext dotctx = ctx.extdot();
    final ExtbraceContext bracectx = ctx.extbrace();

    final boolean type = utility.isValidType(idctx, false);

    if (type) {
        // Static type access: only legal at the start of a chain and must be
        // followed by a dot (never a brace).
        if (parentemd.current != null || dotctx == null || bracectx != null) {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected static type [" + id + "].");
        }

        varenmd.type = definition.getType(id);
        parentemd.current = varenmd.type;
        parentemd.statik = true;

        metadata.createExtNodeMetadata(parent, dotctx);
        analyzer.visit(dotctx);
    } else {
        utility.isValidIdentifier(idctx, true);

        // A variable can only appear at the start of a chain.
        if (parentemd.current != null) {
            throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected variable [" + id + "] load.");
        }

        varenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null;

        final Variable variable = utility.getVariable(id);

        if (variable == null) {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unknown variable [" + id + "].");
        }

        // special cases: reserved words
        if ("_score".equals(id) || "_value".equals(id) || "doc".equals(id) || "ctx".equals(id)) {
            // read-only: don't allow stores to ourself
            if (varenmd.last && parentemd.storeExpr != null) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Variable [" + id + "] is read-only.");
            }

            if ("_score".equals(id)) {
                // track if the _score value is ever used, we will invoke Scorer.score() only once if so.
                metadata.scoreValueUsed = true;
            } else if ("ctx".equals(id)) {
                // track if ctx value is ever used, we will invoke Map.get() only once if so.
                metadata.ctxValueUsed = true;
            }
        }

        varenmd.target = variable.slot;
        varenmd.type = variable.type;
        analyzeLoadStoreExternal(ctx);
        parentemd.current = varenmd.type;

        if (dotctx != null) {
            metadata.createExtNodeMetadata(parent, dotctx);
            analyzer.visit(dotctx);
        } else if (bracectx != null) {
            metadata.createExtNodeMetadata(parent, bracectx);
            analyzer.visit(bracectx);
        }
    }
}
/**
 * Analyzes a field access in an external chain. Resolution order: the special
 * array "length" pseudo-field; a runtime-dispatched def access; a whitelisted
 * field; a getX/setX getter-setter shortcut; and finally the Map ([key] via a
 * string field name) or List ([index] via an integer field name) shortcuts.
 * Shortcut method signatures are validated before use. Chains to a trailing
 * dot/brace continuation if present.
 */
void processExtfield(final ExtfieldContext ctx) {
    final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext parent = memberenmd.parent;
    final ExternalMetadata parentemd = metadata.getExternalMetadata(parent);

    if (ctx.EXTID() == null && ctx.EXTINTEGER() == null) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected state.");
    }

    // The accessed name is either an identifier or an integer (list shortcut).
    final String value = ctx.EXTID() == null ? ctx.EXTINTEGER().getText() : ctx.EXTID().getText();

    final ExtdotContext dotctx = ctx.extdot();
    final ExtbraceContext bracectx = ctx.extbrace();

    // Last link only when nothing follows and no enclosing node bumped the scope.
    memberenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null;
    final boolean store = memberenmd.last && parentemd.storeExpr != null;

    if (parentemd.current == null) {
        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected field [" + value + "] load.");
    }

    if (parentemd.current.sort == Sort.ARRAY) {
        // Arrays expose only the read-only "length" pseudo-field.
        if ("length".equals(value)) {
            if (!parentemd.read) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Must read array field [length].");
            } else if (store) {
                throw new IllegalArgumentException(
                    AnalyzerUtility.error(ctx) + "Cannot write to read-only array field [length].");
            }

            memberenmd.target = "#length";
            memberenmd.type = definition.intType;
            parentemd.current = definition.intType;
        } else {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected array field [" + value + "].");
        }
    } else if (parentemd.current.sort == Sort.DEF) {
        // def receiver: defer field resolution to runtime.
        memberenmd.target = value;
        memberenmd.type = definition.defType;
        analyzeLoadStoreExternal(ctx);
        parentemd.current = memberenmd.type;
    } else {
        final Struct struct = parentemd.current.struct;
        // Static context resolves through statics; instance context through members.
        final Field field = parentemd.statik ? struct.statics.get(value) : struct.members.get(value);

        if (field != null) {
            if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot write to read-only" +
                    " field [" + value + "] for type [" + struct.name + "].");
            }

            memberenmd.target = field;
            memberenmd.type = field.type;
            analyzeLoadStoreExternal(ctx);
            parentemd.current = memberenmd.type;
        } else {
            final boolean get = parentemd.read || parentemd.token > 0 || !memberenmd.last;
            final boolean set = memberenmd.last && store;

            // Getter/setter shortcut: field name "x" maps to getX()/setX(value).
            Method getter = struct.methods.get("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1));
            Method setter = struct.methods.get("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1));
            Object constant = null;

            if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                    "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "].");
            }

            if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                    "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "].");
            }

            Type settype = setter == null ? null : setter.arguments.get(0);

            if (getter == null && setter == null) {
                if (ctx.EXTID() != null) {
                    // Map shortcut: .name -> get("name") / put("name", value);
                    // the ClassCastException is the expected "not a Map" signal.
                    try {
                        parentemd.current.clazz.asSubclass(Map.class);

                        getter = parentemd.current.struct.methods.get("get");
                        setter = parentemd.current.struct.methods.get("put");

                        if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 ||
                            getter.arguments.get(0).sort != Sort.STRING)) {
                            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                                "Illegal map get shortcut [" + value + "] for type [" + struct.name + "].");
                        }

                        if (setter != null && (setter.arguments.size() != 2 ||
                            setter.arguments.get(0).sort != Sort.STRING)) {
                            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                                "Illegal map set shortcut [" + value + "] for type [" + struct.name + "].");
                        }

                        if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) {
                            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Shortcut argument types must match.");
                        }

                        settype = setter == null ? null : setter.arguments.get(1);
                        constant = value;
                    } catch (ClassCastException exception) {
                        //Do nothing.
                    }
                } else if (ctx.EXTINTEGER() != null) {
                    // List shortcut: .0 -> get(0) / set(0, value).
                    try {
                        parentemd.current.clazz.asSubclass(List.class);

                        getter = parentemd.current.struct.methods.get("get");
                        setter = parentemd.current.struct.methods.get("set");

                        if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 ||
                            getter.arguments.get(0).sort != Sort.INT)) {
                            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                                "Illegal list get shortcut [" + value + "] for type [" + struct.name + "].");
                        }

                        if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) {
                            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                                "Illegal list set shortcut [" + value + "] for type [" + struct.name + "].");
                        }

                        if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) {
                            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Shortcut argument types must match.");
                        }

                        settype = setter == null ? null : setter.arguments.get(1);

                        try {
                            constant = Integer.parseInt(value);
                        } catch (NumberFormatException exception) {
                            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) +
                                "Illegal list shortcut value [" + value + "].");
                        }
                    } catch (ClassCastException exception) {
                        //Do nothing.
                    }
                } else {
                    throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
                }
            }

            // Only wire the shortcut when every required accessor actually exists.
            if ((get || set) && (!get || getter != null) && (!set || setter != null)) {
                memberenmd.target = new Object[] {getter, setter, constant != null, constant};
                memberenmd.type = get ? getter.rtn : settype;
                analyzeLoadStoreExternal(ctx);
                parentemd.current = get ? getter.rtn : setter.rtn;
            }
        }

        // target is still null when neither a field nor any shortcut resolved.
        if (memberenmd.target == null) {
            throw new IllegalArgumentException(
                AnalyzerUtility.error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "].");
        }
    }

    // Anything after a field access is an instance access.
    parentemd.statik = false;

    if (dotctx != null) {
        metadata.createExtNodeMetadata(parent, dotctx);
        analyzer.visit(dotctx);
    } else if (bracectx != null) {
        metadata.createExtNodeMetadata(parent, bracectx);
        analyzer.visit(bracectx);
    }
}
/**
 * Analyzes a 'new' call in an external chain: either a constructor call
 * (new Type(args)) or an array creation (new Type[dim]...[dim]).
 * Records the resolved target (a Constructor or the "#makearray" marker)
 * and resulting type in this node's metadata, casts each argument to its
 * expected type, then visits the trailing dot chain if present.
 */
void processExtnew(final ExtnewContext ctx) {
    final ExtNodeMetadata newenmd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext parent = newenmd.parent;
    final ExternalMetadata parentemd = metadata.getExternalMetadata(parent);

    final ExtdotContext dotctx = ctx.extdot();
    // This node terminates the chain only at scope depth 0 with no trailing dot.
    newenmd.last = parentemd.scope == 0 && dotctx == null;

    final IdentifierContext idctx = ctx.identifier();
    final String type = idctx.getText();
    utility.isValidType(idctx, true);

    // A 'new' must begin a chain (no current type yet) and can never be a
    // store target.
    if (parentemd.current != null) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unexpected new call.");
    } else if (newenmd.last && parentemd.storeExpr != null) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot assign a value to a new call.");
    }

    final Struct struct = definition.structs.get(type);
    // Constructor call has an argument list; array creation has one
    // expression per dimension.
    final boolean newclass = ctx.arguments() != null;
    final boolean newarray = !ctx.expression().isEmpty();

    final List<ExpressionContext> arguments = newclass ? ctx.arguments().expression() : ctx.expression();
    final int size = arguments.size();

    Type[] types;

    if (newarray) {
        if (!parentemd.read) {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "A newly created array must be assigned.");
        }

        // Every array dimension expression must evaluate to an int.
        types = new Type[size];
        Arrays.fill(types, definition.intType);

        newenmd.target = "#makearray";

        if (size > 1) {
            newenmd.type = definition.getType(struct, size);
            parentemd.current = newenmd.type;
        } else if (size == 1) {
            // One dimension: the node type is the element type while the
            // chain continues with the one-dimensional array type.
            newenmd.type = definition.getType(struct, 0);
            parentemd.current = definition.getType(struct, 1);
        } else {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "A newly created array cannot have zero dimensions.");
        }
    } else if (newclass) {
        final Constructor constructor = struct.constructors.get("new");

        if (constructor != null) {
            types = new Type[constructor.arguments.size()];
            constructor.arguments.toArray(types);

            newenmd.target = constructor;
            newenmd.type = definition.getType(struct, 0);
            // A constructor call whose value is not read and that ends the
            // chain still counts as a statement.
            parentemd.statement = !parentemd.read && newenmd.last;
            parentemd.current = newenmd.type;
        } else {
            throw new IllegalArgumentException(
                AnalyzerUtility.error(ctx) + "Unknown new call on type [" + struct.name + "].");
        }
    } else {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unknown state.");
    }

    if (size != types.length) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "When calling constructor on type " +
            "[" + struct.name + "] expected [" + types.length + "] arguments," +
            " but found [" + arguments.size() + "].");
    }

    // Cast each argument to the matching constructor parameter / dimension type.
    for (int argument = 0; argument < size; ++argument) {
        final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(arguments.get(argument));
        final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);
        expremd.to = types[argument];
        analyzer.visit(exprctx);
        caster.markCast(expremd);
    }

    if (dotctx != null) {
        metadata.createExtNodeMetadata(parent, dotctx);
        analyzer.visit(dotctx);
    }
}
/**
 * Analyzes a String constant appearing in an external chain. The constant
 * must begin the chain, must be read, and can never be a store target.
 */
void processExtstring(final ExtstringContext ctx) {
    final ExtNodeMetadata stringenmd = metadata.getExtNodeMetadata(ctx);
    final ParserRuleContext owner = stringenmd.parent;
    final ExternalMetadata owneremd = metadata.getExternalMetadata(owner);

    // Strip the surrounding quotes from the token text.
    final String token = ctx.STRING().getText();
    final String string = token.substring(1, token.length() - 1);

    final ExtdotContext dotctx = ctx.extdot();
    final ExtbraceContext bracectx = ctx.extbrace();

    final boolean terminal = owneremd.scope == 0 && dotctx == null && bracectx == null;
    stringenmd.last = terminal;
    final boolean store = terminal && owneremd.storeExpr != null;

    if (owneremd.current != null) {
        throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected String constant [" + string + "].");
    }

    if (!owneremd.read) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Must read String constant [" + string + "].");
    } else if (store) {
        throw new IllegalArgumentException(
            AnalyzerUtility.error(ctx) + "Cannot write to read-only String constant [" + string + "].");
    }

    stringenmd.target = string;
    stringenmd.type = definition.stringType;
    owneremd.current = definition.stringType;

    // A terminal constant becomes the whole chain's constant value.
    if (terminal) {
        owneremd.constant = string;
    }

    // Continue down the chain through a dot or brace access, if any.
    if (dotctx != null) {
        metadata.createExtNodeMetadata(owner, dotctx);
        analyzer.visit(dotctx);
    } else if (bracectx != null) {
        metadata.createExtNodeMetadata(owner, bracectx);
        analyzer.visit(bracectx);
    }
}
/**
 * Resolves the store (assignment) half of an external chain once its last
 * node has been analyzed. For compound assignments (+=, &amp;=, ...) it
 * computes the promoted operation type and the casts into and out of it;
 * for a plain assignment it just casts the stored expression to the node
 * type.
 */
private void analyzeLoadStoreExternal(final ParserRuleContext source) {
    final ExtNodeMetadata extenmd = metadata.getExtNodeMetadata(source);
    final ParserRuleContext parent = extenmd.parent;
    final ExternalMetadata parentemd = metadata.getExternalMetadata(parent);

    if (extenmd.last && parentemd.storeExpr != null) {
        final ParserRuleContext store = parentemd.storeExpr;
        final ExpressionMetadata storeemd = metadata.createExpressionMetadata(parentemd.storeExpr);
        // token > 0 marks a compound-assignment operator.
        final int token = parentemd.token;

        if (token > 0) {
            analyzer.visit(store);

            final boolean add = token == ADD;
            final boolean xor = token == BWAND || token == BWXOR || token == BWOR;
            final boolean decimal = token == MUL || token == DIV || token == REM || token == SUB;

            // Choose the promotion rule matching the operator family.
            extenmd.promote = add ? promoter.promoteAdd(extenmd.type, storeemd.from) :
                xor ? promoter.promoteXor(extenmd.type, storeemd.from) :
                promoter.promoteNumeric(extenmd.type, storeemd.from, decimal, true);

            if (extenmd.promote == null) {
                throw new IllegalArgumentException("Cannot apply compound assignment to " +
                    "types [" + extenmd.type.name + "] and [" + storeemd.from.name + "].");
            }

            // Cast the loaded value into the promoted type, and the result
            // back out to the node's own type.
            extenmd.castFrom = caster.getLegalCast(source, extenmd.type, extenmd.promote, false);
            extenmd.castTo = caster.getLegalCast(source, extenmd.promote, extenmd.type, true);

            // String concatenation keeps the stored expression's own type.
            storeemd.to = add && extenmd.promote.sort == Sort.STRING ? storeemd.from : extenmd.promote;
            caster.markCast(storeemd);
        } else {
            // Plain assignment: cast the stored value straight to the node type.
            storeemd.to = extenmd.type;
            analyzer.visit(store);
            caster.markCast(storeemd);
        }
    }
}
}

View File

@ -1,281 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.painless.Definition.Pair;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
/**
 * Promotion-type resolution for the analyzer. Follows the Java Language
 * Specification's numeric promotion and conditional-expression typing rules
 * (JLS 5.6, 15.25), extended with Painless's dynamic def type and the boxed
 * object variants of the primitive types.
 */
class AnalyzerPromoter {
    private final Definition definition;

    AnalyzerPromoter(final Definition definition) {
        this.definition = definition;
    }

    /**
     * Unary numeric promotion of a single type.
     *
     * @param decimal whether floating-point results are permitted
     * @param primitive whether the promoted type should be primitive (vs boxed)
     * @return the promoted type, or null when the type is not numeric
     */
    Type promoteNumeric(final Type from, final boolean decimal, final boolean primitive) {
        final Sort sort = from.sort;

        if (sort == Sort.DEF) {
            return definition.defType;
        } else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ || sort == Sort.NUMBER) && decimal) {
            return primitive ? definition.doubleType : definition.doubleobjType;
        } else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) {
            return primitive ? definition.floatType : definition.floatobjType;
        } else if (sort == Sort.LONG || sort == Sort.LONG_OBJ || sort == Sort.NUMBER) {
            return primitive ? definition.longType : definition.longobjType;
        } else if (sort.numeric) {
            // byte, short, char and int all promote to int.
            return primitive ? definition.intType : definition.intobjType;
        }

        return null;
    }

    /**
     * Binary numeric promotion of two types, widest operand wins.
     *
     * @return the promoted type, or null when either type is not numeric
     */
    Type promoteNumeric(final Type from0, final Type from1, final boolean decimal, final boolean primitive) {
        final Sort sort0 = from0.sort;
        final Sort sort1 = from1.sort;

        if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
            return definition.defType;
        }

        if (decimal) {
            if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort0 == Sort.NUMBER ||
                sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ || sort1 == Sort.NUMBER) {
                return primitive ? definition.doubleType : definition.doubleobjType;
            } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) {
                return primitive ? definition.floatType : definition.floatobjType;
            }
        }

        if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort0 == Sort.NUMBER ||
            sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ || sort1 == Sort.NUMBER) {
            return primitive ? definition.longType : definition.longobjType;
        } else if (sort0.numeric && sort1.numeric) {
            return primitive ? definition.intType : definition.intobjType;
        }

        return null;
    }

    /** Promotion for the add operator: String concatenation wins, otherwise numeric. */
    Type promoteAdd(final Type from0, final Type from1) {
        final Sort sort0 = from0.sort;
        final Sort sort1 = from1.sort;

        if (sort0 == Sort.STRING || sort1 == Sort.STRING) {
            return definition.stringType;
        }

        return promoteNumeric(from0, from1, true, true);
    }

    /** Promotion for and/xor/or: boolean logic wins, otherwise integral numeric. */
    Type promoteXor(final Type from0, final Type from1) {
        final Sort sort0 = from0.sort;
        final Sort sort1 = from1.sort;

        if (sort0.bool || sort1.bool) {
            return definition.booleanType;
        }

        return promoteNumeric(from0, from1, false, true);
    }

    /** Promotion for equality (==, !=) comparisons. */
    Type promoteEquality(final Type from0, final Type from1) {
        final Sort sort0 = from0.sort;
        final Sort sort1 = from1.sort;

        if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
            return definition.defType;
        }

        final boolean primitive = sort0.primitive && sort1.primitive;

        if (sort0.bool && sort1.bool) {
            return primitive ? definition.booleanType : definition.booleanobjType;
        }

        if (sort0.numeric && sort1.numeric) {
            return promoteNumeric(from0, from1, true, primitive);
        }

        return definition.objectType;
    }

    /** Promotion for reference comparisons; only fully-primitive pairs promote numerically. */
    Type promoteReference(final Type from0, final Type from1) {
        final Sort sort0 = from0.sort;
        final Sort sort1 = from1.sort;

        if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
            return definition.defType;
        }

        if (sort0.primitive && sort1.primitive) {
            if (sort0.bool && sort1.bool) {
                return definition.booleanType;
            }

            if (sort0.numeric && sort1.numeric) {
                return promoteNumeric(from0, from1, true, true);
            }
        }

        return definition.objectType;
    }

    /**
     * Promotion for the conditional (?:) operator following JLS 15.25,
     * including the narrowing cases where one operand is a constant whose
     * value fits in the other operand's smaller type.
     *
     * @param const0 the constant value of the first operand, or null
     * @param const1 the constant value of the second operand, or null
     * @return the promoted type; falls back to a declared upper bound or Object
     */
    Type promoteConditional(final Type from0, final Type from1, final Object const0, final Object const1) {
        if (from0.equals(from1)) {
            return from0;
        }

        final Sort sort0 = from0.sort;
        final Sort sort1 = from1.sort;

        if (sort0 == Sort.DEF || sort1 == Sort.DEF) {
            return definition.defType;
        }

        final boolean primitive = sort0.primitive && sort1.primitive;

        if (sort0.bool && sort1.bool) {
            return primitive ? definition.booleanType : definition.booleanobjType;
        }

        if (sort0.numeric && sort1.numeric) {
            if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) {
                return primitive ? definition.doubleType : definition.doubleobjType;
            } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) {
                return primitive ? definition.floatType : definition.floatobjType;
            } else if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) {
                return primitive ? definition.longType : definition.longobjType;
            } else {
                if (sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ) {
                    if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) {
                        return primitive ? definition.byteType : definition.byteobjType;
                    } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) {
                        // byte ?: short narrows to byte when the short constant fits.
                        if (const1 != null) {
                            final short constant = (short)const1;

                            if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) {
                                return primitive ? definition.byteType : definition.byteobjType;
                            }
                        }

                        return primitive ? definition.shortType : definition.shortobjType;
                    } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) {
                        return primitive ? definition.intType : definition.intobjType;
                    } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) {
                        if (const1 != null) {
                            final int constant = (int)const1;

                            if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) {
                                return primitive ? definition.byteType : definition.byteobjType;
                            }
                        }

                        return primitive ? definition.intType : definition.intobjType;
                    }
                } else if (sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ) {
                    if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) {
                        if (const0 != null) {
                            final short constant = (short)const0;

                            if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) {
                                return primitive ? definition.byteType : definition.byteobjType;
                            }
                        }

                        return primitive ? definition.shortType : definition.shortobjType;
                    } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) {
                        return primitive ? definition.shortType : definition.shortobjType;
                    } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) {
                        return primitive ? definition.intType : definition.intobjType;
                    } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) {
                        if (const1 != null) {
                            final int constant = (int)const1;

                            if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) {
                                return primitive ? definition.shortType : definition.shortobjType;
                            }
                        }

                        return primitive ? definition.intType : definition.intobjType;
                    }
                } else if (sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ) {
                    if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) {
                        return primitive ? definition.intType : definition.intobjType;
                    } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) {
                        return primitive ? definition.intType : definition.intobjType;
                    } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) {
                        return primitive ? definition.charType : definition.charobjType;
                    } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) {
                        if (const1 != null) {
                            final int constant = (int)const1;

                            if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) {
                                // BUGFIX: char ?: constant-int-that-fits-in-char
                                // promotes to char per JLS 15.25, not byte.
                                return primitive ? definition.charType : definition.charobjType;
                            }
                        }

                        return primitive ? definition.intType : definition.intobjType;
                    }
                } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ) {
                    if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) {
                        if (const0 != null) {
                            final int constant = (int)const0;

                            if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) {
                                return primitive ? definition.byteType : definition.byteobjType;
                            }
                        }

                        return primitive ? definition.intType : definition.intobjType;
                    } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) {
                        if (const0 != null) {
                            final int constant = (int)const0;

                            if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) {
                                // BUGFIX: constant-int-that-fits-in-short ?: short
                                // promotes to short per JLS 15.25, not byte.
                                return primitive ? definition.shortType : definition.shortobjType;
                            }
                        }

                        return primitive ? definition.intType : definition.intobjType;
                    } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) {
                        if (const0 != null) {
                            final int constant = (int)const0;

                            if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) {
                                // BUGFIX: constant-int-that-fits-in-char ?: char
                                // promotes to char per JLS 15.25, not byte.
                                return primitive ? definition.charType : definition.charobjType;
                            }
                        }

                        return primitive ? definition.intType : definition.intobjType;
                    } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) {
                        return primitive ? definition.intType : definition.intobjType;
                    }
                }
            }
        }

        // No numeric rule applied: fall back to an explicitly declared upper
        // bound for the pair, else Object.
        final Pair pair = new Pair(from0, from1);
        final Type bound = definition.bounds.get(pair);

        return bound == null ? definition.objectType : bound;
    }
}

View File

@ -1,591 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Metadata.ExpressionMetadata;
import org.elasticsearch.painless.Metadata.StatementMetadata;
import org.elasticsearch.painless.PainlessParser.AfterthoughtContext;
import org.elasticsearch.painless.PainlessParser.BlockContext;
import org.elasticsearch.painless.PainlessParser.BreakContext;
import org.elasticsearch.painless.PainlessParser.ContinueContext;
import org.elasticsearch.painless.PainlessParser.DeclContext;
import org.elasticsearch.painless.PainlessParser.DeclarationContext;
import org.elasticsearch.painless.PainlessParser.DecltypeContext;
import org.elasticsearch.painless.PainlessParser.DeclvarContext;
import org.elasticsearch.painless.PainlessParser.DoContext;
import org.elasticsearch.painless.PainlessParser.ExprContext;
import org.elasticsearch.painless.PainlessParser.ExpressionContext;
import org.elasticsearch.painless.PainlessParser.ForContext;
import org.elasticsearch.painless.PainlessParser.IdentifierContext;
import org.elasticsearch.painless.PainlessParser.IfContext;
import org.elasticsearch.painless.PainlessParser.InitializerContext;
import org.elasticsearch.painless.PainlessParser.MultipleContext;
import org.elasticsearch.painless.PainlessParser.ReturnContext;
import org.elasticsearch.painless.PainlessParser.SingleContext;
import org.elasticsearch.painless.PainlessParser.SourceContext;
import org.elasticsearch.painless.PainlessParser.StatementContext;
import org.elasticsearch.painless.PainlessParser.ThrowContext;
import org.elasticsearch.painless.PainlessParser.TrapContext;
import org.elasticsearch.painless.PainlessParser.TryContext;
import org.elasticsearch.painless.PainlessParser.WhileContext;
import java.util.List;
class AnalyzerStatement {
// Shared analysis state: per-node metadata store and the type definitions.
private final Metadata metadata;
private final Definition definition;
// Collaborators: the driving visitor, scope/variable utility, and cast helper.
private final Analyzer analyzer;
private final AnalyzerUtility utility;
private final AnalyzerCaster caster;
AnalyzerStatement(final Metadata metadata, final Analyzer analyzer,
final AnalyzerUtility utility, final AnalyzerCaster caster) {
this.metadata = metadata;
// The definition is shared through the metadata rather than passed separately.
this.definition = metadata.definition;
this.analyzer = analyzer;
this.utility = utility;
this.caster = caster;
}
/**
 * Analyzes the top-level source: visits each statement in a fresh scope and
 * rejects any statement that follows a point where all prior paths escape.
 */
void processSource(final SourceContext ctx) {
    final StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx);
    final List<StatementContext> statectxs = ctx.statement();
    final StatementContext lastctx = statectxs.get(statectxs.size() - 1);

    utility.incrementScope();

    for (final StatementContext statectx : statectxs) {
        // Unreachable-code check: a previous statement escaped on all paths.
        if (sourcesmd.allLast) {
            throw new IllegalArgumentException(AnalyzerUtility.error(statectx) +
                "Statement will never be executed because all prior paths escape.");
        }

        final StatementMetadata statesmd = metadata.createStatementMetadata(statectx);
        statesmd.lastSource = statectx == lastctx;
        analyzer.visit(statectx);

        // Only the flags of the most recently visited statement are kept.
        sourcesmd.methodEscape = statesmd.methodEscape;
        sourcesmd.allLast = statesmd.allLast;
    }

    utility.decrementScope();
}
/**
 * Analyzes an if/else statement. The condition must be a non-constant
 * boolean; each branch is analyzed in its own scope, and escape and
 * loop-control flags are merged into this statement's metadata.
 */
void processIf(final IfContext ctx) {
    final StatementMetadata ifsmd = metadata.getStatementMetadata(ctx);

    final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);
    expremd.to = definition.booleanType;
    analyzer.visit(exprctx);
    caster.markCast(expremd);

    // A constant condition makes one branch dead, so reject the statement.
    if (expremd.postConst != null) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "If statement is not necessary.");
    }

    final BlockContext blockctx0 = ctx.block(0);
    final StatementMetadata blocksmd0 = metadata.createStatementMetadata(blockctx0);
    blocksmd0.lastSource = ifsmd.lastSource;
    blocksmd0.inLoop = ifsmd.inLoop;
    blocksmd0.lastLoop = ifsmd.lastLoop;
    utility.incrementScope();
    analyzer.visit(blockctx0);
    utility.decrementScope();

    ifsmd.anyContinue = blocksmd0.anyContinue;
    ifsmd.anyBreak = blocksmd0.anyBreak;
    ifsmd.count = blocksmd0.count;

    if (ctx.ELSE() != null) {
        final BlockContext blockctx1 = ctx.block(1);
        final StatementMetadata blocksmd1 = metadata.createStatementMetadata(blockctx1);
        blocksmd1.lastSource = ifsmd.lastSource;
        // BUGFIX: propagate the loop context to the else branch as well
        // (previously only lastSource was copied), so break/continue inside
        // an else block within a loop are not wrongly rejected.
        blocksmd1.inLoop = ifsmd.inLoop;
        blocksmd1.lastLoop = ifsmd.lastLoop;
        utility.incrementScope();
        analyzer.visit(blockctx1);
        utility.decrementScope();

        // Both branches must escape for the whole statement to escape.
        ifsmd.methodEscape = blocksmd0.methodEscape && blocksmd1.methodEscape;
        ifsmd.loopEscape = blocksmd0.loopEscape && blocksmd1.loopEscape;
        ifsmd.allLast = blocksmd0.allLast && blocksmd1.allLast;
        ifsmd.anyContinue |= blocksmd1.anyContinue;
        ifsmd.anyBreak |= blocksmd1.anyBreak;
        ifsmd.count = Math.max(ifsmd.count, blocksmd1.count);
    }
}
/**
 * Analyzes a while loop. A constant-false condition is rejected as dead
 * code; a constant-true condition with an empty body can never exit; a
 * constant-true condition with no break means the loop escapes the method.
 */
void processWhile(final WhileContext ctx) {
    final StatementMetadata whilesmd = metadata.getStatementMetadata(ctx);

    utility.incrementScope();

    final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);
    expremd.to = definition.booleanType;
    analyzer.visit(exprctx);
    caster.markCast(expremd);

    boolean continuous = false;

    // Validate a condition that folded to a constant.
    if (expremd.postConst != null) {
        continuous = (boolean)expremd.postConst;

        if (!continuous) {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "The loop will never be executed.");
        }

        if (ctx.empty() != null) {
            // Constant-true condition with no body: infinite loop.
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "The loop will never exit.");
        }
    }

    final BlockContext blockctx = ctx.block();

    if (blockctx != null) {
        final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx);
        blocksmd.beginLoop = true;
        blocksmd.inLoop = true;
        analyzer.visit(blockctx);

        if (blocksmd.loopEscape && !blocksmd.anyContinue) {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "All paths escape so the loop is not necessary.");
        }

        // Infinite loop with no break: nothing after it can execute.
        if (continuous && !blocksmd.anyBreak) {
            whilesmd.methodEscape = true;
            whilesmd.allLast = true;
        }
    }

    whilesmd.count = 1;

    utility.decrementScope();
}
/**
 * Analyzes a do-while loop: the body is visited before the condition. A
 * constant-false condition is rejected; a constant-true condition with no
 * break makes the loop escape the method.
 */
void processDo(final DoContext ctx) {
    final StatementMetadata dosmd = metadata.getStatementMetadata(ctx);

    utility.incrementScope();

    final BlockContext blockctx = ctx.block();
    final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx);
    blocksmd.beginLoop = true;
    blocksmd.inLoop = true;
    analyzer.visit(blockctx);

    if (blocksmd.loopEscape && !blocksmd.anyContinue) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "All paths escape so the loop is not necessary.");
    }

    final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);
    expremd.to = definition.booleanType;
    analyzer.visit(exprctx);
    caster.markCast(expremd);

    // Validate a condition that folded to a constant.
    if (expremd.postConst != null) {
        final boolean continuous = (boolean)expremd.postConst;

        if (!continuous) {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "All paths escape so the loop is not necessary.");
        }

        // Constant-true with no break: the loop never returns normally.
        if (!blocksmd.anyBreak) {
            dosmd.methodEscape = true;
            dosmd.allLast = true;
        }
    }

    dosmd.count = 1;

    utility.decrementScope();
}
/**
 * Analyzes a for loop: initializer, condition, afterthought and body each
 * receive their own metadata. A missing condition means the loop is
 * continuous; a continuous loop with no break escapes the method.
 */
void processFor(final ForContext ctx) {
    final StatementMetadata forsmd = metadata.getStatementMetadata(ctx);
    boolean continuous = false;

    utility.incrementScope();

    final InitializerContext initctx = ctx.initializer();

    if (initctx != null) {
        metadata.createStatementMetadata(initctx);
        analyzer.visit(initctx);
    }

    final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression());

    if (exprctx != null) {
        final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);
        expremd.to = definition.booleanType;
        analyzer.visit(exprctx);
        caster.markCast(expremd);

        // Validate a condition that folded to a constant.
        if (expremd.postConst != null) {
            continuous = (boolean)expremd.postConst;

            if (!continuous) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "The loop will never be executed.");
            }

            if (ctx.empty() != null) {
                throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "The loop is continuous.");
            }
        }
    } else {
        // No condition at all: for (init;;after) loops forever.
        continuous = true;
    }

    final AfterthoughtContext atctx = ctx.afterthought();

    if (atctx != null) {
        metadata.createStatementMetadata(atctx);
        analyzer.visit(atctx);
    }

    final BlockContext blockctx = ctx.block();

    if (blockctx != null) {
        final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx);
        blocksmd.beginLoop = true;
        blocksmd.inLoop = true;
        analyzer.visit(blockctx);

        if (blocksmd.loopEscape && !blocksmd.anyContinue) {
            throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "All paths escape so the loop is not necessary.");
        }

        // Continuous loop with no break: nothing after it can execute.
        if (continuous && !blocksmd.anyBreak) {
            forsmd.methodEscape = true;
            forsmd.allLast = true;
        }
    }

    forsmd.count = 1;

    utility.decrementScope();
}
/** Analyzes a declaration statement by delegating to its declaration node. */
void processDecl(final DeclContext ctx) {
    final StatementMetadata declsmd = metadata.getStatementMetadata(ctx);
    final DeclarationContext declarationctx = ctx.declaration();

    metadata.createStatementMetadata(declarationctx);
    analyzer.visit(declarationctx);

    // A declaration always counts as exactly one statement.
    declsmd.count = 1;
}
/**
 * Validates a continue statement: it must appear inside a loop and not as
 * the loop's final statement, where it would be redundant.
 */
void processContinue(final ContinueContext ctx) {
    final StatementMetadata smd = metadata.getStatementMetadata(ctx);

    if (!smd.inLoop) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot have a continue statement outside of a loop.");
    }

    if (smd.lastLoop) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Unnecessary continue statement at the end of a loop.");
    }

    // A continue ends this path and marks the loop as continued somewhere.
    smd.anyContinue = true;
    smd.allLast = true;
    smd.count = 1;
}
/** Validates a break statement: it must appear inside a loop, which it escapes. */
void processBreak(final BreakContext ctx) {
    final StatementMetadata smd = metadata.getStatementMetadata(ctx);

    if (!smd.inLoop) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Cannot have a break statement outside of a loop.");
    }

    // A break escapes the loop and ends this path.
    smd.anyBreak = true;
    smd.loopEscape = true;
    smd.allLast = true;
    smd.count = 1;
}
/**
 * Analyzes a return statement: the returned value is cast to Object and the
 * statement escapes the method, any loop, and every path.
 */
void processReturn(final ReturnContext ctx) {
    final StatementMetadata smd = metadata.getStatementMetadata(ctx);

    final ExpressionContext valuectx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata valueemd = metadata.createExpressionMetadata(valuectx);
    valueemd.to = definition.objectType;
    analyzer.visit(valuectx);
    caster.markCast(valueemd);

    smd.methodEscape = true;
    smd.loopEscape = true;
    smd.allLast = true;
    smd.count = 1;
}
/**
 * Analyzes a try statement with its catch blocks. The statement escapes
 * only if the try block and every catch block escape; the statement count
 * is the try block's count plus the largest catch block's count.
 */
void processTry(final TryContext ctx) {
    final StatementMetadata trysmd = metadata.getStatementMetadata(ctx);

    final BlockContext blockctx = ctx.block();
    final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx);
    blocksmd.lastSource = trysmd.lastSource;
    blocksmd.inLoop = trysmd.inLoop;
    blocksmd.lastLoop = trysmd.lastLoop;
    utility.incrementScope();
    analyzer.visit(blockctx);
    utility.decrementScope();

    // Start from the try block's flags; catches can only weaken escapes.
    trysmd.methodEscape = blocksmd.methodEscape;
    trysmd.loopEscape = blocksmd.loopEscape;
    trysmd.allLast = blocksmd.allLast;
    trysmd.anyContinue = blocksmd.anyContinue;
    trysmd.anyBreak = blocksmd.anyBreak;

    int trapcount = 0;

    for (final TrapContext trapctx : ctx.trap()) {
        final StatementMetadata trapsmd = metadata.createStatementMetadata(trapctx);
        trapsmd.lastSource = trysmd.lastSource;
        trapsmd.inLoop = trysmd.inLoop;
        trapsmd.lastLoop = trysmd.lastLoop;
        utility.incrementScope();
        analyzer.visit(trapctx);
        utility.decrementScope();

        // Escapes must hold on every path; loop control may come from any.
        trysmd.methodEscape &= trapsmd.methodEscape;
        trysmd.loopEscape &= trapsmd.loopEscape;
        trysmd.allLast &= trapsmd.allLast;
        trysmd.anyContinue |= trapsmd.anyContinue;
        trysmd.anyBreak |= trapsmd.anyBreak;

        // Only the largest catch block contributes to the count.
        trapcount = Math.max(trapcount, trapsmd.count);
    }

    trysmd.count = blocksmd.count + trapcount;
}
/**
 * Analyzes a throw statement: the thrown value is cast to the exception
 * type and the statement escapes the method, any loop, and every path.
 */
void processThrow(final ThrowContext ctx) {
    final StatementMetadata smd = metadata.getStatementMetadata(ctx);

    final ExceptionContextHolder holder = null; // (no-op placeholder removed)
    final ExpressionContext exceptionctx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata exceptionemd = metadata.createExpressionMetadata(exceptionctx);
    exceptionemd.to = definition.exceptionType;
    analyzer.visit(exceptionctx);
    caster.markCast(exceptionemd);

    smd.methodEscape = true;
    smd.loopEscape = true;
    smd.allLast = true;
    smd.count = 1;
}
/**
 * Analyzes an expression used as a statement. A non-final expression must
 * itself be a statement (assignment, call, ...); the final source-level
 * expression, when non-void, acts as the script's return value and is cast
 * to Object.
 */
void processExpr(final ExprContext ctx) {
    final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx);
    final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression());
    final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);
    // The value is read only when this is the script's final statement.
    expremd.read = exprsmd.lastSource;
    analyzer.visit(exprctx);

    if (!expremd.statement && !exprsmd.lastSource) {
        throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Not a statement.");
    }

    // A non-void final expression is an implicit return, escaping everything.
    final boolean rtn = exprsmd.lastSource && expremd.from.sort != Sort.VOID;

    exprsmd.methodEscape = rtn;
    exprsmd.loopEscape = rtn;
    exprsmd.allLast = rtn;
    expremd.to = rtn ? definition.objectType : expremd.from;
    caster.markCast(expremd);

    exprsmd.count = 1;
}
/**
 * Analyzes a block of multiple statements: checks reachability, forwards
 * source/loop context to each child, and accumulates escape flags,
 * loop-control flags, and the total statement count.
 */
void processMultiple(final MultipleContext ctx) {
    final StatementMetadata multiplesmd = metadata.getStatementMetadata(ctx);
    final List<StatementContext> statectxs = ctx.statement();
    final StatementContext lastctx = statectxs.get(statectxs.size() - 1);

    for (StatementContext statectx : statectxs) {
        // Reject statements that follow a point where all paths escape.
        if (multiplesmd.allLast) {
            throw new IllegalArgumentException(AnalyzerUtility.error(statectx) +
                "Statement will never be executed because all prior paths escape.");
        }

        final StatementMetadata statesmd = metadata.createStatementMetadata(statectx);
        // Only the block's last statement can be last in the source or loop.
        statesmd.lastSource = multiplesmd.lastSource && statectx == lastctx;
        statesmd.inLoop = multiplesmd.inLoop;
        statesmd.lastLoop = (multiplesmd.beginLoop || multiplesmd.lastLoop) && statectx == lastctx;
        analyzer.visit(statectx);

        multiplesmd.methodEscape = statesmd.methodEscape;
        multiplesmd.loopEscape = statesmd.loopEscape;
        multiplesmd.allLast = statesmd.allLast;
        multiplesmd.anyContinue |= statesmd.anyContinue;
        multiplesmd.anyBreak |= statesmd.anyBreak;
        multiplesmd.count += statesmd.count;
    }
}
/**
 * Analyzes a block consisting of a single statement, forwarding context
 * flags down into it and pulling the result flags back up.
 */
void processSingle(final SingleContext ctx) {
    final StatementMetadata blocksmd = metadata.getStatementMetadata(ctx);
    final StatementContext statementctx = ctx.statement();
    final StatementMetadata innersmd = metadata.createStatementMetadata(statementctx);

    // Push the surrounding context down into the lone statement.
    innersmd.lastSource = blocksmd.lastSource;
    innersmd.inLoop = blocksmd.inLoop;
    innersmd.lastLoop = blocksmd.beginLoop || blocksmd.lastLoop;

    analyzer.visit(statementctx);

    // Pull the analysis results back up.
    blocksmd.methodEscape = innersmd.methodEscape;
    blocksmd.loopEscape = innersmd.loopEscape;
    blocksmd.allLast = innersmd.allLast;
    blocksmd.anyContinue = innersmd.anyContinue;
    blocksmd.anyBreak = innersmd.anyBreak;
    blocksmd.count = innersmd.count;
}
/**
 * Analyzes a for-loop initializer, which is either a declaration or an
 * expression that must qualify as a statement.
 */
void processInitializer(InitializerContext ctx) {
    final DeclarationContext declarationctx = ctx.declaration();
    final ExpressionContext expressionctx = AnalyzerUtility.updateExpressionTree(ctx.expression());

    if (declarationctx != null) {
        metadata.createStatementMetadata(declarationctx);
        analyzer.visit(declarationctx);

        return;
    }

    if (expressionctx != null) {
        final ExpressionMetadata initemd = metadata.createExpressionMetadata(expressionctx);
        // The initializer's value is discarded.
        initemd.read = false;
        analyzer.visit(expressionctx);

        initemd.to = initemd.from;
        caster.markCast(initemd);

        if (!initemd.statement) {
            throw new IllegalArgumentException(AnalyzerUtility.error(expressionctx) +
                "The initializer of a for loop must be a statement.");
        }

        return;
    }

    throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
/**
 * Analyzes a for-loop afterthought, which (when present) must be an
 * expression that qualifies as a statement.
 */
void processAfterthought(AfterthoughtContext ctx) {
    final ExpressionContext expressionctx = AnalyzerUtility.updateExpressionTree(ctx.expression());

    if (expressionctx == null) {
        return;
    }

    final ExpressionMetadata afteremd = metadata.createExpressionMetadata(expressionctx);
    // The afterthought's value is discarded.
    afteremd.read = false;
    analyzer.visit(expressionctx);

    afteremd.to = afteremd.from;
    caster.markCast(afteremd);

    if (!afteremd.statement) {
        throw new IllegalArgumentException(AnalyzerUtility.error(expressionctx) +
            "The afterthought of a for loop must be a statement.");
    }
}
/**
 * Analyzes a variable declaration statement. Resolves the declared type once, then
 * visits each declared variable, handing every one the same resolved type.
 */
void processDeclaration(final DeclarationContext ctx) {
final DecltypeContext decltypectx = ctx.decltype();
final ExpressionMetadata decltypeemd = metadata.createExpressionMetadata(decltypectx);
analyzer.visit(decltypectx);
// A single declaration may introduce several variables (e.g. "int x, y = 1").
for (final DeclvarContext declvarctx : ctx.declvar()) {
final ExpressionMetadata declvaremd = metadata.createExpressionMetadata(declvarctx);
// Each variable's target type is the type resolved from the decltype node.
declvaremd.to = decltypeemd.from;
analyzer.visit(declvarctx);
}
}
/**
 * Analyzes a declared type node: validates that the identifier names a known type
 * (throwing on failure) and records the resolved type in this node's metadata.
 */
void processDecltype(final DecltypeContext ctx) {
final ExpressionMetadata decltypeemd = metadata.getExpressionMetadata(ctx);
final IdentifierContext idctx = ctx.identifier();
// Note: the full node text (not just the identifier) is used for type lookup.
final String type = ctx.getText();
// Throws if the identifier is not a valid type (error flag is true).
utility.isValidType(idctx, true);
decltypeemd.from = definition.getType(type);
}
/**
 * Analyzes a single declared variable. Validates its name, registers it in the
 * current scope (recording the assigned local-variable slot in postConst), and, if
 * an initializer expression is present, analyzes it with a cast to the declared type.
 * Expects declvaremd.to to have been set by processDeclaration before this runs.
 */
void processDeclvar(final DeclvarContext ctx) {
final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx);
final IdentifierContext idctx = ctx.identifier();
final String identifier = idctx.getText();
// Throws if the name collides with a type or is otherwise invalid.
utility.isValidIdentifier(idctx, true);
// Register the variable and stash its slot number for the writer pass.
declvaremd.postConst = utility.addVariable(ctx, identifier, declvaremd.to).slot;
// Optional initializer; precedence nodes are stripped before analysis.
final ExpressionContext exprctx = AnalyzerUtility.updateExpressionTree(ctx.expression());
if (exprctx != null) {
final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx);
// The initializer must be castable to the declared type.
expremd.to = declvaremd.to;
analyzer.visit(exprctx);
caster.markCast(expremd);
}
}
/**
 * Analyzes a catch clause ("trap"). Resolves and validates the caught type (it must
 * be a subclass of Exception), registers the exception variable in scope, and then
 * analyzes the handler block, propagating flow-control flags up from it. An empty
 * scope (catch with no body) is also legal.
 */
void processTrap(final TrapContext ctx) {
final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx);
// First identifier: the exception type being caught.
final IdentifierContext idctx0 = ctx.identifier(0);
final String type = idctx0.getText();
utility.isValidType(idctx0, true);
trapsmd.exception = definition.getType(type);
try {
// asSubclass throws ClassCastException when the type is not an Exception.
trapsmd.exception.clazz.asSubclass(Exception.class);
} catch (final ClassCastException exception) {
throw new IllegalArgumentException(AnalyzerUtility.error(ctx) + "Invalid exception type [" + trapsmd.exception.name + "].");
}
// Second identifier: the name binding the caught exception; reserve its slot.
final IdentifierContext idctx1 = ctx.identifier(1);
final String identifier = idctx1.getText();
utility.isValidIdentifier(idctx1, true);
trapsmd.slot = utility.addVariable(ctx, identifier, trapsmd.exception).slot;
final BlockContext blockctx = ctx.block();
if (blockctx != null) {
final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx);
// Propagate flow-control context into the handler block, then results back out.
blocksmd.lastSource = trapsmd.lastSource;
blocksmd.inLoop = trapsmd.inLoop;
blocksmd.lastLoop = trapsmd.lastLoop;
analyzer.visit(blockctx);
trapsmd.methodEscape = blocksmd.methodEscape;
trapsmd.loopEscape = blocksmd.loopEscape;
trapsmd.allLast = blocksmd.allLast;
trapsmd.anyContinue = blocksmd.anyContinue;
trapsmd.anyBreak = blocksmd.anyBreak;
} else if (ctx.emptyscope() == null) {
// No block and no empty scope: the parse tree is malformed.
throw new IllegalStateException(AnalyzerUtility.error(ctx) + "Unexpected state.");
}
}
}

View File

@ -1,191 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTree;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.PainlessParser.ExpressionContext;
import org.elasticsearch.painless.PainlessParser.IdentifierContext;
import org.elasticsearch.painless.PainlessParser.PrecedenceContext;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
/**
 * Shared helpers for the analyzer pass: consistent error-message formatting,
 * a parse-tree cleanup for precedence nodes, and scoped local-variable tracking
 * (name -> type -> JVM local slot).
 */
class AnalyzerUtility {
/**
 * An immutable record of a declared local variable: its name, its Painless type,
 * and the JVM local-variable slot assigned to it.
 */
static class Variable {
final String name;
final Type type;
final int slot;
private Variable(final String name, final Type type, final int slot) {
this.name = name;
this.type = type;
this.slot = slot;
}
}
/**
 * A utility method to output consistent error messages.
 * @param ctx The ANTLR node the error occurred in.
 * @return The error message with tacked on line number and character position.
 */
static String error(final ParserRuleContext ctx) {
return "Analyzer Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: ";
}
/**
 * A utility method to output consistent error messages for invalid types.
 * @param ctx The ANTLR node the error occurred in.
 * @param type The invalid type.
 * @return The error message with tacked on line number and character position.
 */
static String typeError(final ParserRuleContext ctx, final String type) {
return error(ctx) + "Invalid type [" + type + "].";
}
/**
 * A utility method to output consistent error messages for invalid identifiers.
 * @param ctx The ANTLR node the error occurred in.
 * @param identifier The invalid identifier.
 * @return The error message with tacked on line number and character position.
 */
static String identifierError(final ParserRuleContext ctx, final String identifier) {
return error(ctx) + "Invalid identifier [" + identifier + "].";
}
/**
 * The ANTLR parse tree is modified in one single case; a parent node needs to check a child node to see if it's
 * a precedence node, and if so, it must be removed from the tree permanently. Once the ANTLR tree is built,
 * precedence nodes are no longer necessary to maintain the correct ordering of the tree, so they only
 * add a level of indirection where complicated decisions about metadata passing would have to be made. This
 * method removes the need for those decisions.
 * @param source The child ANTLR node to check for precedence.
 * @return The updated child ANTLR node.
 */
static ExpressionContext updateExpressionTree(ExpressionContext source) {
// Check to see if the ANTLR node is a precedence node.
if (source instanceof PainlessParser.PrecedenceContext) {
final ParserRuleContext parent = source.getParent();
int index = 0;
// Mark the index of the source node within the list of child nodes from the parent.
for (final ParseTree child : parent.children) {
if (child == source) {
break;
}
++index;
}
// If there are multiple precedence nodes in a row, remove them all.
while (source instanceof PrecedenceContext) {
source = ((PrecedenceContext)source).expression();
}
// Update the parent node with the child of the precedence node.
parent.children.set(index, source);
}
return source;
}
private final Definition definition;
// Each entry is the count of variables declared in that scope; popped on scope exit.
private final Deque<Integer> scopes = new ArrayDeque<>();
// Stack of live variables, innermost declarations first.
private final Deque<Variable> variables = new ArrayDeque<>();
AnalyzerUtility(final Metadata metadata) {
this.definition = metadata.definition;
}
/** Opens a new variable scope with zero declared variables. */
void incrementScope() {
scopes.push(0);
}
/** Closes the current scope, popping every variable declared inside it. */
void decrementScope() {
int remove = scopes.pop();
while (remove > 0) {
variables.pop();
--remove;
}
}
/**
 * Looks up a variable by name, searching from the innermost scope outward.
 * @return The matching variable, or null if no variable with that name is in scope.
 */
Variable getVariable(final String name) {
final Iterator<Variable> itr = variables.iterator();
while (itr.hasNext()) {
final Variable variable = itr.next();
if (variable.name.equals(name)) {
return variable;
}
}
return null;
}
/**
 * Declares a new variable in the current scope and assigns it the next free
 * JVM local slot (previous slot plus the previous variable's type width).
 * @param source The ANTLR node for error reporting; null for implicit arguments.
 * @throws IllegalArgumentException if the name is already defined in any visible scope.
 */
Variable addVariable(final ParserRuleContext source, final String name, final Type type) {
if (getVariable(name) != null) {
if (source == null) {
throw new IllegalArgumentException("Argument name [" + name + "] already defined within the scope.");
} else {
throw new IllegalArgumentException(error(source) + "Variable name [" + name + "] already defined within the scope.");
}
}
final Variable previous = variables.peekFirst();
int slot = 0;
if (previous != null) {
// Longs/doubles occupy two slots, so advance by the previous type's size.
slot += previous.slot + previous.type.type.getSize();
}
final Variable variable = new Variable(name, type, slot);
variables.push(variable);
// Bump the declaration count of the current scope.
final int update = scopes.pop() + 1;
scopes.push(update);
return variable;
}
/**
 * Checks whether the identifier names a known struct type.
 * @param error When true, throws instead of returning false.
 */
boolean isValidType(final IdentifierContext idctx, final boolean error) {
boolean valid = definition.structs.containsKey(idctx.getText());
if (!valid && error) {
throw new IllegalArgumentException(typeError(idctx, idctx.getText()));
}
return valid;
}
/**
 * Checks whether the identifier is usable as a variable name: it must not clash
 * with a struct type name and must not carry generic arguments.
 * @param error When true, throws instead of returning false.
 */
boolean isValidIdentifier(final IdentifierContext idctx, final boolean error) {
boolean valid = !definition.structs.containsKey(idctx.getText()) && idctx.generic() == null;
if (!valid && error) {
throw new IllegalArgumentException(identifierError(idctx, idctx.getText()));
}
return valid;
}
}

View File

@ -19,10 +19,10 @@
package org.elasticsearch.painless;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.bootstrap.BootstrapInfo;
import org.elasticsearch.painless.Variables.Reserved;
import org.elasticsearch.painless.antlr.Walker;
import org.elasticsearch.painless.node.SSource;
import java.net.MalformedURLException;
import java.net.URL;
@ -30,13 +30,16 @@ import java.security.CodeSource;
import java.security.SecureClassLoader;
import java.security.cert.Certificate;
import static org.elasticsearch.painless.WriterConstants.CLASS_NAME;
/**
* The Compiler is the entry point for generating a Painless script. The compiler will generate an ANTLR
* parse tree based on the source code that is passed in. Two passes will then be run over the parse tree,
* The Compiler is the entry point for generating a Painless script. The compiler will receive a Painless
* tree based on the type of input passed in (currently only ANTLR). Two passes will then be run over the tree,
* one for analysis using the {@link Analyzer} and another to generate the actual byte code using ASM in
* the {@link Writer}.
*/
final class Compiler {
/**
* The maximum number of characters allowed in the script source.
*/
@ -54,7 +57,7 @@ final class Compiler {
try {
// Setup the code privileges.
CODESOURCE = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null);
} catch (MalformedURLException impossible) {
} catch (final MalformedURLException impossible) {
throw new RuntimeException(impossible);
}
}
@ -62,7 +65,7 @@ final class Compiler {
/**
* A secure class loader used to define Painless scripts.
*/
static class Loader extends SecureClassLoader {
static final class Loader extends SecureClassLoader {
/**
* @param parent The parent ClassLoader.
*/
@ -90,64 +93,45 @@ final class Compiler {
* @return An {@link Executable} Painless script.
*/
static Executable compile(final Loader loader, final String name, final String source, final CompilerSettings settings) {
final byte[] bytes = compile(name, source, Definition.INSTANCE, settings);
return createExecutable(loader, Definition.INSTANCE, name, source, bytes);
byte[] bytes = compile(source, settings);
return createExecutable(loader, name, source, bytes);
}
/**
* Compiles the script to bytecode
* Runs the two-pass compiler to generate a Painless script. (Used by the debugger.)
* @param source The source code for the script.
* @param settings The CompilerSettings to be used during the compilation.
* @return The bytes for compilation.
*/
static byte[] compile(String name, String source, Definition definition, CompilerSettings settings) {
static byte[] compile(final String source, final CompilerSettings settings) {
if (source.length() > MAXIMUM_SOURCE_LENGTH) {
throw new IllegalArgumentException("Scripts may be no longer than " + MAXIMUM_SOURCE_LENGTH +
" characters. The passed in script is " + source.length() + " characters. Consider using a" +
" plugin if a script longer than this length is a requirement.");
" characters. The passed in script is " + source.length() + " characters. Consider using a" +
" plugin if a script longer than this length is a requirement.");
}
final ParserRuleContext root = createParseTree(source);
final Metadata metadata = new Metadata(definition, source, root, settings);
Analyzer.analyze(metadata);
return Writer.write(metadata);
}
/**
* Generates the ANTLR tree from the given source code. Several methods below, are used
* to ensure that the first error generated by ANTLR will cause the compilation to fail rather than
* use ANTLR's recovery strategies that may be potentially dangerous.
* @param source The source code for the script.
* @return The root node for the ANTLR parse tree.
*/
private static ParserRuleContext createParseTree(final String source) {
final ANTLRInputStream stream = new ANTLRInputStream(source);
final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream);
final PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer));
final ParserErrorStrategy strategy = new ParserErrorStrategy();
final Reserved reserved = new Reserved();
final SSource root = Walker.buildPainlessTree(source, reserved);
final Variables variables = Analyzer.analyze(settings, Definition.INSTANCE, reserved, root);
lexer.removeErrorListeners();
parser.removeErrorListeners();
parser.setErrorHandler(strategy);
ParserRuleContext root = parser.source();
return root;
return Writer.write(settings, Definition.INSTANCE, source, variables, root);
}
/**
* Generates an {@link Executable} that can run a Painless script.
* @param loader The {@link Loader} to define the script's class file.
* @param definition The Painless API.
* @param name The name of the script.
* @param source The source text of the script.
* @param bytes The ASM generated byte code to define the class with.
* @return A Painless {@link Executable} script.
*/
private static Executable createExecutable(final Loader loader, final Definition definition,
final String name, final String source, final byte[] bytes) {
private static Executable createExecutable(final Loader loader, final String name, final String source, final byte[] bytes) {
try {
final Class<? extends Executable> clazz = loader.define(WriterConstants.CLASS_NAME, bytes);
final java.lang.reflect.Constructor<? extends Executable> constructor =
clazz.getConstructor(Definition.class, String.class, String.class);
final Class<? extends Executable> clazz = loader.define(CLASS_NAME, bytes);
final java.lang.reflect.Constructor<? extends Executable> constructor = clazz.getConstructor(String.class, String.class);
return constructor.newInstance(definition, name, source);
return constructor.newInstance(name, source);
} catch (final Exception exception) { // Catch everything to let the user know this is something caused internally.
throw new IllegalStateException(
"An internal error occurred attempting to define the script [" + name + "].", exception);

View File

@ -22,7 +22,8 @@ package org.elasticsearch.painless;
/**
* Settings to use when compiling a script.
*/
final class CompilerSettings {
public final class CompilerSettings {
/**
* Constant to be used when specifying numeric overflow when compiling a script.
*/
@ -52,7 +53,7 @@ final class CompilerSettings {
* signs, and overflow for floating point types can result in infinite or
* {@code NaN} values.
*/
public boolean getNumericOverflow() {
public final boolean getNumericOverflow() {
return numericOverflow;
}
@ -60,15 +61,16 @@ final class CompilerSettings {
* Set {@code true} for numerics to overflow, false to deliver exceptions.
* @see #getNumericOverflow
*/
public void setNumericOverflow(boolean allow) {
public final void setNumericOverflow(boolean allow) {
this.numericOverflow = allow;
}
/**
* Returns the value for the cumulative total number of statements that can be made in all loops
* in a script before an exception is thrown. This attempts to prevent infinite loops.
* in a script before an exception is thrown. This attempts to prevent infinite loops. Note if
* the counter is set to 0, no loop counter will be written.
*/
public int getMaxLoopCounter() {
public final int getMaxLoopCounter() {
return maxLoopCounter;
}
@ -76,7 +78,7 @@ final class CompilerSettings {
* Set the cumulative total number of statements that can be made in all loops.
* @see #getMaxLoopCounter
*/
public void setMaxLoopCounter(int max) {
public final void setMaxLoopCounter(int max) {
this.maxLoopCounter = max;
}
}

View File

@ -24,8 +24,8 @@ import org.elasticsearch.painless.Definition.RuntimeClass;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.lang.invoke.MethodHandles.Lookup;
import java.lang.invoke.MethodType;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -41,13 +41,85 @@ import java.util.stream.Stream;
* <p>
* Dynamic methods, loads, stores, and array/list/map load/stores involve locating the appropriate field
* or method depending on the receiver's class. For these, we emit an {@code invokedynamic} instruction that,
* for each new type encountered will query a corresponding {@code lookupXXX} method to retrieve the appropriate
* for each new type encountered will query a corresponding {@code lookupXXX} method to retrieve the appropriate
* method. In most cases, the {@code lookupXXX} methods here will only be called once for a given call site, because
* caching ({@link DynamicCallSite}) generally works: usually all objects at any call site will be consistently
* the same type (or just a few types). In extreme cases, if there is type explosion, they may be called every
* single time, but simplicity is still more valuable than performance in this code.
*/
public class Def {
public final class Def {
// TODO: Once Java has a factory for those in java.lang.invoke.MethodHandles, use it:
/** Helper class for isolating MethodHandles and methods to get the length of arrays
* (to emulate an "arraystore" bytecode using MethodHandles).
* This should really be a method in {@link MethodHandles} class!
*/
/**
 * Isolated holder for per-array-type MethodHandles that return an array's length.
 * A handle is pre-built for every primitive array type plus Object[]; other
 * reference-array types reuse the Object[] handle with an adapted parameter type.
 */
private static final class ArrayLengthHelper {
private static final Lookup PRIV_LOOKUP = MethodHandles.lookup();
// Maps each supported array class to a handle on the matching getArrayLength overload.
private static final Map<Class<?>,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap(
Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class,
char[].class, float[].class, double[].class, Object[].class)
.collect(Collectors.toMap(Function.identity(), type -> {
try {
return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "getArrayLength", MethodType.methodType(int.class, type));
} catch (ReflectiveOperationException e) {
// The overloads below always exist, so lookup failure is a programming error.
throw new AssertionError(e);
}
}))
);
// Fallback handle used (with an asType adaptation) for arrays of any other reference type.
private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class);
// One overload per supported array type; targets of the handles built above.
static int getArrayLength(final boolean[] array) { return array.length; }
static int getArrayLength(final byte[] array) { return array.length; }
static int getArrayLength(final short[] array) { return array.length; }
static int getArrayLength(final int[] array) { return array.length; }
static int getArrayLength(final long[] array) { return array.length; }
static int getArrayLength(final char[] array) { return array.length; }
static int getArrayLength(final float[] array) { return array.length; }
static int getArrayLength(final double[] array) { return array.length; }
static int getArrayLength(final Object[] array) { return array.length; }
/**
 * Returns a MethodHandle of type (arrayType)int that reads the array's length.
 * @throws IllegalArgumentException if the given class is not an array type.
 */
static MethodHandle arrayLengthGetter(Class<?> arrayType) {
if (!arrayType.isArray()) {
throw new IllegalArgumentException("type must be an array");
}
return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ?
ARRAY_TYPE_MH_MAPPING.get(arrayType) :
OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType));
}
// Static-only holder; never instantiated.
private ArrayLengthHelper() {}
}
/** pointer to Map.get(Object) */
private static final MethodHandle MAP_GET;
/** pointer to Map.put(Object,Object) */
private static final MethodHandle MAP_PUT;
/** pointer to List.get(int) */
private static final MethodHandle LIST_GET;
/** pointer to List.set(int,Object) */
private static final MethodHandle LIST_SET;
static {
final Lookup lookup = MethodHandles.publicLookup();
try {
MAP_GET = lookup.findVirtual(Map.class , "get", MethodType.methodType(Object.class, Object.class));
MAP_PUT = lookup.findVirtual(Map.class , "put", MethodType.methodType(Object.class, Object.class, Object.class));
LIST_GET = lookup.findVirtual(List.class, "get", MethodType.methodType(Object.class, int.class));
LIST_SET = lookup.findVirtual(List.class, "set", MethodType.methodType(Object.class, int.class, Object.class));
} catch (final ReflectiveOperationException roe) {
throw new AssertionError(roe);
}
}
/** Returns an array length getter MethodHandle for the given array type */
static MethodHandle arrayLengthGetter(Class<?> arrayType) {
return ArrayLengthHelper.arrayLengthGetter(arrayType);
}
/**
* Looks up handle for a dynamic method call.
@ -65,7 +137,7 @@ public class Def {
* @return pointer to matching method to invoke. never returns null.
* @throws IllegalArgumentException if no matching whitelisted method was found.
*/
static MethodHandle lookupMethod(Class<?> receiverClass, String name, Definition definition) {
static MethodHandle lookupMethod(Class<?> receiverClass, String name, Definition definition) {
// check whitelist for matching method
for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
RuntimeClass struct = definition.runtimeMap.get(clazz);
@ -77,7 +149,7 @@ public class Def {
}
}
for (Class<?> iface : clazz.getInterfaces()) {
for (final Class<?> iface : clazz.getInterfaces()) {
struct = definition.runtimeMap.get(iface);
if (struct != null) {
@ -94,78 +166,6 @@ public class Def {
"for class [" + receiverClass.getCanonicalName() + "].");
}
/** pointer to Map.get(Object) */
private static final MethodHandle MAP_GET;
/** pointer to Map.put(Object,Object) */
private static final MethodHandle MAP_PUT;
/** pointer to List.get(int) */
private static final MethodHandle LIST_GET;
/** pointer to List.set(int,Object) */
private static final MethodHandle LIST_SET;
static {
Lookup lookup = MethodHandles.publicLookup();
try {
MAP_GET = lookup.findVirtual(Map.class, "get",
MethodType.methodType(Object.class, Object.class));
MAP_PUT = lookup.findVirtual(Map.class, "put",
MethodType.methodType(Object.class, Object.class, Object.class));
LIST_GET = lookup.findVirtual(List.class, "get",
MethodType.methodType(Object.class, int.class));
LIST_SET = lookup.findVirtual(List.class, "set",
MethodType.methodType(Object.class, int.class, Object.class));
} catch (ReflectiveOperationException e) {
throw new AssertionError(e);
}
}
// TODO: Once Java has a factory for those in java.lang.invoke.MethodHandles, use it:
/** Helper class for isolating MethodHandles and methods to get the length of arrays
* (to emulate an "arraystore" bytecode using MethodHandles).
* This should really be a method in {@link MethodHandles} class!
*/
private static final class ArrayLengthHelper {
private ArrayLengthHelper() {}
private static final Lookup PRIV_LOOKUP = MethodHandles.lookup();
private static final Map<Class<?>,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap(
Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class,
char[].class, float[].class, double[].class, Object[].class)
.collect(Collectors.toMap(Function.identity(), type -> {
try {
return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "getArrayLength", MethodType.methodType(int.class, type));
} catch (ReflectiveOperationException e) {
throw new AssertionError(e);
}
}))
);
private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class);
static int getArrayLength(boolean[] array) { return array.length; }
static int getArrayLength(byte[] array) { return array.length; }
static int getArrayLength(short[] array) { return array.length; }
static int getArrayLength(int[] array) { return array.length; }
static int getArrayLength(long[] array) { return array.length; }
static int getArrayLength(char[] array) { return array.length; }
static int getArrayLength(float[] array) { return array.length; }
static int getArrayLength(double[] array) { return array.length; }
static int getArrayLength(Object[] array) { return array.length; }
public static MethodHandle arrayLengthGetter(Class<?> arrayType) {
if (!arrayType.isArray()) {
throw new IllegalArgumentException("type must be an array");
}
return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ?
ARRAY_TYPE_MH_MAPPING.get(arrayType) :
OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType));
}
}
/** Returns an array length getter MethodHandle for the given array type */
public static MethodHandle arrayLengthGetter(Class<?> arrayType) {
return ArrayLengthHelper.arrayLengthGetter(arrayType);
}
/**
* Looks up handle for a dynamic field getter (field load)
* <p>
@ -298,7 +298,7 @@ public class Def {
try {
int index = Integer.parseInt(name);
return MethodHandles.insertArguments(LIST_SET, 1, index);
} catch (NumberFormatException exception) {
} catch (final NumberFormatException exception) {
throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "].");
}
}
@ -345,10 +345,12 @@ public class Def {
"[" + receiverClass.getCanonicalName() + "] as an array.");
}
// NOTE: below methods are not cached, instead invoked directly because they are performant.
// NOTE: Below methods are not cached, instead invoked directly because they are performant.
// We also check for Long values first when possible since the type is more
// likely to be a Long than a Float.
public static Object not(final Object unary) {
if (unary instanceof Double || unary instanceof Float || unary instanceof Long) {
if (unary instanceof Double || unary instanceof Long || unary instanceof Float) {
return ~((Number)unary).longValue();
} else if (unary instanceof Number) {
return ~((Number)unary).intValue();
@ -594,103 +596,40 @@ public class Def {
"[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "].");
}
public static Object lsh(final Object left, final Object right) {
if (left instanceof Number) {
if (right instanceof Number) {
if (left instanceof Double || right instanceof Double ||
left instanceof Long || right instanceof Long ||
left instanceof Float || right instanceof Float) {
return ((Number)left).longValue() << ((Number)right).longValue();
} else {
return ((Number)left).intValue() << ((Number)right).intValue();
}
} else if (right instanceof Character) {
if (left instanceof Double || left instanceof Long || left instanceof Float) {
return ((Number)left).longValue() << (char)right;
} else {
return ((Number)left).intValue() << (char)right;
}
}
public static Object lsh(final Object left, final int right) {
if (left instanceof Double || left instanceof Long || left instanceof Float) {
return ((Number)left).longValue() << right;
} else if (left instanceof Number) {
return ((Number)left).intValue() << right;
} else if (left instanceof Character) {
if (right instanceof Number) {
if (right instanceof Double || right instanceof Long || right instanceof Float) {
return (long)(char)left << ((Number)right).longValue();
} else {
return (char)left << ((Number)right).intValue();
}
} else if (right instanceof Character) {
return (char)left << (char)right;
}
return (char)left << right;
}
throw new ClassCastException("Cannot apply [<<] operation to types " +
"[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "].");
throw new ClassCastException("Cannot apply [<<] operation to types [" + left.getClass().getCanonicalName() + "] and [int].");
}
public static Object rsh(final Object left, final Object right) {
if (left instanceof Number) {
if (right instanceof Number) {
if (left instanceof Double || right instanceof Double ||
left instanceof Long || right instanceof Long ||
left instanceof Float || right instanceof Float) {
return ((Number)left).longValue() >> ((Number)right).longValue();
} else {
return ((Number)left).intValue() >> ((Number)right).intValue();
}
} else if (right instanceof Character) {
if (left instanceof Double || left instanceof Long || left instanceof Float) {
return ((Number)left).longValue() >> (char)right;
} else {
return ((Number)left).intValue() >> (char)right;
}
}
public static Object rsh(final Object left, final int right) {
if (left instanceof Double || left instanceof Long || left instanceof Float) {
return ((Number)left).longValue() >> right;
} else if (left instanceof Number) {
return ((Number)left).intValue() >> right;
} else if (left instanceof Character) {
if (right instanceof Number) {
if (right instanceof Double || right instanceof Long || right instanceof Float) {
return (long)(char)left >> ((Number)right).longValue();
} else {
return (char)left >> ((Number)right).intValue();
}
} else if (right instanceof Character) {
return (char)left >> (char)right;
}
return (char)left >> right;
}
throw new ClassCastException("Cannot apply [>>] operation to types " +
"[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "].");
throw new ClassCastException("Cannot apply [>>] operation to types [" + left.getClass().getCanonicalName() + "] and [int].");
}
public static Object ush(final Object left, final Object right) {
if (left instanceof Number) {
if (right instanceof Number) {
if (left instanceof Double || right instanceof Double ||
left instanceof Long || right instanceof Long ||
left instanceof Float || right instanceof Float) {
return ((Number)left).longValue() >>> ((Number)right).longValue();
} else {
return ((Number)left).intValue() >>> ((Number)right).intValue();
}
} else if (right instanceof Character) {
if (left instanceof Double || left instanceof Long || left instanceof Float) {
return ((Number)left).longValue() >>> (char)right;
} else {
return ((Number)left).intValue() >>> (char)right;
}
}
public static Object ush(final Object left, final int right) {
if (left instanceof Double || left instanceof Long || left instanceof Float) {
return ((Number)left).longValue() >>> right;
} else if (left instanceof Number) {
return ((Number)left).intValue() >>> right;
} else if (left instanceof Character) {
if (right instanceof Number) {
if (right instanceof Double || right instanceof Long || right instanceof Float) {
return (long)(char)left >>> ((Number)right).longValue();
} else {
return (char)left >>> ((Number)right).intValue();
}
} else if (right instanceof Character) {
return (char)left >>> (char)right;
}
return (char)left >>> right;
}
throw new ClassCastException("Cannot apply [>>>] operation to types " +
"[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "].");
throw new ClassCastException("Cannot apply [>>>] operation to types [" + left.getClass().getCanonicalName() + "] and [int].");
}
public static Object and(final Object left, final Object right) {
@ -1026,6 +965,8 @@ public class Def {
"[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "].");
}
// Conversion methods for Def to primitive types.
public static boolean DefToboolean(final Object value) {
if (value instanceof Boolean) {
return (boolean)value;

View File

@ -36,33 +36,34 @@ import java.lang.invoke.MutableCallSite;
* When a new type is encountered at the call site, we lookup from the appropriate
* whitelist, and cache with a guard. If we encounter too many types, we stop caching.
* <p>
* Based on the cascaded inlining cache from the JSR 292 cookbook
* Based on the cascaded inlining cache from the JSR 292 cookbook
* (https://code.google.com/archive/p/jsr292-cookbook/, BSD license)
*/
// NOTE: this class must be public, because generated painless classes are in a different package,
// and it needs to be accessible by that code.
public final class DynamicCallSite {
// NOTE: these must be primitive types, see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.invokedynamic
/** static bootstrap parameter indicating a dynamic method call, e.g. foo.bar(...) */
static final int METHOD_CALL = 0;
public static final int METHOD_CALL = 0;
/** static bootstrap parameter indicating a dynamic load (getter), e.g. baz = foo.bar */
static final int LOAD = 1;
public static final int LOAD = 1;
/** static bootstrap parameter indicating a dynamic store (setter), e.g. foo.bar = baz */
static final int STORE = 2;
public static final int STORE = 2;
/** static bootstrap parameter indicating a dynamic array load, e.g. baz = foo[bar] */
static final int ARRAY_LOAD = 3;
public static final int ARRAY_LOAD = 3;
/** static bootstrap parameter indicating a dynamic array store, e.g. foo[bar] = baz */
static final int ARRAY_STORE = 4;
public static final int ARRAY_STORE = 4;
static class InliningCacheCallSite extends MutableCallSite {
/** maximum number of types before we go megamorphic */
static final int MAX_DEPTH = 5;
final Lookup lookup;
final String name;
final int flavor;
int depth;
InliningCacheCallSite(Lookup lookup, String name, MethodType type, int flavor) {
super(type);
this.lookup = lookup;
@ -70,8 +71,8 @@ public final class DynamicCallSite {
this.flavor = flavor;
}
}
/**
/**
* invokeDynamic bootstrap method
* <p>
* In addition to ordinary parameters, we also take a static parameter {@code flavor} which
@ -81,45 +82,45 @@ public final class DynamicCallSite {
*/
public static CallSite bootstrap(Lookup lookup, String name, MethodType type, int flavor) {
InliningCacheCallSite callSite = new InliningCacheCallSite(lookup, name, type, flavor);
MethodHandle fallback = FALLBACK.bindTo(callSite);
fallback = fallback.asCollector(Object[].class, type.parameterCount());
fallback = fallback.asType(type);
callSite.setTarget(fallback);
return callSite;
}
/**
 * Inline-cache guard: returns true when the receiver's runtime class is
 * exactly the cached class this entry was installed for.
 */
static boolean checkClass(Class<?> clazz, Object receiver) {
    return clazz == receiver.getClass();
}
/**
 * Does a slow lookup against the whitelist.
 * <p>
 * Dispatches on the static {@code flavor} parameter supplied at bootstrap
 * to the matching {@link Def} lookup. Note the diff-interleaved duplicate
 * {@code case} labels are removed here — duplicate labels do not compile.
 *
 * @param flavor one of METHOD_CALL, LOAD, STORE, ARRAY_LOAD, ARRAY_STORE
 * @param clazz  the receiver's runtime class
 * @param name   the method or field name being accessed
 * @return a MethodHandle implementing the operation for {@code clazz}
 */
private static MethodHandle lookup(int flavor, Class<?> clazz, String name) {
    switch(flavor) {
        case METHOD_CALL:
            return Def.lookupMethod(clazz, name, Definition.INSTANCE);
        case LOAD:
            return Def.lookupGetter(clazz, name, Definition.INSTANCE);
        case STORE:
            return Def.lookupSetter(clazz, name, Definition.INSTANCE);
        case ARRAY_LOAD:
            return Def.lookupArrayLoad(clazz);
        case ARRAY_STORE:
            return Def.lookupArrayStore(clazz);
        default: throw new AssertionError();
    }
}
/**
* Called when a new type is encountered (or, when we have encountered more than {@code MAX_DEPTH}
* types at this call site and given up on caching).
* types at this call site and given up on caching).
*/
static Object fallback(InliningCacheCallSite callSite, Object[] args) throws Throwable {
MethodType type = callSite.type();
@ -127,25 +128,26 @@ public final class DynamicCallSite {
Class<?> receiverClass = receiver.getClass();
MethodHandle target = lookup(callSite.flavor, receiverClass, callSite.name);
target = target.asType(type);
if (callSite.depth >= InliningCacheCallSite.MAX_DEPTH) {
// revert to a vtable call
callSite.setTarget(target);
return target.invokeWithArguments(args);
}
MethodHandle test = CHECK_CLASS.bindTo(receiverClass);
test = test.asType(test.type().changeParameterType(0, type.parameterType(0)));
MethodHandle guard = MethodHandles.guardWithTest(test, target, callSite.getTarget());
callSite.depth++;
callSite.setTarget(guard);
return target.invokeWithArguments(args);
}
private static final MethodHandle CHECK_CLASS;
private static final MethodHandle FALLBACK;
static {
Lookup lookup = MethodHandles.lookup();
try {

View File

@ -24,15 +24,15 @@ import org.elasticsearch.search.lookup.LeafDocLookup;
import java.util.Map;
/**
* The superclass used to build all Painless scripts on top of.
*/
public abstract class Executable {
protected final Definition definition;
private final String name;
private final String source;
public Executable(final Definition definition, final String name, final String source) {
this.definition = definition;
public Executable(final String name, final String source) {
this.name = name;
this.source = source;
}
@ -45,9 +45,6 @@ public abstract class Executable {
return source;
}
public Definition getDefinition() {
return definition;
}
public abstract Object execute(Map<String, Object> params, Scorer scorer, LeafDocLookup doc, Object value);
public abstract Object execute(
final Map<String, Object> params, final Scorer scorer, final LeafDocLookup doc, final Object value);
}

View File

@ -1,605 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Type;
import java.util.HashMap;
import java.util.Map;
/**
* Metadata is a wrapper for all the data that is collected by the {@link Analyzer}. Each node in the ANTLR parse tree
* will have one of the types of metadata to store information used either in a different node by the analyzer
* or by the {@link Writer} during byte code generation. Metadata also contains several objects passed into the
* {@link Analyzer} and {@link Writer} used during compilation including the {@link Definition}, the source code,
* the root of the ANTLR parse tree, and the {@link CompilerSettings}.
*/
class Metadata {
/**
* StatementMetadata is used to store metadata mostly about
* control flow for ANTLR nodes related to if/else, do, while, for, etc.
*/
static class StatementMetadata {
/**
* The source variable is the ANTLR node used to generate this metadata.
*/
final ParserRuleContext source;
/**
* The lastSource variable will be set to true when the final statement from the root ANTLR node is about
* to be visited. This is used to determine whether or not the auto-return feature is allowed to be used,
* and if a null return value needs to be generated automatically since a return value is always required.
*/
boolean lastSource = false;
/**
* The beginLoop variable will be set to true whenever a loop node is initially visited including inner
* loops. This will not be propagated down the parse tree afterwards, though. This is used to determine
* whether or not inLoop should be set further down the tree. Note that inLoop alone is not enough
* information to determine whether we are in the last statement of a loop because we may inside of
* multiple loops, so this variable is necessary.
*/
boolean beginLoop = false;
/**
* The inLoop variable is set to true when inside a loop. This will be propagated down the parse tree. This
* is used to determine whether or not continue and break statements are legal.
*/
boolean inLoop = false;
/**
* The lastLoop variable is set to true when the final statement of a loop is reached. This will be
* propagated down the parse tree until another loop is reached and then will not be propagated further for
* the current loop. This is used to determine whether or not a continue statement is superfluous.
*/
boolean lastLoop = false;
/**
* The methodEscape variable is set to true when a statement would cause the method to potentially exit. This
* includes return, throw, and continuous loop statements. Note that a catch statement may possibly
* reset this to false after a throw statement. This will be propagated up the tree as far as necessary.
* This is used by the {@link Writer} to ensure that superfluous statements aren't unnecessarily written
* into generated bytecode.
*/
boolean methodEscape = false;
/**
* The loopEscape variable is set to true when a loop is going to be exited. This may be caused by a number of
* different statements including continue, break, return, etc. This will only be propagated as far as the
* loop node. This is used to ensure that in certain cases an infinite loop will be caught at
* compile-time rather than run-time.
*/
boolean loopEscape = false;
/**
* The allLast variable is set whenever a final statement in a block is reached. This includes the end of loop,
* if, else, etc. This will be only propagated to the top of the block statement ANTLR node.
* This is used to ensure that there are no unreachable statements within the script.
*/
boolean allLast = false;
/**
* The anyContinue will be set to true when a continue statement is visited. This will be propagated to the
* loop node it's within. This is used to ensure that in certain cases an infinite loop will be caught at
* compile-time rather than run-time.
*/
boolean anyContinue = false;
/**
* The anyBreak will be set to true when a break statement is visited. This will be propagated to the
* loop node it's within. This is used to in conjunction with methodEscape to ensure there are no unreachable
* statements within the script.
*/
boolean anyBreak = false;
/**
* The count variable is used as a rudimentary count of statements within a loop. This will be used in
* the {@link Writer} to keep a count of statements that have been executed at run-time to ensure that a loop
* will exit if it runs too long.
*/
int count = 0;
/**
* The exception variable is used to store the exception type when a throw node is visited. This is used by
* the {@link Writer} to write the correct type of exception in the generated byte code.
*/
Type exception = null;
/**
* The slot variable is used to store the place on the stack of where a thrown exception will be stored to.
* This is used by the {@link Writer}.
*/
int slot = -1;
/**
* Constructor.
* @param source The associated ANTLR node.
*/
private StatementMetadata(final ParserRuleContext source) {
this.source = source;
}
}
/**
* ExpressionMetadata is used to store metadata mostly about constants and casting
* for ANTLR nodes related to mathematical operations.
*/
static class ExpressionMetadata {
/**
* The source variable is the ANTLR node used to generate this metadata.
*/
final ParserRuleContext source;
/**
* The read variable is used to determine whether or not the value of an expression will be read from.
* This is set to false when the expression is the left-hand side of an assignment that is not chained or
* when a method call is made alone. This will propagate down the tree as far as necessary.
* The {@link Writer} uses this to determine when a value may need to be popped from the stack
* such as when a method call returns a value that is never read.
*/
boolean read = true;
/**
* The statement variable is set to true when an expression is complete, meaning that there is some sort
* of effect on a variable or a method call is made. This will propagate up the tree as far as necessary.
* This prevents statements that have no effect on the output of a script from being executed.
*/
boolean statement = false;
/**
* The preConst variable is set to a non-null value when a constant statement is made in a script. This is
* used to track the constant value prior to any casts being made on an ANTLR node.
*/
Object preConst = null;
/**
* The postConst variable is set to a non-null value when a cast is made on a node where a preConst variable
* has already been set when the cast would leave the constant as a non-object value except in the case of a
* String. This will be propagated up the tree and used to simplify constants when possible such as making
* the value of 2*2 be 4 in the * node, so that the {@link Writer} only has to push a 4 onto the stack.
*/
Object postConst = null;
/**
* The isNull variable is set to true when a null constant statement is made in the script. This allows the
* {@link Writer} to potentially shortcut certain comparison operations.
*/
boolean isNull = false;
/**
* The to variable is used to track what an ANTLR node's value should be cast to. This is set on every ANTLR
* node in the tree, and used by the {@link Writer} to make a run-time cast if necessary in the byte code.
* This is also used by the {@link Analyzer} to determine if a cast is legal.
*/
Type to = null;
/**
* The from variable is used to track what an ANTLR node's value should be cast from. This is set on every
* ANTLR node in the tree independent of other nodes. This is used by the {@link Analyzer} to determine if a
* cast is legal.
*/
Type from = null;
/**
* The explicit variable is set to true when a cast is explicitly made in the script. This tracks whether
* or not a cast is a legal up cast.
*/
boolean explicit = false;
/**
* The typesafe variable is set to true when a dynamic type is used as part of an expression. This propagates
* up the tree to the top of the expression. This allows for implicit up casts throughout the expression and
* is used by the {@link Analyzer}.
*/
boolean typesafe = true;
/**
* This is set to the combination of the to and from variables at the end of each node visit in the
* {@link Analyzer}. This is set on every ANTLR node in the tree independent of other nodes, and is
* used by {@link Writer} to make a run-time cast if necessary in the byte code.
*/
Cast cast = null;
/**
* Constructor.
* @param source The associated ANTLR node.
*/
private ExpressionMetadata(final ParserRuleContext source) {
this.source = source;
}
}
/**
* ExternalMetadata is used to store metadata about the overall state of a variable/method chain such as
* '(int)x.get(3)' where each piece of that chain is broken into its individual pieces and stored in
* {@link ExtNodeMetadata}.
*/
static class ExternalMetadata {
/**
* The source variable is the ANTLR node used to generate this metadata.
*/
final ParserRuleContext source;
/**
* The read variable is set to true when the value of a variable/method chain is going to be read from.
* This is used by the {@link Analyzer} to determine if this variable/method chain will be in a standalone
* statement.
*/
boolean read = false;
/**
* The storeExpr variable is set to the right-hand side of an assignment in the variable/method chain if
* necessary. This is used by the {@link Analyzer} to set the proper metadata for a read versus a write,
* and is used by the {@link Writer} to determine if a bytecode operation should be a load or a store.
*/
ParserRuleContext storeExpr = null;
/**
* The token variable is set to a constant value of the operator type (+, -, etc.) when a compound assignment
* is being visited. This is also used by the increment and decrement operators. This is used by both the
* {@link Analyzer} and {@link Writer} to correctly handle the compound assignment.
*/
int token = 0;
/**
* The pre variable is set to true when pre-increment or pre-decrement is visited. This is used by both the
* {@link Analyzer} and {@link Writer} to correctly handle any reads of the variable/method chain that are
* necessary.
*/
boolean pre = false;
/**
* The post variable is set to true when post-increment or post-decrement is visited. This is used by both the
* {@link Analyzer} and {@link Writer} to correctly handle any reads of the variable/method chain that are
* necessary.
*/
boolean post = false;
/**
* The scope variable is incremented and decremented when a precedence node is visited as part of a
* variable/method chain. This is used by the {@link Analyzer} to determine when the final piece of the
* variable/method chain has been reached.
*/
int scope = 0;
/**
* The current variable is set to whatever the current type is within the visited node of the variable/method
* chain. This changes as the nodes for the variable/method are walked through. This is used by the
* {@link Analyzer} to make decisions about whether or not a cast is legal, and what methods are available
* for that specific type.
*/
Type current = null;
/**
* The statik variable is set to true when a variable/method chain begins with static type. This is used by
* the {@link Analyzer} to determine what methods/members are available for that specific type.
*/
boolean statik = false;
/**
* The statement variable is set to true when a variable/method chain can be standalone statement. This is
* used by the {@link Analyzer} to error out if there a variable/method chain that is not a statement.
*/
boolean statement = false;
/**
* The constant variable is set when a String constant is part of the variable/method chain. String is a
* special case because methods/members need to be able to be called on a String constant, so this can't be
* only as part of {@link ExpressionMetadata}. This is used by the {@link Writer} to write out the String
* constant in the byte code.
*/
Object constant = null;
/**
* Constructor.
* @param source The associated ANTLR node.
*/
private ExternalMetadata(final ParserRuleContext source) {
this.source = source;
}
}
static class ExtNodeMetadata {
/**
* The parent variable is top-level ANTLR node of the variable/method chain. This is used to retrieve the
* ExternalMetadata for the variable/method chain this ExtNodeMetadata is a piece of.
*/
final ParserRuleContext parent;
/**
* The source variable is the ANTLR node used to generate this metadata.
*/
final ParserRuleContext source;
/**
* The target variable is set to a value based on the type of ANTLR node that is visited. This is used by
* {@link Writer} to determine whether a cast, store, load, or method call should be written in byte code
* depending on what the target variable is.
*/
Object target = null;
/**
* The last variable is set to true when the last ANTLR node of the variable/method chain is visited. This is
* used by the {@link Writer} in conjunction with the storeExpr variable to determine whether or not a store
* needs to be written as opposed to a load.
*/
boolean last = false;
/**
* The type variable is set to the type that a visited node ends with. This is used by both the
* {@link Analyzer} and {@link Writer} to make decisions about compound assignments, String constants, and
* shortcuts.
*/
Type type = null;
/**
* The promote variable is set to the type of a promotion within a compound assignment. Compound assignments
* may require promotion between the left-hand side variable and right-hand side value. This is used by the
* {@link Writer} to make the correct decision about the byte code operation.
*/
Type promote = null;
/**
* The castFrom variable is set during a compound assignment. This is used by the {@link Writer} to
* cast the values to the promoted type during a compound assignment.
*/
Cast castFrom = null;
/**
* The castTo variable is set during an explicit cast in a variable/method chain or during a compound
* assignment. This is used by the {@link Writer} to either do an explicit cast, or cast the values
* from the promoted type back to the original type during a compound assignment.
*/
Cast castTo = null;
/**
* Constructor.
* @param parent The top-level ANTLR node for the variable/method chain.
* @param source The associated ANTLR node.
*/
private ExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) {
this.parent = parent;
this.source = source;
}
}
/**
* Acts as both the Painless API and white-list for what types and methods are allowed.
*/
final Definition definition;
/**
* The original text of the input script. This is used to write out the source code into
* the byte code file for debugging purposes.
*/
final String source;
/**
* Root node of the ANTLR tree for the Painless script.
*/
final ParserRuleContext root;
/**
* Used to determine certain compile-time constraints such as whether or not numeric overflow is allowed
* and how many statements are allowed before a loop will throw an exception.
*/
final CompilerSettings settings;
/**
* Used to determine what slot the input variable is stored in. This is used in the {@link Writer} whenever
* the input variable is accessed.
*/
int paramsValueSlot = -1;
/**
* Used to determine what slot the Scorer variable is stored in. This is used in the {@link Writer} to load
* _score from it, if _score will be accessed by the script.
*/
int scorerValueSlot = -1;
/**
* Used to determine what slot the _value variable is stored in.
*/
int aggregationValueSlot = -1;
/**
* Used to determine what slot the loopCounter variable is stored in. This is used in the {@link Writer} whenever
* the loop variable is accessed.
*/
int loopCounterSlot = -1;
/**
* Used to determine what slot the _score variable is stored in. This is used in the {@link Writer} whenever
* the score variable is accessed.
*/
int scoreValueSlot = -1;
/**
* Used to determine if the _score variable is actually used. This is used to know if we should call
* Scorer.score() once and cache into a local variable, and expose NeedsScore interface (to allow query caching)
*/
boolean scoreValueUsed = false;
/**
* Used to determine what slot the doc variable is stored in. This is used in the {@link Writer} whenever
* the doc variable is accessed.
*/
int docValueSlot = -1;
/**
* Used to determine what slot the ctx variable is stored in. This is used in the {@link Writer} whenever
* the ctx variable is accessed.
*/
int ctxValueSlot = -1;
/**
* Used to determine if the ctx variable is actually used. This is used to determine if we should call
* Map.get once and store into a local variable on startup.
*/
boolean ctxValueUsed = false;
/**
* Maps the relevant ANTLR node to its metadata.
*/
private final Map<ParserRuleContext, StatementMetadata> statementMetadata = new HashMap<>();
/**
* Maps the relevant ANTLR node to its metadata.
*/
private final Map<ParserRuleContext, ExpressionMetadata> expressionMetadata = new HashMap<>();
/**
* Maps the relevant ANTLR node to its metadata.
*/
private final Map<ParserRuleContext, ExternalMetadata> externalMetadata = new HashMap<>();
/**
* Maps the relevant ANTLR node to its metadata.
*/
private final Map<ParserRuleContext, ExtNodeMetadata> extNodeMetadata = new HashMap<>();
/**
* Constructor.
* @param definition The Painless definition.
* @param source The source text for the script.
* @param root The root ANTLR node.
* @param settings The compile-time settings.
*/
Metadata(final Definition definition, final String source, final ParserRuleContext root, final CompilerSettings settings) {
this.definition = definition;
this.source = source;
this.root = root;
this.settings = settings;
}
/**
* Creates a new StatementMetadata and stores it in the statementMetadata map.
* @param source The ANTLR node for this metadata.
* @return The new StatementMetadata.
*/
StatementMetadata createStatementMetadata(final ParserRuleContext source) {
final StatementMetadata sourcesmd = new StatementMetadata(source);
statementMetadata.put(source, sourcesmd);
return sourcesmd;
}
/**
* Retrieves StatementMetadata from the statementMetadata map.
* @param source The ANTLR node for this metadata.
* @return The retrieved StatementMetadata.
*/
StatementMetadata getStatementMetadata(final ParserRuleContext source) {
final StatementMetadata sourcesmd = statementMetadata.get(source);
if (sourcesmd == null) {
throw new IllegalStateException("Statement metadata does not exist at" +
" the parse node with text [" + source.getText() + "].");
}
return sourcesmd;
}
/**
* Creates a new ExpressionMetadata and stores it in the expressionMetadata map.
* @param source The ANTLR node for this metadata.
* @return The new ExpressionMetadata.
*/
ExpressionMetadata createExpressionMetadata(ParserRuleContext source) {
final ExpressionMetadata sourceemd = new ExpressionMetadata(source);
expressionMetadata.put(source, sourceemd);
return sourceemd;
}
/**
* Retrieves ExpressionMetadata from the expressionMetadata map.
* @param source The ANTLR node for this metadata.
* @return The retrieved ExpressionMetadata.
*/
ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) {
final ExpressionMetadata sourceemd = expressionMetadata.get(source);
if (sourceemd == null) {
throw new IllegalStateException("Expression metadata does not exist at" +
" the parse node with text [" + source.getText() + "].");
}
return sourceemd;
}
/**
* Creates a new ExternalMetadata and stores it in the externalMetadata map.
* @param source The ANTLR node for this metadata.
* @return The new ExternalMetadata.
*/
ExternalMetadata createExternalMetadata(final ParserRuleContext source) {
final ExternalMetadata sourceemd = new ExternalMetadata(source);
externalMetadata.put(source, sourceemd);
return sourceemd;
}
/**
* Retrieves ExternalMetadata from the externalMetadata map.
* @param source The ANTLR node for this metadata.
* @return The retrieved ExternalMetadata.
*/
ExternalMetadata getExternalMetadata(final ParserRuleContext source) {
final ExternalMetadata sourceemd = externalMetadata.get(source);
if (sourceemd == null) {
throw new IllegalStateException("External metadata does not exist at" +
" the parse node with text [" + source.getText() + "].");
}
return sourceemd;
}
/**
* Creates a new ExtNodeMetadata and stores it in the extNodeMetadata map.
* @param source The ANTLR node for this metadata.
* @return The new ExtNodeMetadata.
*/
ExtNodeMetadata createExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) {
final ExtNodeMetadata sourceemd = new ExtNodeMetadata(parent, source);
extNodeMetadata.put(source, sourceemd);
return sourceemd;
}
/**
* Retrieves ExtNodeMetadata from the extNodeMetadata map.
* @param source The ANTLR node for this metadata.
* @return The retrieved ExtNodeMetadata.
*/
ExtNodeMetadata getExtNodeMetadata(final ParserRuleContext source) {
final ExtNodeMetadata sourceemd = extNodeMetadata.get(source);
if (sourceemd == null) {
throw new IllegalStateException("External metadata does not exist at" +
" the parse node with text [" + source.getText() + "].");
}
return sourceemd;
}
}

View File

@ -18,6 +18,8 @@
*/
package org.elasticsearch.painless;
/**
 * Marker interface that a generated {@link Executable} uses the {@code _score} value.
 */
public interface NeedsScore {
}

View File

@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
/**
 * Provides a way to represent operations independently of ASM, to keep ASM
 * contained to only the writing phase of compilation. Note there are also
 * a few extra operations not in ASM that are used internally by the
 * Painless tree.
 */
public enum Operation {

    // NOTE: MUL's symbol was "+" in the original, which is the symbol for
    // ADD; multiplication's symbol is "*".
    MUL   ( "*"   ),
    DIV   ( "/"   ),
    REM   ( "%"   ),
    ADD   ( "+"   ),
    SUB   ( "-"   ),
    LSH   ( "<<"  ),
    RSH   ( ">>"  ),
    USH   ( ">>>" ),
    BWNOT ( "~"   ),
    BWAND ( "&"   ),
    XOR   ( "^"   ),
    BWOR  ( "|"   ),
    NOT   ( "!"   ),
    AND   ( "&&"  ),
    OR    ( "||"  ),
    LT    ( "<"   ),
    LTE   ( "<="  ),
    GT    ( ">"   ),
    GTE   ( ">="  ),
    EQ    ( "=="  ),
    EQR   ( "===" ),
    NE    ( "!="  ),
    NER   ( "!==" ),
    INCR  ( "++"  ),
    DECR  ( "--"  );

    /** The textual symbol for this operation (used when reporting errors). */
    public final String symbol;

    Operation(final String symbol) {
        this.symbol = symbol;
    }
}

View File

@ -28,6 +28,7 @@ package org.elasticsearch.painless;
*/
@SuppressWarnings("serial")
public class PainlessError extends Error {
/**
* Constructor.
* @param message The error message.

View File

@ -23,6 +23,9 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptModule;
/**
* Registers Painless as a plugin.
*/
public final class PainlessPlugin extends Plugin {
@Override

View File

@ -24,6 +24,7 @@ import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.painless.Compiler.Loader;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
@ -45,7 +46,7 @@ import java.util.Map;
/**
* Implementation of a ScriptEngine for the Painless language.
*/
public class PainlessScriptEngineService extends AbstractComponent implements ScriptEngineService {
public final class PainlessScriptEngineService extends AbstractComponent implements ScriptEngineService {
/**
* Standard name of the Painless language.
@ -60,7 +61,7 @@ public class PainlessScriptEngineService extends AbstractComponent implements Sc
/**
* Standard extension of the Painless language.
*/
public static final String EXTENSION = "pain";
public static final String EXTENSION = "painless";
/**
* Standard list of extensions for the Painless language. (There is only one.)
@ -166,10 +167,10 @@ public class PainlessScriptEngineService extends AbstractComponent implements Sc
}
// Create our loader (which loads compiled code with no permissions).
final Compiler.Loader loader = AccessController.doPrivileged(new PrivilegedAction<Compiler.Loader>() {
final Loader loader = AccessController.doPrivileged(new PrivilegedAction<Loader>() {
@Override
public Compiler.Loader run() {
return new Compiler.Loader(getClass().getClassLoader());
public Loader run() {
return new Loader(getClass().getClassLoader());
}
});

View File

@ -33,6 +33,7 @@ import java.util.Map;
* to run a previously compiled Painless script.
*/
final class ScriptImpl implements ExecutableScript, LeafSearchScript {
/**
* The Painless Executable script that can be run.
*/
@ -47,7 +48,7 @@ final class ScriptImpl implements ExecutableScript, LeafSearchScript {
* The lookup is used to access search field values at run-time.
*/
private final LeafSearchLookup lookup;
/**
* the 'doc' object accessed by the script, if available.
*/
@ -97,7 +98,11 @@ final class ScriptImpl implements ExecutableScript, LeafSearchScript {
public void setNextVar(final String name, final Object value) {
variables.put(name, value);
}
/**
* Set the next aggregation value.
* @param value Per-document value, typically a String, Long, or Double.
*/
@Override
public void setNextAggregationValue(Object value) {
this.aggregationValue = value;

View File

@ -19,7 +19,12 @@
package org.elasticsearch.painless;
/**
* A set of methods for non-native boxing and non-native
* exact math operations used at both compile-time and runtime.
*/
public class Utility {
public static boolean NumberToboolean(final Number value) {
return value.longValue() != 0;
}
@ -818,11 +823,11 @@ public class Utility {
}
public static boolean checkEquals(final Object left, final Object right) {
if (left != null && right != null) {
if (left != null) {
return left.equals(right);
}
return left == null && right == null;
return right == null || right.equals(null);
}
private Utility() {}

View File

@ -0,0 +1,219 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.painless.Definition.Type;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
/**
 * Tracks variables across compilation phases.
 * <p>
 * Maintains a stack of scopes and a stack of declared variables, assigning
 * each variable a JVM local-variable slot based on the size of the variables
 * declared before it.  Scopes are opened/closed with {@link #incrementScope()}
 * and {@link #decrementScope()}, which pop the variables declared in the
 * closed scope.
 */
public final class Variables {
    /**
     * Tracks reserved variables. Must be given to any source of input
     * prior to beginning the analysis phase so that reserved variables
     * are known ahead of time to assign appropriate slots without
     * being wasteful.
     */
    public static final class Reserved {
        // Names of the implicit variables made available to every script.
        public static final String THIS = "#this";
        public static final String PARAMS = "params";
        public static final String SCORER = "#scorer";
        public static final String DOC = "doc";
        public static final String VALUE = "_value";
        public static final String SCORE = "_score";
        public static final String CTX = "ctx";
        public static final String LOOP = "#loop";
        // Flags recording whether the script actually uses _score / ctx / loop
        // protection, so slots are only allocated when needed (see the
        // Variables constructor below).
        boolean score = false;
        boolean ctx = false;
        boolean loop = false;
        /**
         * Records that the script referenced the given reserved name.
         * Only SCORE and CTX are tracked here; LOOP is tracked separately
         * via {@link #usesLoop()}.
         */
        public void markReserved(final String name) {
            if (SCORE.equals(name)) {
                score = true;
            } else if (CTX.equals(name)) {
                ctx = true;
            }
        }
        /**
         * Returns true if the given name is one of the reserved variable names
         * and therefore may not be declared by user code.
         */
        public boolean isReserved(final String name) {
            return name.equals(THIS) || name.equals(PARAMS) || name.equals(SCORER) || name.equals(DOC) ||
                name.equals(VALUE) || name.equals(SCORE) || name.equals(CTX) || name.equals(LOOP);
        }
        /** Records that the script contains a loop requiring the loop-counter slot. */
        public void usesLoop() {
            loop = true;
        }
    }
    /**
     * A single declared variable: its declaration site (for error messages),
     * name, resolved type, assigned JVM local slot, and whether it is
     * read-only.  The {@code read} flag is mutated externally during analysis.
     */
    public static final class Variable {
        public final String location;
        public final String name;
        public final Type type;
        public final int slot;
        public final boolean readonly;
        // Mutated by the analysis phase (outside this class) to track usage.
        public boolean read = false;
        private Variable(final String location, final String name, final Type type, final int slot, final boolean readonly) {
            this.location = location;
            this.name = name;
            this.type = type;
            this.slot = slot;
            this.readonly = readonly;
        }
    }
    private final Definition definition;
    final Reserved reserved;
    // Each entry is the number of variables declared in that scope; the head
    // of the deque is the innermost (current) scope.
    private final Deque<Integer> scopes = new ArrayDeque<>();
    // Declared variables, innermost-first; the head carries the highest slot.
    private final Deque<Variable> variables = new ArrayDeque<>();
    public Variables(final CompilerSettings settings, final Definition definition, final Reserved reserved) {
        this.definition = definition;
        this.reserved = reserved;
        incrementScope();
        // Method variables.
        // This reference.  Internal use only.
        addVariable("[" + Reserved.THIS + "]" , definition.execType.name, Reserved.THIS , true, true);
        // Input map of variables passed to the script. TODO: Rename to 'params' since that will be its use.
        addVariable("[" + Reserved.PARAMS + "]", definition.smapType.name, Reserved.PARAMS, true, true);
        // Scorer parameter passed to the script.  Internal use only.
        addVariable("[" + Reserved.SCORER + "]", definition.defType.name , Reserved.SCORER, true, true);
        // Doc parameter passed to the script. TODO: Currently working as a Map<String,Def>, we can do better?
        addVariable("[" + Reserved.DOC + "]"   , definition.smapType.name, Reserved.DOC   , true, true);
        // Aggregation _value parameter passed to the script.
        addVariable("[" + Reserved.VALUE + "]" , definition.defType.name , Reserved.VALUE , true, true);
        // Shortcut variables.
        // Document's score as a read-only double.
        if (reserved.score) {
            addVariable("[" + Reserved.SCORE + "]", definition.doubleType.name, Reserved.SCORE, true, true);
        }
        // The ctx map set by executable scripts as a read-only map.
        if (reserved.ctx) {
            addVariable("[" + Reserved.CTX + "]", definition.smapType.name, Reserved.CTX, true, true);
        }
        // Loop counter to catch infinite loops.  Internal use only.
        if (reserved.loop && settings.getMaxLoopCounter() > 0) {
            addVariable("[" + Reserved.LOOP + "]", definition.intType.name, Reserved.LOOP, true, true);
        }
    }
    /** Opens a new scope with zero declared variables. */
    public void incrementScope() {
        scopes.push(0);
    }
    /**
     * Closes the current scope, popping every variable declared in it.
     * Throws if a popped variable trips the usage check below.
     */
    public void decrementScope() {
        int remove = scopes.pop();
        while (remove > 0) {
            final Variable variable = variables.pop();
            // NOTE(review): the condition looks inverted relative to the
            // message -- it throws "never used" when read is TRUE.  The code
            // that sets 'read' lives outside this class, so its semantics
            // cannot be confirmed here; verify against the analysis phase.
            if (variable.read) {
                throw new IllegalArgumentException("Error [" + variable.location + "]: Variable [" + variable.name + "] never used.");
            }
            --remove;
        }
    }
    /**
     * Looks up a variable by name, innermost scope first.
     * If {@code location} is non-null a missing variable is an error;
     * if it is null, null is returned instead (used for existence checks).
     */
    public Variable getVariable(final String location, final String name) {
        final Iterator<Variable> itr = variables.iterator();
        while (itr.hasNext()) {
            final Variable variable = itr.next();
            if (variable.name.equals(name)) {
                return variable;
            }
        }
        if (location != null) {
            throw new IllegalArgumentException("Error " + location + ": Variable [" + name + "] not defined.");
        }
        return null;
    }
    /**
     * Declares a new variable in the current scope and assigns it the next
     * free JVM local slot.
     *
     * @param location declaration site, used in error messages
     * @param typestr  the declared type name, resolved against the definition
     * @param name     the variable name; must not shadow an existing variable,
     *                 a reserved name (unless {@code reserved} is true), or a type name
     * @param readonly whether assignments to the variable are forbidden
     * @param reserved true when declaring one of the implicit reserved variables
     * @return the newly declared variable
     */
    public Variable addVariable(final String location, final String typestr, final String name,
                                final boolean readonly, final boolean reserved) {
        if (!reserved && this.reserved.isReserved(name)) {
            throw new IllegalArgumentException("Error " + location + ": Variable name [" + name + "] is reserved.");
        }
        if (getVariable(null, name) != null) {
            throw new IllegalArgumentException("Error " + location + ": Variable name [" + name + "] already defined.");
        }
        final Type type;
        try {
            type = definition.getType(typestr);
        } catch (final IllegalArgumentException exception) {
            throw new IllegalArgumentException("Error " + location + ": Not a type [" + typestr + "].");
        }
        // A variable name is illegal if it resolves to a type.  Names
        // containing '<' (generic syntax) are skipped since they cannot be
        // plain identifiers anyway.
        boolean legal = !name.contains("<");
        try {
            definition.getType(name);
            legal = false;
        } catch (final IllegalArgumentException exception) {
            // Do nothing.
        }
        if (!legal) {
            throw new IllegalArgumentException("Error " + location + ": Variable name [" + name + "] cannot be a type.");
        }
        // The next slot is the previous variable's slot plus the size of its
        // type (longs/doubles occupy two slots).
        final Variable previous = variables.peekFirst();
        int slot = 0;
        if (previous != null) {
            slot = previous.slot + previous.type.type.getSize();
        }
        final Variable variable = new Variable(location, name, type, slot, readonly);
        variables.push(variable);
        // Bump the declaration count of the current scope.
        final int update = scopes.pop() + 1;
        scopes.push(update);
        return variable;
    }
}

View File

@ -19,58 +19,9 @@
package org.elasticsearch.painless;
import org.antlr.v4.runtime.tree.ParseTree;
import org.elasticsearch.painless.PainlessParser.AfterthoughtContext;
import org.elasticsearch.painless.PainlessParser.ArgumentsContext;
import org.elasticsearch.painless.PainlessParser.AssignmentContext;
import org.elasticsearch.painless.PainlessParser.BinaryContext;
import org.elasticsearch.painless.PainlessParser.BoolContext;
import org.elasticsearch.painless.PainlessParser.BreakContext;
import org.elasticsearch.painless.PainlessParser.CastContext;
import org.elasticsearch.painless.PainlessParser.CompContext;
import org.elasticsearch.painless.PainlessParser.ConditionalContext;
import org.elasticsearch.painless.PainlessParser.ContinueContext;
import org.elasticsearch.painless.PainlessParser.DeclContext;
import org.elasticsearch.painless.PainlessParser.DeclarationContext;
import org.elasticsearch.painless.PainlessParser.DecltypeContext;
import org.elasticsearch.painless.PainlessParser.DeclvarContext;
import org.elasticsearch.painless.PainlessParser.DoContext;
import org.elasticsearch.painless.PainlessParser.EmptyContext;
import org.elasticsearch.painless.PainlessParser.EmptyscopeContext;
import org.elasticsearch.painless.PainlessParser.ExprContext;
import org.elasticsearch.painless.PainlessParser.ExtbraceContext;
import org.elasticsearch.painless.PainlessParser.ExtcallContext;
import org.elasticsearch.painless.PainlessParser.ExtcastContext;
import org.elasticsearch.painless.PainlessParser.ExtdotContext;
import org.elasticsearch.painless.PainlessParser.ExternalContext;
import org.elasticsearch.painless.PainlessParser.ExtfieldContext;
import org.elasticsearch.painless.PainlessParser.ExtnewContext;
import org.elasticsearch.painless.PainlessParser.ExtprecContext;
import org.elasticsearch.painless.PainlessParser.ExtstartContext;
import org.elasticsearch.painless.PainlessParser.ExtstringContext;
import org.elasticsearch.painless.PainlessParser.ExtvarContext;
import org.elasticsearch.painless.PainlessParser.FalseContext;
import org.elasticsearch.painless.PainlessParser.ForContext;
import org.elasticsearch.painless.PainlessParser.GenericContext;
import org.elasticsearch.painless.PainlessParser.IdentifierContext;
import org.elasticsearch.painless.PainlessParser.IfContext;
import org.elasticsearch.painless.PainlessParser.IncrementContext;
import org.elasticsearch.painless.PainlessParser.InitializerContext;
import org.elasticsearch.painless.PainlessParser.MultipleContext;
import org.elasticsearch.painless.PainlessParser.NullContext;
import org.elasticsearch.painless.PainlessParser.NumericContext;
import org.elasticsearch.painless.PainlessParser.PostincContext;
import org.elasticsearch.painless.PainlessParser.PrecedenceContext;
import org.elasticsearch.painless.PainlessParser.PreincContext;
import org.elasticsearch.painless.PainlessParser.ReturnContext;
import org.elasticsearch.painless.PainlessParser.SingleContext;
import org.elasticsearch.painless.PainlessParser.SourceContext;
import org.elasticsearch.painless.PainlessParser.ThrowContext;
import org.elasticsearch.painless.PainlessParser.TrapContext;
import org.elasticsearch.painless.PainlessParser.TrueContext;
import org.elasticsearch.painless.PainlessParser.TryContext;
import org.elasticsearch.painless.PainlessParser.UnaryContext;
import org.elasticsearch.painless.PainlessParser.WhileContext;
import org.elasticsearch.painless.Variables.Reserved;
import org.elasticsearch.painless.Variables.Variable;
import org.elasticsearch.painless.node.SSource;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
@ -79,45 +30,44 @@ import static org.elasticsearch.painless.WriterConstants.BASE_CLASS_TYPE;
import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE;
import static org.elasticsearch.painless.WriterConstants.CONSTRUCTOR;
import static org.elasticsearch.painless.WriterConstants.EXECUTE;
import static org.elasticsearch.painless.WriterConstants.MAP_GET;
import static org.elasticsearch.painless.WriterConstants.MAP_TYPE;
class Writer extends PainlessParserBaseVisitor<Void> {
static byte[] write(Metadata metadata) {
final Writer writer = new Writer(metadata);
/**
* Runs the writing phase of compilation using the Painless AST.
*/
final class Writer {
static byte[] write(final CompilerSettings settings, final Definition definition,
final String source, final Variables variables, final SSource root) {
final Writer writer = new Writer(settings, definition, source, variables, root);
return writer.getBytes();
}
private final Metadata metadata;
private final ParseTree root;
private final String source;
private final CompilerSettings settings;
private final Definition definition;
private final String source;
private final Variables variables;
private final SSource root;
private final ClassWriter writer;
private final GeneratorAdapter execute;
private final GeneratorAdapter adapter;
private final WriterStatement statement;
private final WriterExpression expression;
private final WriterExternal external;
private Writer(final Metadata metadata) {
this.metadata = metadata;
root = metadata.root;
source = metadata.source;
settings = metadata.settings;
private Writer(final CompilerSettings settings, final Definition definition,
final String source, final Variables variables, final SSource root) {
this.settings = settings;
this.definition = definition;
this.source = source;
this.variables = variables;
this.root = root;
writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
writeBegin();
writeConstructor();
execute = new GeneratorAdapter(Opcodes.ACC_PUBLIC, EXECUTE, null, null, writer);
final WriterUtility utility = new WriterUtility(metadata, execute);
final WriterCaster caster = new WriterCaster(execute);
statement = new WriterStatement(metadata, execute, this, utility);
expression = new WriterExpression(metadata, execute, this, utility, caster);
external = new WriterExternal(metadata, execute, this, utility, caster);
adapter = new GeneratorAdapter(Opcodes.ACC_PUBLIC, EXECUTE, null, null, writer);
writeExecute();
writeEnd();
@ -130,12 +80,9 @@ class Writer extends PainlessParserBaseVisitor<Void> {
final String name = CLASS_TYPE.getInternalName();
// apply marker interface NeedsScore if we use the score!
final String interfaces[];
if (metadata.scoreValueUsed) {
interfaces = new String[] { WriterConstants.NEEDS_SCORE_TYPE.getInternalName() };
} else {
interfaces = null;
}
final String interfaces[] = variables.reserved.score ?
new String[] { WriterConstants.NEEDS_SCORE_TYPE.getInternalName() } : null;
writer.visit(version, access, name, null, base, interfaces);
writer.visitSource(source, null);
}
@ -150,29 +97,43 @@ class Writer extends PainlessParserBaseVisitor<Void> {
}
private void writeExecute() {
if (metadata.scoreValueUsed) {
if (variables.reserved.score) {
// if the _score value is used, we do this once:
// final double _score = scorer.score();
execute.visitVarInsn(Opcodes.ALOAD, metadata.scorerValueSlot);
execute.invokeVirtual(WriterConstants.SCORER_TYPE, WriterConstants.SCORER_SCORE);
execute.visitInsn(Opcodes.F2D);
execute.visitVarInsn(Opcodes.DSTORE, metadata.scoreValueSlot);
// final double _score = scorer.score();
final Variable scorer = variables.getVariable(null, Reserved.SCORER);
final Variable score = variables.getVariable(null, Reserved.SCORE);
adapter.visitVarInsn(Opcodes.ALOAD, scorer.slot);
adapter.invokeVirtual(WriterConstants.SCORER_TYPE, WriterConstants.SCORER_SCORE);
adapter.visitInsn(Opcodes.F2D);
adapter.visitVarInsn(Opcodes.DSTORE, score.slot);
}
if (metadata.ctxValueUsed) {
if (variables.reserved.ctx) {
// if the _ctx value is used, we do this once:
// final Map<String,Object> ctx = input.get("ctx");
execute.visitVarInsn(Opcodes.ALOAD, metadata.paramsValueSlot);
execute.push("ctx");
execute.invokeInterface(WriterConstants.MAP_TYPE, WriterConstants.MAP_GET);
execute.visitVarInsn(Opcodes.ASTORE, metadata.ctxValueSlot);
// final Map<String,Object> ctx = input.get("ctx");
final Variable input = variables.getVariable(null, Reserved.PARAMS);
final Variable ctx = variables.getVariable(null, Reserved.CTX);
adapter.visitVarInsn(Opcodes.ALOAD, input.slot);
adapter.push(Reserved.CTX);
adapter.invokeInterface(MAP_TYPE, MAP_GET);
adapter.visitVarInsn(Opcodes.ASTORE, ctx.slot);
}
execute.push(settings.getMaxLoopCounter());
execute.visitVarInsn(Opcodes.ISTORE, metadata.loopCounterSlot);
if (variables.reserved.loop) {
// if there is infinite loop protection, we do this once:
// int #loop = settings.getMaxLoopCounter()
visit(root);
execute.endMethod();
final Variable loop = variables.getVariable(null, Reserved.LOOP);
adapter.push(settings.getMaxLoopCounter());
adapter.visitVarInsn(Opcodes.ISTORE, loop.slot);
}
root.write(settings, definition, adapter);
adapter.endMethod();
}
private void writeEnd() {
@ -182,348 +143,4 @@ class Writer extends PainlessParserBaseVisitor<Void> {
private byte[] getBytes() {
return writer.toByteArray();
}
@Override
public Void visitSource(final SourceContext ctx) {
statement.processSource(ctx);
return null;
}
@Override
public Void visitIf(final IfContext ctx) {
statement.processIf(ctx);
return null;
}
@Override
public Void visitWhile(final WhileContext ctx) {
statement.processWhile(ctx);
return null;
}
@Override
public Void visitDo(final DoContext ctx) {
statement.processDo(ctx);
return null;
}
@Override
public Void visitFor(final ForContext ctx) {
statement.processFor(ctx);
return null;
}
@Override
public Void visitDecl(final DeclContext ctx) {
statement.processDecl(ctx);
return null;
}
@Override
public Void visitContinue(final ContinueContext ctx) {
statement.processContinue();
return null;
}
@Override
public Void visitBreak(final BreakContext ctx) {
statement.processBreak();
return null;
}
@Override
public Void visitReturn(final ReturnContext ctx) {
statement.processReturn(ctx);
return null;
}
@Override
public Void visitTry(final TryContext ctx) {
statement.processTry(ctx);
return null;
}
@Override
public Void visitThrow(final ThrowContext ctx) {
statement.processThrow(ctx);
return null;
}
@Override
public Void visitExpr(final ExprContext ctx) {
statement.processExpr(ctx);
return null;
}
@Override
public Void visitMultiple(final MultipleContext ctx) {
statement.processMultiple(ctx);
return null;
}
@Override
public Void visitSingle(final SingleContext ctx) {
statement.processSingle(ctx);
return null;
}
@Override
public Void visitEmpty(final EmptyContext ctx) {
throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitEmptyscope(final EmptyscopeContext ctx) {
throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitInitializer(final InitializerContext ctx) {
statement.processInitializer(ctx);
return null;
}
@Override
public Void visitAfterthought(final AfterthoughtContext ctx) {
statement.processAfterthought(ctx);
return null;
}
@Override
public Void visitDeclaration(DeclarationContext ctx) {
statement.processDeclaration(ctx);
return null;
}
@Override
public Void visitDecltype(final DecltypeContext ctx) {
throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitDeclvar(final DeclvarContext ctx) {
statement.processDeclvar(ctx);
return null;
}
@Override
public Void visitTrap(final TrapContext ctx) {
statement.processTrap(ctx);
return null;
}
@Override
public Void visitIdentifier(IdentifierContext ctx) {
throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitGeneric(GenericContext ctx) {
throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitPrecedence(final PrecedenceContext ctx) {
throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitNumeric(final NumericContext ctx) {
expression.processNumeric(ctx);
return null;
}
@Override
public Void visitTrue(final TrueContext ctx) {
expression.processTrue(ctx);
return null;
}
@Override
public Void visitFalse(final FalseContext ctx) {
expression.processFalse(ctx);
return null;
}
@Override
public Void visitNull(final NullContext ctx) {
expression.processNull(ctx);
return null;
}
@Override
public Void visitExternal(final ExternalContext ctx) {
expression.processExternal(ctx);
return null;
}
@Override
public Void visitPostinc(final PostincContext ctx) {
expression.processPostinc(ctx);
return null;
}
@Override
public Void visitPreinc(final PreincContext ctx) {
expression.processPreinc(ctx);
return null;
}
@Override
public Void visitUnary(final UnaryContext ctx) {
expression.processUnary(ctx);
return null;
}
@Override
public Void visitCast(final CastContext ctx) {
expression.processCast(ctx);
return null;
}
@Override
public Void visitBinary(final BinaryContext ctx) {
expression.processBinary(ctx);
return null;
}
@Override
public Void visitComp(final CompContext ctx) {
expression.processComp(ctx);
return null;
}
@Override
public Void visitBool(final BoolContext ctx) {
expression.processBool(ctx);
return null;
}
@Override
public Void visitConditional(final ConditionalContext ctx) {
expression.processConditional(ctx);
return null;
}
@Override
public Void visitAssignment(final AssignmentContext ctx) {
expression.processAssignment(ctx);
return null;
}
@Override
public Void visitExtstart(final ExtstartContext ctx) {
external.processExtstart(ctx);
return null;
}
@Override
public Void visitExtprec(final ExtprecContext ctx) {
external.processExtprec(ctx);
return null;
}
@Override
public Void visitExtcast(final ExtcastContext ctx) {
external.processExtcast(ctx);
return null;
}
@Override
public Void visitExtbrace(final ExtbraceContext ctx) {
external.processExtbrace(ctx);
return null;
}
@Override
public Void visitExtdot(final ExtdotContext ctx) {
external.processExtdot(ctx);
return null;
}
@Override
public Void visitExtcall(final ExtcallContext ctx) {
external.processExtcall(ctx);
return null;
}
@Override
public Void visitExtvar(final ExtvarContext ctx) {
external.processExtvar(ctx);
return null;
}
@Override
public Void visitExtfield(final ExtfieldContext ctx) {
external.processExtfield(ctx);
return null;
}
@Override
public Void visitExtnew(final ExtnewContext ctx) {
external.processExtnew(ctx);
return null;
}
@Override
public Void visitExtstring(final ExtstringContext ctx) {
external.processExtstring(ctx);
return null;
}
@Override
public Void visitArguments(final ArgumentsContext ctx) {
throw new UnsupportedOperationException(WriterUtility.error(ctx) + "Unexpected state.");
}
@Override
public Void visitIncrement(final IncrementContext ctx) {
expression.processIncrement(ctx);
return null;
}
}

View File

@ -1,86 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Transform;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Metadata.ExpressionMetadata;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
 * Emits the bytecode required to convert a value from one type to another:
 * plain primitive/reference casts via {@link #writeCast}, and method-backed
 * conversions (transforms) via {@link #writeTransform}.
 */
class WriterCaster {

    /** Adapter for the method currently being generated. */
    private final GeneratorAdapter adapter;

    WriterCaster(final GeneratorAdapter execute) {
        this.adapter = execute;
    }

    void checkWriteCast(final ExpressionMetadata sort) {
        checkWriteCast(sort.source, sort.cast);
    }

    void checkWriteCast(final ParserRuleContext source, final Cast cast) {
        // Transforms are casts backed by a method call; check them first
        // since a Transform is also a Cast.
        if (cast instanceof Transform) {
            writeTransform((Transform)cast);
            return;
        }

        if (cast == null) {
            throw new IllegalStateException(WriterUtility.error(source) + "Unexpected cast object.");
        }

        writeCast(cast);
    }

    /** Emits a plain cast; a no-op when source and target types are equal. */
    private void writeCast(final Cast cast) {
        final Type from = cast.from;
        final Type to = cast.to;

        if (from.equals(to)) {
            return;
        }

        final boolean primitiveNumeric =
            from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive;

        if (primitiveNumeric) {
            adapter.cast(from.type, to.type);
        } else {
            try {
                // A widening reference conversion needs no instruction;
                // asSubclass throwing means a checkcast is required.
                from.clazz.asSubclass(to.clazz);
            } catch (final ClassCastException exception) {
                adapter.checkCast(to.type);
            }
        }
    }

    /**
     * Emits a method-backed conversion: optional upcast, then the conversion
     * method invocation (static/interface/virtual as appropriate), then an
     * optional downcast.
     */
    private void writeTransform(final Transform transform) {
        if (transform.upcast != null) {
            adapter.checkCast(transform.upcast.type);
        }

        final int modifiers = transform.method.reflect.getModifiers();

        if (java.lang.reflect.Modifier.isStatic(modifiers)) {
            adapter.invokeStatic(transform.method.owner.type, transform.method.method);
        } else if (java.lang.reflect.Modifier.isInterface(transform.method.owner.clazz.getModifiers())) {
            adapter.invokeInterface(transform.method.owner.type, transform.method.method);
        } else {
            adapter.invokeVirtual(transform.method.owner.type, transform.method.method);
        }

        if (transform.downcast != null) {
            adapter.checkCast(transform.downcast.type);
        }
    }
}

View File

@ -31,116 +31,130 @@ import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.Map;
class WriterConstants {
final static String BASE_CLASS_NAME = Executable.class.getName();
final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPainlessExecutable";
final static Type BASE_CLASS_TYPE = Type.getType(Executable.class);
final static Type CLASS_TYPE = Type.getType("L" + CLASS_NAME.replace(".", "/") + ";");
/**
* General pool of constants used during the writing phase of compilation.
*/
public final class WriterConstants {
final static Method CONSTRUCTOR = getAsmMethod(void.class, "<init>", Definition.class, String.class, String.class);
final static Method EXECUTE = getAsmMethod(Object.class, "execute", Map.class, Scorer.class, LeafDocLookup.class, Object.class);
public final static String BASE_CLASS_NAME = Executable.class.getName();
public final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPainlessExecutable";
public final static Type BASE_CLASS_TYPE = Type.getType(Executable.class);
public final static Type CLASS_TYPE = Type.getType("L" + CLASS_NAME.replace(".", "/") + ";");
final static Type PAINLESS_ERROR_TYPE = Type.getType(PainlessError.class);
public final static Method CONSTRUCTOR = getAsmMethod(void.class, "<init>", String.class, String.class);
public final static Method EXECUTE =
getAsmMethod(Object.class, "execute", Map.class, Scorer.class, LeafDocLookup.class, Object.class);
final static Type DEFINITION_TYPE = Type.getType(Definition.class);
final static Type NEEDS_SCORE_TYPE = Type.getType(NeedsScore.class);
public final static Type PAINLESS_ERROR_TYPE = Type.getType(PainlessError.class);
final static Type OBJECT_TYPE = Type.getType(Object.class);
public final static Type NEEDS_SCORE_TYPE = Type.getType(NeedsScore.class);
public final static Type SCORER_TYPE = Type.getType(Scorer.class);
public final static Method SCORER_SCORE = getAsmMethod(float.class, "score");
final static Type SCORER_TYPE = Type.getType(Scorer.class);
final static Method SCORER_SCORE = getAsmMethod(float.class, "score");
final static Type MAP_TYPE = Type.getType(Map.class);
final static Method MAP_GET = getAsmMethod(Object.class, "get", Object.class);
public final static Type MAP_TYPE = Type.getType(Map.class);
public final static Method MAP_GET = getAsmMethod(Object.class, "get", Object.class);
/** dynamic callsite bootstrap signature */
final static MethodType DEF_BOOTSTRAP_TYPE = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class,
String.class, MethodType.class, int.class);
final static Handle DEF_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(DynamicCallSite.class),
"bootstrap", WriterConstants.DEF_BOOTSTRAP_TYPE.toMethodDescriptorString());
public final static MethodType DEF_BOOTSTRAP_TYPE =
MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, int.class);
public final static Handle DEF_BOOTSTRAP_HANDLE =
new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(DynamicCallSite.class),
"bootstrap", DEF_BOOTSTRAP_TYPE.toMethodDescriptorString());
final static String DEF_DYNAMIC_LOAD_FIELD_DESC = MethodType.methodType(Object.class, Object.class)
.toMethodDescriptorString();
final static String DEF_DYNAMIC_STORE_FIELD_DESC = MethodType.methodType(void.class, Object.class, Object.class)
.toMethodDescriptorString();
final static String DEF_DYNAMIC_ARRAY_LOAD_DESC = MethodType.methodType(Object.class, Object.class, Object.class)
.toMethodDescriptorString();
final static String DEF_DYNAMIC_ARRAY_STORE_DESC = MethodType.methodType(void.class, Object.class, Object.class, Object.class)
.toMethodDescriptorString();
public final static String DEF_DYNAMIC_LOAD_FIELD_DESC =
MethodType.methodType(Object.class, Object.class).toMethodDescriptorString();
public final static String DEF_DYNAMIC_STORE_FIELD_DESC =
MethodType.methodType(void.class, Object.class, Object.class).toMethodDescriptorString();
public final static String DEF_DYNAMIC_ARRAY_LOAD_DESC =
MethodType.methodType(Object.class, Object.class, Object.class).toMethodDescriptorString();
public final static String DEF_DYNAMIC_ARRAY_STORE_DESC =
MethodType.methodType(void.class, Object.class, Object.class, Object.class).toMethodDescriptorString();
final static Method DEF_NOT_CALL = getAsmMethod(Object.class, "not", Object.class);
final static Method DEF_NEG_CALL = getAsmMethod(Object.class, "neg", Object.class);
final static Method DEF_MUL_CALL = getAsmMethod(Object.class, "mul", Object.class, Object.class);
final static Method DEF_DIV_CALL = getAsmMethod(Object.class, "div", Object.class, Object.class);
final static Method DEF_REM_CALL = getAsmMethod(Object.class, "rem", Object.class, Object.class);
final static Method DEF_ADD_CALL = getAsmMethod(Object.class, "add", Object.class, Object.class);
final static Method DEF_SUB_CALL = getAsmMethod(Object.class, "sub", Object.class, Object.class);
final static Method DEF_LSH_CALL = getAsmMethod(Object.class, "lsh", Object.class, Object.class);
final static Method DEF_RSH_CALL = getAsmMethod(Object.class, "rsh", Object.class, Object.class);
final static Method DEF_USH_CALL = getAsmMethod(Object.class, "ush", Object.class, Object.class);
final static Method DEF_AND_CALL = getAsmMethod(Object.class, "and", Object.class, Object.class);
final static Method DEF_XOR_CALL = getAsmMethod(Object.class, "xor", Object.class, Object.class);
final static Method DEF_OR_CALL = getAsmMethod(Object.class, "or" , Object.class, Object.class);
final static Method DEF_EQ_CALL = getAsmMethod(boolean.class, "eq" , Object.class, Object.class);
final static Method DEF_LT_CALL = getAsmMethod(boolean.class, "lt" , Object.class, Object.class);
final static Method DEF_LTE_CALL = getAsmMethod(boolean.class, "lte", Object.class, Object.class);
final static Method DEF_GT_CALL = getAsmMethod(boolean.class, "gt" , Object.class, Object.class);
final static Method DEF_GTE_CALL = getAsmMethod(boolean.class, "gte", Object.class, Object.class);
public final static Method DEF_NOT_CALL = getAsmMethod(Object.class, "not", Object.class);
public final static Method DEF_NEG_CALL = getAsmMethod(Object.class, "neg", Object.class);
public final static Method DEF_MUL_CALL = getAsmMethod(Object.class, "mul", Object.class, Object.class);
public final static Method DEF_DIV_CALL = getAsmMethod(Object.class, "div", Object.class, Object.class);
public final static Method DEF_REM_CALL = getAsmMethod(Object.class, "rem", Object.class, Object.class);
public final static Method DEF_ADD_CALL = getAsmMethod(Object.class, "add", Object.class, Object.class);
public final static Method DEF_SUB_CALL = getAsmMethod(Object.class, "sub", Object.class, Object.class);
public final static Method DEF_LSH_CALL = getAsmMethod(Object.class, "lsh", Object.class, int.class);
public final static Method DEF_RSH_CALL = getAsmMethod(Object.class, "rsh", Object.class, int.class);
public final static Method DEF_USH_CALL = getAsmMethod(Object.class, "ush", Object.class, int.class);
public final static Method DEF_AND_CALL = getAsmMethod(Object.class, "and", Object.class, Object.class);
public final static Method DEF_XOR_CALL = getAsmMethod(Object.class, "xor", Object.class, Object.class);
public final static Method DEF_OR_CALL = getAsmMethod(Object.class, "or" , Object.class, Object.class);
public final static Method DEF_EQ_CALL = getAsmMethod(boolean.class, "eq" , Object.class, Object.class);
public final static Method DEF_LT_CALL = getAsmMethod(boolean.class, "lt" , Object.class, Object.class);
public final static Method DEF_LTE_CALL = getAsmMethod(boolean.class, "lte", Object.class, Object.class);
public final static Method DEF_GT_CALL = getAsmMethod(boolean.class, "gt" , Object.class, Object.class);
public final static Method DEF_GTE_CALL = getAsmMethod(boolean.class, "gte", Object.class, Object.class);
final static Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class);
public final static Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class);
final static Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, "<init>");
final static Method STRINGBUILDER_APPEND_BOOLEAN = getAsmMethod(StringBuilder.class, "append", boolean.class);
final static Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class);
final static Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class);
final static Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class);
final static Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class);
final static Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class);
final static Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class);
final static Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class);
final static Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString");
public final static Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, "<init>");
public final static Method STRINGBUILDER_APPEND_BOOLEAN = getAsmMethod(StringBuilder.class, "append", boolean.class);
public final static Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class);
public final static Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class);
public final static Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class);
public final static Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class);
public final static Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class);
public final static Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class);
public final static Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class);
public final static Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString");
final static Method TOINTEXACT_LONG = getAsmMethod(int.class, "toIntExact", long.class);
final static Method NEGATEEXACT_INT = getAsmMethod(int.class, "negateExact", int.class);
final static Method NEGATEEXACT_LONG = getAsmMethod(long.class, "negateExact", long.class);
final static Method MULEXACT_INT = getAsmMethod(int.class, "multiplyExact", int.class, int.class);
final static Method MULEXACT_LONG = getAsmMethod(long.class, "multiplyExact", long.class, long.class);
final static Method ADDEXACT_INT = getAsmMethod(int.class, "addExact", int.class, int.class);
final static Method ADDEXACT_LONG = getAsmMethod(long.class, "addExact", long.class, long.class);
final static Method SUBEXACT_INT = getAsmMethod(int.class, "subtractExact", int.class, int.class);
final static Method SUBEXACT_LONG = getAsmMethod(long.class, "subtractExact", long.class, long.class);
public final static Method TOINTEXACT_LONG = getAsmMethod(int.class, "toIntExact", long.class);
public final static Method NEGATEEXACT_INT = getAsmMethod(int.class, "negateExact", int.class);
public final static Method NEGATEEXACT_LONG = getAsmMethod(long.class, "negateExact", long.class);
public final static Method MULEXACT_INT = getAsmMethod(int.class, "multiplyExact", int.class, int.class);
public final static Method MULEXACT_LONG = getAsmMethod(long.class, "multiplyExact", long.class, long.class);
public final static Method ADDEXACT_INT = getAsmMethod(int.class, "addExact", int.class, int.class);
public final static Method ADDEXACT_LONG = getAsmMethod(long.class, "addExact", long.class, long.class);
public final static Method SUBEXACT_INT = getAsmMethod(int.class, "subtractExact", int.class, int.class);
public final static Method SUBEXACT_LONG = getAsmMethod(long.class, "subtractExact", long.class, long.class);
final static Method CHECKEQUALS = getAsmMethod(boolean.class, "checkEquals", Object.class, Object.class);
final static Method TOBYTEEXACT_INT = getAsmMethod(byte.class, "toByteExact", int.class);
final static Method TOBYTEEXACT_LONG = getAsmMethod(byte.class, "toByteExact", long.class);
final static Method TOBYTEWOOVERFLOW_FLOAT = getAsmMethod(byte.class, "toByteWithoutOverflow", float.class);
final static Method TOBYTEWOOVERFLOW_DOUBLE = getAsmMethod(byte.class, "toByteWithoutOverflow", double.class);
final static Method TOSHORTEXACT_INT = getAsmMethod(short.class, "toShortExact", int.class);
final static Method TOSHORTEXACT_LONG = getAsmMethod(short.class, "toShortExact", long.class);
final static Method TOSHORTWOOVERFLOW_FLOAT = getAsmMethod(short.class, "toShortWithoutOverflow", float.class);
final static Method TOSHORTWOOVERFLOW_DOUBLE = getAsmMethod(short.class, "toShortWihtoutOverflow", double.class);
final static Method TOCHAREXACT_INT = getAsmMethod(char.class, "toCharExact", int.class);
final static Method TOCHAREXACT_LONG = getAsmMethod(char.class, "toCharExact", long.class);
final static Method TOCHARWOOVERFLOW_FLOAT = getAsmMethod(char.class, "toCharWithoutOverflow", float.class);
final static Method TOCHARWOOVERFLOW_DOUBLE = getAsmMethod(char.class, "toCharWithoutOverflow", double.class);
final static Method TOINTWOOVERFLOW_FLOAT = getAsmMethod(int.class, "toIntWithoutOverflow", float.class);
final static Method TOINTWOOVERFLOW_DOUBLE = getAsmMethod(int.class, "toIntWithoutOverflow", double.class);
final static Method TOLONGWOOVERFLOW_FLOAT = getAsmMethod(long.class, "toLongWithoutOverflow", float.class);
final static Method TOLONGWOOVERFLOW_DOUBLE = getAsmMethod(long.class, "toLongWithoutOverflow", double.class);
final static Method TOFLOATWOOVERFLOW_DOUBLE = getAsmMethod(float.class , "toFloatWihtoutOverflow", double.class);
final static Method MULWOOVERLOW_FLOAT = getAsmMethod(float.class, "multiplyWithoutOverflow", float.class, float.class);
final static Method MULWOOVERLOW_DOUBLE = getAsmMethod(double.class, "multiplyWithoutOverflow", double.class, double.class);
final static Method DIVWOOVERLOW_INT = getAsmMethod(int.class, "divideWithoutOverflow", int.class, int.class);
final static Method DIVWOOVERLOW_LONG = getAsmMethod(long.class, "divideWithoutOverflow", long.class, long.class);
final static Method DIVWOOVERLOW_FLOAT = getAsmMethod(float.class, "divideWithoutOverflow", float.class, float.class);
final static Method DIVWOOVERLOW_DOUBLE = getAsmMethod(double.class, "divideWithoutOverflow", double.class, double.class);
final static Method REMWOOVERLOW_FLOAT = getAsmMethod(float.class, "remainderWithoutOverflow", float.class, float.class);
final static Method REMWOOVERLOW_DOUBLE = getAsmMethod(double.class, "remainderWithoutOverflow", double.class, double.class);
final static Method ADDWOOVERLOW_FLOAT = getAsmMethod(float.class, "addWithoutOverflow", float.class, float.class);
final static Method ADDWOOVERLOW_DOUBLE = getAsmMethod(double.class, "addWithoutOverflow", double.class, double.class);
final static Method SUBWOOVERLOW_FLOAT = getAsmMethod(float.class, "subtractWithoutOverflow", float.class, float.class);
final static Method SUBWOOVERLOW_DOUBLE = getAsmMethod(double.class, "subtractWithoutOverflow", double.class, double.class);
public final static Method CHECKEQUALS =
getAsmMethod(boolean.class, "checkEquals", Object.class, Object.class);
public final static Method TOBYTEEXACT_INT = getAsmMethod(byte.class, "toByteExact", int.class);
public final static Method TOBYTEEXACT_LONG = getAsmMethod(byte.class, "toByteExact", long.class);
public final static Method TOBYTEWOOVERFLOW_FLOAT = getAsmMethod(byte.class, "toByteWithoutOverflow", float.class);
public final static Method TOBYTEWOOVERFLOW_DOUBLE = getAsmMethod(byte.class, "toByteWithoutOverflow", double.class);
public final static Method TOSHORTEXACT_INT = getAsmMethod(short.class, "toShortExact", int.class);
public final static Method TOSHORTEXACT_LONG = getAsmMethod(short.class, "toShortExact", long.class);
public final static Method TOSHORTWOOVERFLOW_FLOAT = getAsmMethod(short.class, "toShortWithoutOverflow", float.class);
public final static Method TOSHORTWOOVERFLOW_DOUBLE = getAsmMethod(short.class, "toShortWihtoutOverflow", double.class);
public final static Method TOCHAREXACT_INT = getAsmMethod(char.class, "toCharExact", int.class);
public final static Method TOCHAREXACT_LONG = getAsmMethod(char.class, "toCharExact", long.class);
public final static Method TOCHARWOOVERFLOW_FLOAT = getAsmMethod(char.class, "toCharWithoutOverflow", float.class);
public final static Method TOCHARWOOVERFLOW_DOUBLE = getAsmMethod(char.class, "toCharWithoutOverflow", double.class);
public final static Method TOINTWOOVERFLOW_FLOAT = getAsmMethod(int.class, "toIntWithoutOverflow", float.class);
public final static Method TOINTWOOVERFLOW_DOUBLE = getAsmMethod(int.class, "toIntWithoutOverflow", double.class);
public final static Method TOLONGWOOVERFLOW_FLOAT = getAsmMethod(long.class, "toLongWithoutOverflow", float.class);
public final static Method TOLONGWOOVERFLOW_DOUBLE = getAsmMethod(long.class, "toLongWithoutOverflow", double.class);
public final static Method TOFLOATWOOVERFLOW_DOUBLE = getAsmMethod(float.class , "toFloatWihtoutOverflow", double.class);
public final static Method MULWOOVERLOW_FLOAT =
getAsmMethod(float.class, "multiplyWithoutOverflow", float.class, float.class);
public final static Method MULWOOVERLOW_DOUBLE =
getAsmMethod(double.class, "multiplyWithoutOverflow", double.class, double.class);
public final static Method DIVWOOVERLOW_INT =
getAsmMethod(int.class, "divideWithoutOverflow", int.class, int.class);
public final static Method DIVWOOVERLOW_LONG =
getAsmMethod(long.class, "divideWithoutOverflow", long.class, long.class);
public final static Method DIVWOOVERLOW_FLOAT =
getAsmMethod(float.class, "divideWithoutOverflow", float.class, float.class);
public final static Method DIVWOOVERLOW_DOUBLE =
getAsmMethod(double.class, "divideWithoutOverflow", double.class, double.class);
public final static Method REMWOOVERLOW_FLOAT =
getAsmMethod(float.class, "remainderWithoutOverflow", float.class, float.class);
public final static Method REMWOOVERLOW_DOUBLE =
getAsmMethod(double.class, "remainderWithoutOverflow", double.class, double.class);
public final static Method ADDWOOVERLOW_FLOAT =
getAsmMethod(float.class, "addWithoutOverflow", float.class, float.class);
public final static Method ADDWOOVERLOW_DOUBLE =
getAsmMethod(double.class, "addWithoutOverflow", double.class, double.class);
public final static Method SUBWOOVERLOW_FLOAT =
getAsmMethod(float.class, "subtractWithoutOverflow", float.class, float.class);
public final static Method SUBWOOVERLOW_DOUBLE =
getAsmMethod(double.class, "subtractWithoutOverflow", double.class, double.class);
private static Method getAsmMethod(final Class<?> rtype, final String name, final Class<?>... ptypes) {
return new Method(name, MethodType.methodType(rtype, ptypes).toMethodDescriptorString());

View File

@ -1,669 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Metadata.ExpressionMetadata;
import org.elasticsearch.painless.PainlessParser.AssignmentContext;
import org.elasticsearch.painless.PainlessParser.BinaryContext;
import org.elasticsearch.painless.PainlessParser.BoolContext;
import org.elasticsearch.painless.PainlessParser.CastContext;
import org.elasticsearch.painless.PainlessParser.CompContext;
import org.elasticsearch.painless.PainlessParser.ConditionalContext;
import org.elasticsearch.painless.PainlessParser.ExpressionContext;
import org.elasticsearch.painless.PainlessParser.ExternalContext;
import org.elasticsearch.painless.PainlessParser.FalseContext;
import org.elasticsearch.painless.PainlessParser.IncrementContext;
import org.elasticsearch.painless.PainlessParser.NullContext;
import org.elasticsearch.painless.PainlessParser.NumericContext;
import org.elasticsearch.painless.PainlessParser.PostincContext;
import org.elasticsearch.painless.PainlessParser.PreincContext;
import org.elasticsearch.painless.PainlessParser.TrueContext;
import org.elasticsearch.painless.PainlessParser.UnaryContext;
import org.elasticsearch.painless.WriterUtility.Branch;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
import static org.elasticsearch.painless.PainlessParser.ADD;
import static org.elasticsearch.painless.PainlessParser.BWAND;
import static org.elasticsearch.painless.PainlessParser.BWOR;
import static org.elasticsearch.painless.PainlessParser.BWXOR;
import static org.elasticsearch.painless.PainlessParser.DIV;
import static org.elasticsearch.painless.PainlessParser.LSH;
import static org.elasticsearch.painless.PainlessParser.MUL;
import static org.elasticsearch.painless.PainlessParser.REM;
import static org.elasticsearch.painless.PainlessParser.RSH;
import static org.elasticsearch.painless.PainlessParser.SUB;
import static org.elasticsearch.painless.PainlessParser.USH;
import static org.elasticsearch.painless.WriterConstants.CHECKEQUALS;
import static org.elasticsearch.painless.WriterConstants.DEF_EQ_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_GTE_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_GT_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_LTE_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_LT_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_NEG_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_NOT_CALL;
import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_LONG;
/**
 * Emits JVM bytecode (via an ASM {@code GeneratorAdapter}) for Painless
 * expression parse-tree nodes: literals, unary/binary/comparison/boolean
 * operators, conditionals, assignments, and increments.
 *
 * NOTE(review): this class is deleted by this commit (hunk header shows
 * "-1,669 +0,0"), presumably replaced as part of decoupling the ANTLR AST
 * from Painless — confirm against the new org.elasticsearch.painless.antlr
 * package.
 *
 * The general pattern throughout: when the analysis phase folded the node to
 * a constant ("postConst"), write the constant directly; otherwise emit the
 * operand code, then apply any pending cast (caster.checkWriteCast) and any
 * pending branch jump (utility.checkWriteBranch).
 */
class WriterExpression {
private final Metadata metadata;
private final Definition definition;
private final CompilerSettings settings;
private final GeneratorAdapter execute;
private final Writer writer;
private final WriterUtility utility;
private final WriterCaster caster;
// Wires this writer to the shared compilation state and sibling writers;
// definition/settings are pulled off the metadata for convenience.
WriterExpression(final Metadata metadata, final GeneratorAdapter execute, final Writer writer,
final WriterUtility utility, final WriterCaster caster) {
this.metadata = metadata;
definition = metadata.definition;
settings = metadata.settings;
this.execute = execute;
this.writer = writer;
this.utility = utility;
this.caster = caster;
}
// Writes a numeric literal: folded constant if available, otherwise the
// raw pre-constant followed by the pending cast.
void processNumeric(final NumericContext ctx) {
final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx);
final Object postConst = numericemd.postConst;
if (postConst == null) {
utility.writeNumeric(ctx, numericemd.preConst);
caster.checkWriteCast(numericemd);
} else {
utility.writeConstant(ctx, postConst);
}
utility.checkWriteBranch(ctx);
}
// Writes the "true" literal; if a branch is pending, jumps straight to its
// true label instead of pushing a value.
void processTrue(final TrueContext ctx) {
final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx);
final Object postConst = trueemd.postConst;
final Branch branch = utility.getBranch(ctx);
if (branch == null) {
if (postConst == null) {
utility.writeBoolean(ctx, true);
caster.checkWriteCast(trueemd);
} else {
utility.writeConstant(ctx, postConst);
}
} else if (branch.tru != null) {
execute.goTo(branch.tru);
}
}
// Writes the "false" literal; mirror image of processTrue using the false
// label.
void processFalse(final FalseContext ctx) {
final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx);
final Object postConst = falseemd.postConst;
final Branch branch = utility.getBranch(ctx);
if (branch == null) {
if (postConst == null) {
utility.writeBoolean(ctx, false);
caster.checkWriteCast(falseemd);
} else {
utility.writeConstant(ctx, postConst);
}
} else if (branch.fals != null) {
execute.goTo(branch.fals);
}
}
// Writes the "null" literal (ACONST_NULL) plus pending cast/branch.
void processNull(final NullContext ctx) {
final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx);
execute.visitInsn(Opcodes.ACONST_NULL);
caster.checkWriteCast(nullemd);
utility.checkWriteBranch(ctx);
}
// Delegates an external chain (field/method/array access) to the main
// writer, then applies pending cast/branch.
void processExternal(final ExternalContext ctx) {
final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx);
writer.visit(ctx.extstart());
caster.checkWriteCast(expremd);
utility.checkWriteBranch(ctx);
}
// Post-increment: the extstart chain carries the actual read/write; this
// only finishes with cast/branch.
void processPostinc(final PostincContext ctx) {
final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx);
writer.visit(ctx.extstart());
caster.checkWriteCast(expremd);
utility.checkWriteBranch(ctx);
}
// Pre-increment: identical emission to post-increment at this level.
void processPreinc(final PreincContext ctx) {
final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx);
writer.visit(ctx.extstart());
caster.checkWriteCast(expremd);
utility.checkWriteBranch(ctx);
}
// Unary operators: boolean not (BOOLNOT), bitwise not (BWNOT, emitted as
// XOR with -1 for primitives), arithmetic negation (SUB), and unary plus
// (ADD, a no-op). Constant-folded results short-circuit the emission.
void processUnary(final UnaryContext ctx) {
final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx);
final Object postConst = unaryemd.postConst;
final Object preConst = unaryemd.preConst;
final Branch branch = utility.getBranch(ctx);
if (postConst != null) {
if (ctx.BOOLNOT() != null) {
if (branch == null) {
utility.writeConstant(ctx, postConst);
} else {
if ((boolean)postConst && branch.tru != null) {
execute.goTo(branch.tru);
} else if (!(boolean)postConst && branch.fals != null) {
execute.goTo(branch.fals);
}
}
} else {
utility.writeConstant(ctx, postConst);
utility.checkWriteBranch(ctx);
}
} else if (preConst != null) {
if (branch == null) {
utility.writeConstant(ctx, preConst);
caster.checkWriteCast(unaryemd);
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
} else {
final ExpressionContext exprctx = ctx.expression();
if (ctx.BOOLNOT() != null) {
// Boolean not is compiled by swapping the child's true/false
// branch targets; with no pending branch, materialize the value.
final Branch local = utility.markBranch(ctx, exprctx);
if (branch == null) {
local.fals = new Label();
final Label aend = new Label();
writer.visit(exprctx);
execute.push(false);
execute.goTo(aend);
execute.mark(local.fals);
execute.push(true);
execute.mark(aend);
caster.checkWriteCast(unaryemd);
} else {
local.tru = branch.fals;
local.fals = branch.tru;
writer.visit(exprctx);
}
} else {
final org.objectweb.asm.Type type = unaryemd.from.type;
final Sort sort = unaryemd.from.sort;
writer.visit(exprctx);
if (ctx.BWNOT() != null) {
if (sort == Sort.DEF) {
execute.invokeStatic(definition.defobjType.type, DEF_NOT_CALL);
} else {
// ~x is emitted as x ^ -1.
if (sort == Sort.INT) {
utility.writeConstant(ctx, -1);
} else if (sort == Sort.LONG) {
utility.writeConstant(ctx, -1L);
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
execute.math(GeneratorAdapter.XOR, type);
}
} else if (ctx.SUB() != null) {
if (sort == Sort.DEF) {
execute.invokeStatic(definition.defobjType.type, DEF_NEG_CALL);
} else {
// With overflow allowed use plain NEG; otherwise call the
// exact (throwing) Math variants.
if (settings.getNumericOverflow()) {
execute.math(GeneratorAdapter.NEG, type);
} else {
if (sort == Sort.INT) {
execute.invokeStatic(definition.mathType.type, NEGATEEXACT_INT);
} else if (sort == Sort.LONG) {
execute.invokeStatic(definition.mathType.type, NEGATEEXACT_LONG);
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
}
}
} else if (ctx.ADD() == null) {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
caster.checkWriteCast(unaryemd);
utility.checkWriteBranch(ctx);
}
}
}
// Explicit cast: emit the child expression, then the cast, unless the whole
// thing folded to a constant.
void processCast(final CastContext ctx) {
final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx);
final Object postConst = castemd.postConst;
if (postConst == null) {
writer.visit(ctx.expression());
caster.checkWriteCast(castemd);
} else {
utility.writeConstant(ctx, postConst);
}
utility.checkWriteBranch(ctx);
}
// Binary operators. String '+' is special-cased to build a StringBuilder
// chain; everything else emits both operands then one arithmetic/bitwise
// instruction chosen from the operator token.
void processBinary(final BinaryContext ctx) {
final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx);
final Object postConst = binaryemd.postConst;
final Object preConst = binaryemd.preConst;
final Branch branch = utility.getBranch(ctx);
if (postConst != null) {
utility.writeConstant(ctx, postConst);
} else if (preConst != null) {
if (branch == null) {
utility.writeConstant(ctx, preConst);
caster.checkWriteCast(binaryemd);
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
} else if (binaryemd.from.sort == Sort.STRING) {
// "marked" means an enclosing concatenation already created the
// StringBuilder; only the outermost node creates and finishes it.
final boolean marked = utility.containsStrings(ctx);
if (!marked) {
utility.writeNewStrings();
}
final ExpressionContext exprctx0 = ctx.expression(0);
final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0);
utility.addStrings(exprctx0);
writer.visit(exprctx0);
if (utility.containsStrings(exprctx0)) {
utility.writeAppendStrings(expremd0.from.sort);
utility.removeStrings(exprctx0);
}
final ExpressionContext exprctx1 = ctx.expression(1);
final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1);
utility.addStrings(exprctx1);
writer.visit(exprctx1);
if (utility.containsStrings(exprctx1)) {
utility.writeAppendStrings(expremd1.from.sort);
utility.removeStrings(exprctx1);
}
if (marked) {
utility.removeStrings(ctx);
} else {
utility.writeToStrings();
}
caster.checkWriteCast(binaryemd);
} else {
final ExpressionContext exprctx0 = ctx.expression(0);
final ExpressionContext exprctx1 = ctx.expression(1);
writer.visit(exprctx0);
writer.visit(exprctx1);
final Type type = binaryemd.from;
if (ctx.MUL() != null) utility.writeBinaryInstruction(ctx, type, MUL);
else if (ctx.DIV() != null) utility.writeBinaryInstruction(ctx, type, DIV);
else if (ctx.REM() != null) utility.writeBinaryInstruction(ctx, type, REM);
else if (ctx.ADD() != null) utility.writeBinaryInstruction(ctx, type, ADD);
else if (ctx.SUB() != null) utility.writeBinaryInstruction(ctx, type, SUB);
else if (ctx.LSH() != null) utility.writeBinaryInstruction(ctx, type, LSH);
else if (ctx.USH() != null) utility.writeBinaryInstruction(ctx, type, USH);
else if (ctx.RSH() != null) utility.writeBinaryInstruction(ctx, type, RSH);
else if (ctx.BWAND() != null) utility.writeBinaryInstruction(ctx, type, BWAND);
else if (ctx.BWXOR() != null) utility.writeBinaryInstruction(ctx, type, BWXOR);
else if (ctx.BWOR() != null) utility.writeBinaryInstruction(ctx, type, BWOR);
else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
caster.checkWriteCast(binaryemd);
}
utility.checkWriteBranch(ctx);
}
// Comparison operators (==, !=, <, <=, >, >=, and the reference variants
// EQR/NER). Dispatches on the right operand's sort: primitives use JVM
// compare-and-jump, def uses runtime helper calls, references use
// checkEquals/reference comparison. The eq/ne/lt/... flags fold in branch
// inversion when jumping to a false label.
void processComp(final CompContext ctx) {
final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx);
final Object postConst = compemd.postConst;
final Object preConst = compemd.preConst;
final Branch branch = utility.getBranch(ctx);
if (postConst != null) {
if (branch == null) {
utility.writeConstant(ctx, postConst);
} else {
if ((boolean)postConst && branch.tru != null) {
execute.mark(branch.tru);
} else if (!(boolean)postConst && branch.fals != null) {
execute.mark(branch.fals);
}
}
} else if (preConst != null) {
if (branch == null) {
utility.writeConstant(ctx, preConst);
caster.checkWriteCast(compemd);
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
} else {
final ExpressionContext exprctx0 = ctx.expression(0);
final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0);
final ExpressionContext exprctx1 = ctx.expression(1);
final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1);
final org.objectweb.asm.Type type = expremd1.to.type;
final Sort sort1 = expremd1.to.sort;
writer.visit(exprctx0);
// Comparisons against the null literal use ifNull/ifNonNull and never
// load the right operand.
if (!expremd1.isNull) {
writer.visit(exprctx1);
}
final boolean tru = branch != null && branch.tru != null;
final boolean fals = branch != null && branch.fals != null;
final Label jump = tru ? branch.tru : fals ? branch.fals : new Label();
final Label end = new Label();
final boolean eq = (ctx.EQ() != null || ctx.EQR() != null) && (tru || !fals) ||
(ctx.NE() != null || ctx.NER() != null) && fals;
final boolean ne = (ctx.NE() != null || ctx.NER() != null) && (tru || !fals) ||
(ctx.EQ() != null || ctx.EQR() != null) && fals;
final boolean lt = ctx.LT() != null && (tru || !fals) || ctx.GTE() != null && fals;
final boolean lte = ctx.LTE() != null && (tru || !fals) || ctx.GT() != null && fals;
final boolean gt = ctx.GT() != null && (tru || !fals) || ctx.LTE() != null && fals;
final boolean gte = ctx.GTE() != null && (tru || !fals) || ctx.LT() != null && fals;
boolean writejump = true;
switch (sort1) {
case VOID:
case BYTE:
case SHORT:
case CHAR:
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
case BOOL:
if (eq) execute.ifZCmp(GeneratorAdapter.EQ, jump);
else if (ne) execute.ifZCmp(GeneratorAdapter.NE, jump);
else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
break;
case INT:
case LONG:
case FLOAT:
case DOUBLE:
if (eq) execute.ifCmp(type, GeneratorAdapter.EQ, jump);
else if (ne) execute.ifCmp(type, GeneratorAdapter.NE, jump);
else if (lt) execute.ifCmp(type, GeneratorAdapter.LT, jump);
else if (lte) execute.ifCmp(type, GeneratorAdapter.LE, jump);
else if (gt) execute.ifCmp(type, GeneratorAdapter.GT, jump);
else if (gte) execute.ifCmp(type, GeneratorAdapter.GE, jump);
else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
break;
case DEF:
if (eq) {
if (expremd1.isNull) {
execute.ifNull(jump);
} else if (!expremd0.isNull && ctx.EQ() != null) {
execute.invokeStatic(definition.defobjType.type, DEF_EQ_CALL);
} else {
execute.ifCmp(type, GeneratorAdapter.EQ, jump);
}
} else if (ne) {
if (expremd1.isNull) {
execute.ifNonNull(jump);
} else if (!expremd0.isNull && ctx.NE() != null) {
execute.invokeStatic(definition.defobjType.type, DEF_EQ_CALL);
execute.ifZCmp(GeneratorAdapter.EQ, jump);
} else {
execute.ifCmp(type, GeneratorAdapter.NE, jump);
}
} else if (lt) {
execute.invokeStatic(definition.defobjType.type, DEF_LT_CALL);
} else if (lte) {
execute.invokeStatic(definition.defobjType.type, DEF_LTE_CALL);
} else if (gt) {
execute.invokeStatic(definition.defobjType.type, DEF_GT_CALL);
} else if (gte) {
execute.invokeStatic(definition.defobjType.type, DEF_GTE_CALL);
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
// def helper calls leave a boolean on the stack; only some paths
// still need the explicit jump/materialization below.
writejump = expremd1.isNull || ne || ctx.EQR() != null;
if (branch != null && !writejump) {
execute.ifZCmp(GeneratorAdapter.NE, jump);
}
break;
default:
if (eq) {
if (expremd1.isNull) {
execute.ifNull(jump);
} else if (ctx.EQ() != null) {
execute.invokeStatic(definition.utilityType.type, CHECKEQUALS);
if (branch != null) {
execute.ifZCmp(GeneratorAdapter.NE, jump);
}
writejump = false;
} else {
execute.ifCmp(type, GeneratorAdapter.EQ, jump);
}
} else if (ne) {
if (expremd1.isNull) {
execute.ifNonNull(jump);
} else if (ctx.NE() != null) {
execute.invokeStatic(definition.utilityType.type, CHECKEQUALS);
execute.ifZCmp(GeneratorAdapter.EQ, jump);
} else {
execute.ifCmp(type, GeneratorAdapter.NE, jump);
}
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
}
// No pending branch: materialize the boolean result on the stack.
if (branch == null) {
if (writejump) {
execute.push(false);
execute.goTo(end);
execute.mark(jump);
execute.push(true);
execute.mark(end);
}
caster.checkWriteCast(compemd);
}
}
}
// Short-circuit && and ||, compiled via branch-label threading: with no
// pending branch the result is materialized as a boolean; with one, the
// child branches are wired directly to the caller's labels.
void processBool(final BoolContext ctx) {
final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx);
final Object postConst = boolemd.postConst;
final Object preConst = boolemd.preConst;
final Branch branch = utility.getBranch(ctx);
if (postConst != null) {
if (branch == null) {
utility.writeConstant(ctx, postConst);
} else {
if ((boolean)postConst && branch.tru != null) {
execute.mark(branch.tru);
} else if (!(boolean)postConst && branch.fals != null) {
execute.mark(branch.fals);
}
}
} else if (preConst != null) {
if (branch == null) {
utility.writeConstant(ctx, preConst);
caster.checkWriteCast(boolemd);
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
} else {
final ExpressionContext exprctx0 = ctx.expression(0);
final ExpressionContext exprctx1 = ctx.expression(1);
if (branch == null) {
if (ctx.BOOLAND() != null) {
final Branch local = utility.markBranch(ctx, exprctx0, exprctx1);
local.fals = new Label();
final Label end = new Label();
writer.visit(exprctx0);
writer.visit(exprctx1);
execute.push(true);
execute.goTo(end);
execute.mark(local.fals);
execute.push(false);
execute.mark(end);
} else if (ctx.BOOLOR() != null) {
final Branch branch0 = utility.markBranch(ctx, exprctx0);
branch0.tru = new Label();
final Branch branch1 = utility.markBranch(ctx, exprctx1);
branch1.fals = new Label();
final Label aend = new Label();
writer.visit(exprctx0);
writer.visit(exprctx1);
execute.mark(branch0.tru);
execute.push(true);
execute.goTo(aend);
execute.mark(branch1.fals);
execute.push(false);
execute.mark(aend);
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
caster.checkWriteCast(boolemd);
} else {
if (ctx.BOOLAND() != null) {
final Branch branch0 = utility.markBranch(ctx, exprctx0);
branch0.fals = branch.fals == null ? new Label() : branch.fals;
final Branch branch1 = utility.markBranch(ctx, exprctx1);
branch1.tru = branch.tru;
branch1.fals = branch.fals;
writer.visit(exprctx0);
writer.visit(exprctx1);
if (branch.fals == null) {
execute.mark(branch0.fals);
}
} else if (ctx.BOOLOR() != null) {
final Branch branch0 = utility.markBranch(ctx, exprctx0);
branch0.tru = branch.tru == null ? new Label() : branch.tru;
final Branch branch1 = utility.markBranch(ctx, exprctx1);
branch1.tru = branch.tru;
branch1.fals = branch.fals;
writer.visit(exprctx0);
writer.visit(exprctx1);
if (branch.tru == null) {
execute.mark(branch0.tru);
}
} else {
throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
}
}
}
}
// Ternary ?: — the condition's false label skips over the true-branch
// expression to the false-branch one; the result cast applies only when no
// outer branch consumes the value.
void processConditional(final ConditionalContext ctx) {
final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx);
final Branch branch = utility.getBranch(ctx);
final ExpressionContext expr0 = ctx.expression(0);
final ExpressionContext expr1 = ctx.expression(1);
final ExpressionContext expr2 = ctx.expression(2);
final Branch local = utility.markBranch(ctx, expr0);
local.fals = new Label();
local.end = new Label();
if (branch != null) {
utility.copyBranch(branch, expr1, expr2);
}
writer.visit(expr0);
writer.visit(expr1);
execute.goTo(local.end);
execute.mark(local.fals);
writer.visit(expr2);
execute.mark(local.end);
if (branch == null) {
caster.checkWriteCast(condemd);
}
}
// Assignment: the extstart chain performs the store; this finishes with
// cast/branch like the other external-chain nodes.
void processAssignment(final AssignmentContext ctx) {
final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx);
writer.visit(ctx.extstart());
caster.checkWriteCast(expremd);
utility.checkWriteBranch(ctx);
}
// Increment (+=/-= style numeric node): same constant-vs-preConst pattern
// as processNumeric.
void processIncrement(final IncrementContext ctx) {
final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx);
final Object postConst = incremd.postConst;
if (postConst == null) {
utility.writeNumeric(ctx, incremd.preConst);
caster.checkWriteCast(incremd);
} else {
utility.writeConstant(ctx, postConst);
}
utility.checkWriteBranch(ctx);
}
}

View File

@ -1,720 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Constructor;
import org.elasticsearch.painless.Definition.Field;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Metadata.ExpressionMetadata;
import org.elasticsearch.painless.Metadata.ExtNodeMetadata;
import org.elasticsearch.painless.Metadata.ExternalMetadata;
import org.elasticsearch.painless.PainlessParser.ExpressionContext;
import org.elasticsearch.painless.PainlessParser.ExtbraceContext;
import org.elasticsearch.painless.PainlessParser.ExtcallContext;
import org.elasticsearch.painless.PainlessParser.ExtcastContext;
import org.elasticsearch.painless.PainlessParser.ExtdotContext;
import org.elasticsearch.painless.PainlessParser.ExtfieldContext;
import org.elasticsearch.painless.PainlessParser.ExtnewContext;
import org.elasticsearch.painless.PainlessParser.ExtprecContext;
import org.elasticsearch.painless.PainlessParser.ExtstartContext;
import org.elasticsearch.painless.PainlessParser.ExtstringContext;
import org.elasticsearch.painless.PainlessParser.ExtvarContext;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.List;
import static org.elasticsearch.painless.PainlessParser.ADD;
import static org.elasticsearch.painless.PainlessParser.DIV;
import static org.elasticsearch.painless.PainlessParser.MUL;
import static org.elasticsearch.painless.PainlessParser.REM;
import static org.elasticsearch.painless.PainlessParser.SUB;
import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_INT;
import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOFLOATWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOINTEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_FLOAT;
/**
 * Writes JVM bytecode for Painless "external" expressions -- chains built from
 * the grammar's ext* rules such as {@code a.b[i].c(x)} -- covering variable,
 * field, array and shortcut (getter/setter) loads and stores, method and
 * constructor calls, dynamic (def) dispatch via invokedynamic, and the
 * string-concatenation and compound-assignment special cases applied to the
 * last link of a chain.
 */
class WriterExternal {
    private final Metadata metadata;
    private final Definition definition;
    private final CompilerSettings settings;
    private final GeneratorAdapter execute;
    private final Writer writer;
    private final WriterUtility utility;
    private final WriterCaster caster;

    WriterExternal(final Metadata metadata, final GeneratorAdapter execute, final Writer writer,
                   final WriterUtility utility, final WriterCaster caster) {
        this.metadata = metadata;
        definition = metadata.definition;
        settings = metadata.settings;
        this.execute = execute;
        this.writer = writer;
        this.utility = utility;
        this.caster = caster;
    }

    /**
     * Entry point for an external chain.  When the chain is the target of a
     * {@code +=} where either side is a String, a string builder is prepared
     * before the chain itself is visited.
     */
    void processExtstart(final ExtstartContext ctx) {
        final ExternalMetadata startemd = metadata.getExternalMetadata(ctx);

        if (startemd.token == ADD) {
            final ExpressionMetadata storeemd = metadata.getExpressionMetadata(startemd.storeExpr);

            if (startemd.current.sort == Sort.STRING || storeemd.from.sort == Sort.STRING) {
                utility.writeNewStrings();
                utility.addStrings(startemd.storeExpr);
            }
        }

        // Exactly one alternative of the extstart rule is present; visit it.
        final ExtprecContext precctx = ctx.extprec();
        final ExtcastContext castctx = ctx.extcast();
        final ExtvarContext varctx = ctx.extvar();
        final ExtnewContext newctx = ctx.extnew();
        final ExtstringContext stringctx = ctx.extstring();

        if (precctx != null) {
            writer.visit(precctx);
        } else if (castctx != null) {
            writer.visit(castctx);
        } else if (varctx != null) {
            writer.visit(varctx);
        } else if (newctx != null) {
            writer.visit(newctx);
        } else if (stringctx != null) {
            writer.visit(stringctx);
        } else {
            throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
        }
    }

    /**
     * Parenthesized chain link: visit the wrapped alternative, then any
     * trailing dot or brace continuation.
     */
    void processExtprec(final ExtprecContext ctx) {
        final ExtprecContext precctx = ctx.extprec();
        final ExtcastContext castctx = ctx.extcast();
        final ExtvarContext varctx = ctx.extvar();
        final ExtnewContext newctx = ctx.extnew();
        final ExtstringContext stringctx = ctx.extstring();

        if (precctx != null) {
            writer.visit(precctx);
        } else if (castctx != null) {
            writer.visit(castctx);
        } else if (varctx != null) {
            writer.visit(varctx);
        } else if (newctx != null) {
            writer.visit(newctx);
        } else if (stringctx != null) {
            writer.visit(stringctx);
        } else {
            throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
        }

        final ExtdotContext dotctx = ctx.extdot();
        final ExtbraceContext bracectx = ctx.extbrace();

        if (dotctx != null) {
            writer.visit(dotctx);
        } else if (bracectx != null) {
            writer.visit(bracectx);
        }
    }

    /**
     * Explicit cast link: visit the wrapped alternative, then emit the cast
     * recorded during analysis.
     */
    void processExtcast(final ExtcastContext ctx) {
        ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx);

        final ExtprecContext precctx = ctx.extprec();
        final ExtcastContext castctx = ctx.extcast();
        final ExtvarContext varctx = ctx.extvar();
        final ExtnewContext newctx = ctx.extnew();
        final ExtstringContext stringctx = ctx.extstring();

        if (precctx != null) {
            writer.visit(precctx);
        } else if (castctx != null) {
            writer.visit(castctx);
        } else if (varctx != null) {
            writer.visit(varctx);
        } else if (newctx != null) {
            writer.visit(newctx);
        } else if (stringctx != null) {
            writer.visit(stringctx);
        } else {
            throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
        }

        caster.checkWriteCast(ctx, castenmd.castTo);
    }

    /**
     * Array/map brace access: evaluate the index expression, perform the
     * load/store, then continue down the chain.
     */
    void processExtbrace(final ExtbraceContext ctx) {
        final ExpressionContext exprctx = ctx.expression();

        writer.visit(exprctx);
        writeLoadStoreExternal(ctx);

        final ExtdotContext dotctx = ctx.extdot();
        final ExtbraceContext bracectx = ctx.extbrace();

        if (dotctx != null) {
            writer.visit(dotctx);
        } else if (bracectx != null) {
            writer.visit(bracectx);
        }
    }

    /** Dot link: dispatches to either a method call or a field access. */
    void processExtdot(final ExtdotContext ctx) {
        final ExtcallContext callctx = ctx.extcall();
        final ExtfieldContext fieldctx = ctx.extfield();

        if (callctx != null) {
            writer.visit(callctx);
        } else if (fieldctx != null) {
            writer.visit(fieldctx);
        }
    }

    /** Method-call link: emit the call, then continue down the chain. */
    void processExtcall(final ExtcallContext ctx) {
        writeCallExternal(ctx);

        final ExtdotContext dotctx = ctx.extdot();
        final ExtbraceContext bracectx = ctx.extbrace();

        if (dotctx != null) {
            writer.visit(dotctx);
        } else if (bracectx != null) {
            writer.visit(bracectx);
        }
    }

    /** Variable link: load/store the local, then continue down the chain. */
    void processExtvar(final ExtvarContext ctx) {
        writeLoadStoreExternal(ctx);

        final ExtdotContext dotctx = ctx.extdot();
        final ExtbraceContext bracectx = ctx.extbrace();

        if (dotctx != null) {
            writer.visit(dotctx);
        } else if (bracectx != null) {
            writer.visit(bracectx);
        }
    }

    /** Field link: load/store the field, then continue down the chain. */
    void processExtfield(final ExtfieldContext ctx) {
        writeLoadStoreExternal(ctx);

        final ExtdotContext dotctx = ctx.extdot();
        final ExtbraceContext bracectx = ctx.extbrace();

        if (dotctx != null) {
            writer.visit(dotctx);
        } else if (bracectx != null) {
            writer.visit(bracectx);
        }
    }

    /** Constructor / new-array link; only a dot may follow a new expression. */
    void processExtnew(final ExtnewContext ctx) {
        writeNewExternal(ctx);

        final ExtdotContext dotctx = ctx.extdot();

        if (dotctx != null) {
            writer.visit(dotctx);
        }
    }

    /** String-constant link: push the constant, then continue down the chain. */
    void processExtstring(final ExtstringContext ctx) {
        final ExtNodeMetadata stringenmd = metadata.getExtNodeMetadata(ctx);

        utility.writeConstant(ctx, stringenmd.target);

        final ExtdotContext dotctx = ctx.extdot();
        final ExtbraceContext bracectx = ctx.extbrace();

        if (dotctx != null) {
            writer.visit(dotctx);
        } else if (bracectx != null) {
            writer.visit(bracectx);
        }
    }

    /**
     * Writes a load and/or store for one chain link.  Handles plain loads,
     * plain stores, string-concatenation compound assignments, and arithmetic
     * compound assignments (including pre/post increments), duplicating stack
     * values as needed when the surrounding expression reads the result.
     */
    private void writeLoadStoreExternal(final ParserRuleContext source) {
        final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source);
        final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent);

        if (sourceenmd.target == null) {
            return;
        }

        // Decode the kind of target the analyzer resolved this link to.
        final boolean length = "#length".equals(sourceenmd.target);
        final boolean array = "#brace".equals(sourceenmd.target);
        final boolean name = sourceenmd.target instanceof String && !length && !array;
        final boolean variable = sourceenmd.target instanceof Integer;
        final boolean field = sourceenmd.target instanceof Field;
        final boolean shortcut = sourceenmd.target instanceof Object[];

        if (!length && !variable && !field && !array && !name && !shortcut) {
            throw new IllegalStateException(WriterUtility.error(source) + "Target not found for load/store.");
        }

        // Shortcut targets are an Object[] of {getter, setter, maplist flag, optional constant key}.
        final boolean maplist = shortcut && (boolean)((Object[])sourceenmd.target)[2];
        final Object constant = shortcut ? ((Object[])sourceenmd.target)[3] : null;

        // x1/x2 pick the dup flavor -- presumably how deep under the stack top the
        // duplicated value must go (one slot for field-like, two for indexed) -- TODO confirm
        // against WriterUtility.writeDup.
        final boolean x1 = field || name || (shortcut && !maplist);
        final boolean x2 = array || (shortcut && maplist);

        if (length) {
            execute.arrayLength();
        } else if (sourceenmd.last && parentemd.storeExpr != null) {
            final ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr);
            final boolean cat = utility.containsStrings(parentemd.storeExpr);

            if (cat) {
                // String-concatenation compound assignment.
                if (maplist && constant != null) {
                    utility.writeConstant(source, constant);
                }

                if (field || name || (shortcut && !maplist)) {
                    execute.dupX1();
                } else if (array || maplist) {
                    execute.dup2X1();
                }

                writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut);
                utility.writeAppendStrings(sourceenmd.type.sort);
                writer.visit(parentemd.storeExpr);

                if (utility.containsStrings(parentemd.storeExpr)) {
                    utility.writeAppendStrings(expremd.to.sort);
                    utility.removeStrings(parentemd.storeExpr);
                }

                utility.writeToStrings();
                caster.checkWriteCast(source, sourceenmd.castTo);

                if (parentemd.read) {
                    utility.writeDup(sourceenmd.type.sort.size, x1, x2);
                }

                writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut);
            } else if (parentemd.token > 0) {
                // Arithmetic compound assignment or increment/decrement.
                final int token = parentemd.token;

                if (maplist && constant != null) {
                    utility.writeConstant(source, constant);
                }

                if (field || name || (shortcut && !maplist)) {
                    execute.dup();
                } else if (array || maplist) {
                    execute.dup2();
                }

                writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut);

                if (parentemd.read && parentemd.post) {
                    // Post-op read: duplicate the original value before operating.
                    utility.writeDup(sourceenmd.type.sort.size, x1, x2);
                }

                caster.checkWriteCast(source, sourceenmd.castFrom);
                writer.visit(parentemd.storeExpr);

                utility.writeBinaryInstruction(source, sourceenmd.promote, token);

                boolean exact = false;

                // With overflow checking enabled, try an exact narrowing instruction
                // instead of the ordinary cast back to the original type.
                if (!settings.getNumericOverflow() && expremd.typesafe && sourceenmd.type.sort != Sort.DEF &&
                    (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)) {
                    exact = writeExactInstruction(sourceenmd.type.sort, sourceenmd.promote.sort);
                }

                if (!exact) {
                    caster.checkWriteCast(source, sourceenmd.castTo);
                }

                if (parentemd.read && !parentemd.post) {
                    // Pre-op read: duplicate the computed value after operating.
                    utility.writeDup(sourceenmd.type.sort.size, x1, x2);
                }

                writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut);
            } else {
                // Simple assignment.
                if (constant != null) {
                    utility.writeConstant(source, constant);
                }

                writer.visit(parentemd.storeExpr);

                if (parentemd.read) {
                    utility.writeDup(sourceenmd.type.sort.size, x1, x2);
                }

                writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut);
            }
        } else {
            // Plain load (or an interior link of a longer chain).
            if (constant != null) {
                utility.writeConstant(source, constant);
            }

            writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut);
        }
    }

    /**
     * Dispatches a single load or store to the appropriate emitter based on
     * the resolved target kind.
     */
    private void writeLoadStoreInstruction(final ParserRuleContext source,
                                           final boolean store, final boolean variable,
                                           final boolean field, final boolean name,
                                           final boolean array, final boolean shortcut) {
        final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source);

        if (variable) {
            writeLoadStoreVariable(source, store, sourceemd.type, (int)sourceemd.target);
        } else if (field) {
            writeLoadStoreField(store, (Field)sourceemd.target);
        } else if (name) {
            writeLoadStoreField(source, store, (String)sourceemd.target);
        } else if (array) {
            writeLoadStoreArray(source, store, sourceemd.type);
        } else if (shortcut) {
            Object[] targets = (Object[])sourceemd.target;
            writeLoadStoreShortcut(store, (Method)targets[0], (Method)targets[1]);
        } else {
            throw new IllegalStateException(WriterUtility.error(source) + "Load/Store requires a variable, field, or array.");
        }
    }

    /** Loads or stores a local variable slot using the type-appropriate opcode. */
    private void writeLoadStoreVariable(final ParserRuleContext source, final boolean store, final Type type, int slot) {
        if (type.sort == Sort.VOID) {
            throw new IllegalStateException(WriterUtility.error(source) + "Cannot load/store void type.");
        }

        if (store) {
            execute.visitVarInsn(type.type.getOpcode(Opcodes.ISTORE), slot);
        } else {
            execute.visitVarInsn(type.type.getOpcode(Opcodes.ILOAD), slot);
        }
    }

    /**
     * Loads or stores a statically-resolved field (static or instance).  On a
     * load, a checkcast is emitted when the generic type narrows the raw type.
     */
    private void writeLoadStoreField(final boolean store, final Field field) {
        if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) {
            if (store) {
                execute.putStatic(field.owner.type, field.reflect.getName(), field.type.type);
            } else {
                execute.getStatic(field.owner.type, field.reflect.getName(), field.type.type);

                if (!field.generic.clazz.equals(field.type.clazz)) {
                    execute.checkCast(field.generic.type);
                }
            }
        } else {
            if (store) {
                execute.putField(field.owner.type, field.reflect.getName(), field.type.type);
            } else {
                execute.getField(field.owner.type, field.reflect.getName(), field.type.type);

                if (!field.generic.clazz.equals(field.type.clazz)) {
                    execute.checkCast(field.generic.type);
                }
            }
        }
    }

    /**
     * Loads or stores a dynamically-resolved (def) field via invokedynamic.
     * NOTE(review): the {@code source} parameter is unused here -- kept for
     * signature symmetry with the other load/store helpers.
     */
    private void writeLoadStoreField(final ParserRuleContext source, final boolean store, final String name) {
        if (store) {
            execute.visitInvokeDynamicInsn(name, WriterConstants.DEF_DYNAMIC_STORE_FIELD_DESC,
                WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.STORE });
        } else {
            execute.visitInvokeDynamicInsn(name, WriterConstants.DEF_DYNAMIC_LOAD_FIELD_DESC,
                WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.LOAD });
        }
    }

    /**
     * Loads or stores an array element.  def arrays dispatch dynamically via
     * invokedynamic; typed arrays use the plain array opcodes.
     */
    private void writeLoadStoreArray(final ParserRuleContext source, final boolean store, final Type type) {
        if (type.sort == Sort.VOID) {
            throw new IllegalStateException(WriterUtility.error(source) + "Cannot load/store void type.");
        }

        if (type.sort == Sort.DEF) {
            if (store) {
                execute.visitInvokeDynamicInsn("arrayStore", WriterConstants.DEF_DYNAMIC_ARRAY_STORE_DESC,
                    WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.ARRAY_STORE });
            } else {
                execute.visitInvokeDynamicInsn("arrayLoad", WriterConstants.DEF_DYNAMIC_ARRAY_LOAD_DESC,
                    WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.ARRAY_LOAD });
            }
        } else {
            if (store) {
                execute.arrayStore(type.type);
            } else {
                execute.arrayLoad(type.type);
            }
        }
    }

    /**
     * Loads or stores through a getter/setter shortcut.  A store pops the
     * setter's return value; a load adds a checkcast when the handle's return
     * type is wider than the declared return type.
     */
    private void writeLoadStoreShortcut(final boolean store, final Method getter, final Method setter) {
        final Method method = store ? setter : getter;

        if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) {
            execute.invokeInterface(method.owner.type, method.method);
        } else {
            execute.invokeVirtual(method.owner.type, method.method);
        }

        if (store) {
            utility.writePop(method.rtn.type.getSize());
        } else if (!method.rtn.clazz.equals(method.handle.type().returnType())) {
            execute.checkCast(method.rtn.type);
        }
    }

    /**
     * Called for any compound assignment (including increment/decrement instructions).
     * We have to be stricter than writeBinary, and do overflow checks against the original type's size
     * instead of the promoted type's size, since the result will be implicitly cast back.
     *
     * @param osort the sort of the original (narrower) type being cast back to
     * @param psort the sort of the promoted type the operation was performed in
     * @return This will be true if an instruction is written, false otherwise.
     */
    private boolean writeExactInstruction(final Sort osort, final Sort psort) {
        if (psort == Sort.DOUBLE) {
            if (osort == Sort.FLOAT) {
                execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE);
            } else if (osort == Sort.FLOAT_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE);
                execute.checkCast(definition.floatobjType.type);
            } else if (osort == Sort.LONG) {
                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE);
            } else if (osort == Sort.LONG_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE);
                execute.checkCast(definition.longobjType.type);
            } else if (osort == Sort.INT) {
                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE);
            } else if (osort == Sort.INT_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE);
                execute.checkCast(definition.intobjType.type);
            } else if (osort == Sort.CHAR) {
                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE);
            } else if (osort == Sort.CHAR_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE);
                execute.checkCast(definition.charobjType.type);
            } else if (osort == Sort.SHORT) {
                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE);
            } else if (osort == Sort.SHORT_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE);
                execute.checkCast(definition.shortobjType.type);
            } else if (osort == Sort.BYTE) {
                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE);
            } else if (osort == Sort.BYTE_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE);
                execute.checkCast(definition.byteobjType.type);
            } else {
                return false;
            }
        } else if (psort == Sort.FLOAT) {
            if (osort == Sort.LONG) {
                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT);
            } else if (osort == Sort.LONG_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT);
                execute.checkCast(definition.longobjType.type);
            } else if (osort == Sort.INT) {
                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT);
            } else if (osort == Sort.INT_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT);
                execute.checkCast(definition.intobjType.type);
            } else if (osort == Sort.CHAR) {
                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT);
            } else if (osort == Sort.CHAR_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT);
                execute.checkCast(definition.charobjType.type);
            } else if (osort == Sort.SHORT) {
                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT);
            } else if (osort == Sort.SHORT_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT);
                execute.checkCast(definition.shortobjType.type);
            } else if (osort == Sort.BYTE) {
                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
            } else if (osort == Sort.BYTE_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
                execute.checkCast(definition.byteobjType.type);
            } else {
                return false;
            }
        } else if (psort == Sort.LONG) {
            if (osort == Sort.INT) {
                // int-from-long uses Math.toIntExact; the remaining narrowings use utility helpers.
                execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
            } else if (osort == Sort.INT_OBJ) {
                execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
                execute.checkCast(definition.intobjType.type);
            } else if (osort == Sort.CHAR) {
                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
            } else if (osort == Sort.CHAR_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
                execute.checkCast(definition.charobjType.type);
            } else if (osort == Sort.SHORT) {
                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
            } else if (osort == Sort.SHORT_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
                execute.checkCast(definition.shortobjType.type);
            } else if (osort == Sort.BYTE) {
                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
            } else if (osort == Sort.BYTE_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
                execute.checkCast(definition.byteobjType.type);
            } else {
                return false;
            }
        } else if (psort == Sort.INT) {
            if (osort == Sort.CHAR) {
                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
            } else if (osort == Sort.CHAR_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
                execute.checkCast(definition.charobjType.type);
            } else if (osort == Sort.SHORT) {
                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
            } else if (osort == Sort.SHORT_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
                execute.checkCast(definition.shortobjType.type);
            } else if (osort == Sort.BYTE) {
                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
            } else if (osort == Sort.BYTE_OBJ) {
                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
                execute.checkCast(definition.byteobjType.type);
            } else {
                return false;
            }
        } else {
            return false;
        }

        return true;
    }

    /**
     * Writes a new-array or constructor call.  Arrays evaluate all dimension
     * expressions first; constructors dup the uninitialized instance when the
     * surrounding expression reads the result.
     */
    private void writeNewExternal(final ExtnewContext source) {
        final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source);
        final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent);

        final boolean makearray = "#makearray".equals(sourceenmd.target);
        final boolean constructor = sourceenmd.target instanceof Constructor;

        if (!makearray && !constructor) {
            throw new IllegalStateException(WriterUtility.error(source) + "Target not found for new call.");
        }

        if (makearray) {
            for (final ExpressionContext exprctx : source.expression()) {
                writer.visit(exprctx);
            }

            if (sourceenmd.type.sort == Sort.ARRAY) {
                // Multi-dimensional array: one instruction allocates all dimensions.
                execute.visitMultiANewArrayInsn(sourceenmd.type.type.getDescriptor(), sourceenmd.type.type.getDimensions());
            } else {
                execute.newArray(sourceenmd.type.type);
            }
        } else {
            execute.newInstance(sourceenmd.type.type);

            if (parentemd.read) {
                execute.dup();
            }

            for (final ExpressionContext exprctx : source.arguments().expression()) {
                writer.visit(exprctx);
            }

            final Constructor target = (Constructor)sourceenmd.target;
            execute.invokeConstructor(target.owner.type, target.method);
        }
    }

    /**
     * Writes a method call.  Statically-resolved methods use the appropriate
     * invoke opcode; def targets fall through to invokedynamic dispatch.
     */
    private void writeCallExternal(final ExtcallContext source) {
        final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source);

        final boolean method = sourceenmd.target instanceof Method;
        final boolean def = sourceenmd.target instanceof String;

        if (!method && !def) {
            throw new IllegalStateException(WriterUtility.error(source) + "Target not found for call.");
        }

        final List<ExpressionContext> arguments = source.arguments().expression();

        if (method) {
            for (final ExpressionContext exprctx : arguments) {
                writer.visit(exprctx);
            }

            final Method target = (Method)sourceenmd.target;

            if (java.lang.reflect.Modifier.isStatic(target.reflect.getModifiers())) {
                execute.invokeStatic(target.owner.type, target.method);
            } else if (java.lang.reflect.Modifier.isInterface(target.owner.clazz.getModifiers())) {
                execute.invokeInterface(target.owner.type, target.method);
            } else {
                execute.invokeVirtual(target.owner.type, target.method);
            }

            // Narrow the result when erasure widened the handle's return type.
            if (!target.rtn.clazz.equals(target.handle.type().returnType())) {
                execute.checkCast(target.rtn.type);
            }
        } else {
            writeDynamicCallExternal(source);
        }
    }

    /**
     * Writes an invokedynamic call for a def-typed receiver, building the call
     * site descriptor from the runtime types of the arguments.
     */
    private void writeDynamicCallExternal(final ExtcallContext source) {
        final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source);
        final List<ExpressionContext> arguments = source.arguments().expression();

        StringBuilder signature = new StringBuilder();
        signature.append('(');
        // first parameter is the receiver, we never know its type: always Object
        signature.append(WriterConstants.OBJECT_TYPE.getDescriptor());

        for (int i = 0; i < arguments.size(); i++) {
            ExpressionMetadata arg = metadata.getExpressionMetadata(arguments.get(i));
            // disable any implicit casts/conversion for arguments, let invokeDynamic take care
            arg.to = arg.from;
            arg.cast = new Cast(arg.from, arg.from);
            signature.append(arg.from.type.getDescriptor());
            writer.visit(arguments.get(i));
        }

        signature.append(')');
        // return value: currently always Object. making this better may be tricky...
        signature.append(WriterConstants.OBJECT_TYPE.getDescriptor());

        execute.visitInvokeDynamicInsn((String)sourceenmd.target, signature.toString(),
            WriterConstants.DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.METHOD_CALL });
    }
}

View File

@ -1,390 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Metadata.ExpressionMetadata;
import org.elasticsearch.painless.Metadata.StatementMetadata;
import org.elasticsearch.painless.PainlessParser.AfterthoughtContext;
import org.elasticsearch.painless.PainlessParser.BlockContext;
import org.elasticsearch.painless.PainlessParser.DeclContext;
import org.elasticsearch.painless.PainlessParser.DeclarationContext;
import org.elasticsearch.painless.PainlessParser.DeclvarContext;
import org.elasticsearch.painless.PainlessParser.DoContext;
import org.elasticsearch.painless.PainlessParser.EmptyscopeContext;
import org.elasticsearch.painless.PainlessParser.ExprContext;
import org.elasticsearch.painless.PainlessParser.ExpressionContext;
import org.elasticsearch.painless.PainlessParser.ForContext;
import org.elasticsearch.painless.PainlessParser.IfContext;
import org.elasticsearch.painless.PainlessParser.InitializerContext;
import org.elasticsearch.painless.PainlessParser.MultipleContext;
import org.elasticsearch.painless.PainlessParser.ReturnContext;
import org.elasticsearch.painless.PainlessParser.SingleContext;
import org.elasticsearch.painless.PainlessParser.SourceContext;
import org.elasticsearch.painless.PainlessParser.StatementContext;
import org.elasticsearch.painless.PainlessParser.ThrowContext;
import org.elasticsearch.painless.PainlessParser.TrapContext;
import org.elasticsearch.painless.PainlessParser.TryContext;
import org.elasticsearch.painless.PainlessParser.WhileContext;
import org.elasticsearch.painless.WriterUtility.Branch;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE;
/**
 * Writes JVM bytecode for Painless statement nodes: the top-level source,
 * if/while/do/for, declarations, break/continue/return/throw, try/catch,
 * and expression statements.
 */
class WriterStatement {
    private final Metadata metadata;        // per-node metadata recorded during analysis
    private final GeneratorAdapter execute; // ASM adapter the bytecode is written through
    private final Writer writer;            // dispatching visitor used to recurse into children
    private final WriterUtility utility;    // shared helpers: branches, jump stack, pops

    WriterStatement(final Metadata metadata, final GeneratorAdapter execute,
                    final Writer writer, final WriterUtility utility) {
        this.metadata = metadata;
        this.execute = execute;
        this.writer = writer;
        this.utility = utility;
    }
/**
 * Writes the top-level source node: every statement in order, followed by an
 * implicit {@code return null} when execution can fall off the end.
 */
void processSource(final SourceContext ctx) {
    final StatementMetadata sourceMetadata = metadata.getStatementMetadata(ctx);

    for (final StatementContext statement : ctx.statement()) {
        writer.visit(statement);
    }

    // No explicit return/throw on every path: synthesize "return null".
    if (sourceMetadata.methodEscape == false) {
        execute.visitInsn(Opcodes.ACONST_NULL);
        execute.returnValue();
    }
}
/**
 * Writes an if or if/else.  The condition expression emits its own
 * conditional jumps against the branch marked here: a false condition jumps
 * to {@code branch.fals}, which is the else block when present, otherwise
 * the end label.
 */
void processIf(final IfContext ctx) {
    final ExpressionContext exprctx = ctx.expression();
    final boolean els = ctx.ELSE() != null;
    final Branch branch = utility.markBranch(ctx, exprctx);
    branch.end = new Label();
    branch.fals = els ? new Label() : branch.end;

    writer.visit(exprctx);

    final BlockContext blockctx0 = ctx.block(0);
    final StatementMetadata blockmd0 = metadata.getStatementMetadata(blockctx0);
    writer.visit(blockctx0);

    if (els) {
        if (!blockmd0.allLast) {
            // Skip the else block unless every path through the then block escaped.
            execute.goTo(branch.end);
        }

        execute.mark(branch.fals);
        writer.visit(ctx.block(1));
    }

    execute.mark(branch.end);
}
/**
 * Writes a while loop.  The branch is pushed on the jump stack so nested
 * break/continue can target its end/begin labels; the condition jumps to
 * the end label when false.
 */
void processWhile(final WhileContext ctx) {
    final ExpressionContext exprctx = ctx.expression();
    final Branch branch = utility.markBranch(ctx, exprctx);
    branch.begin = new Label();
    branch.end = new Label();
    branch.fals = branch.end;

    utility.pushJump(branch);
    execute.mark(branch.begin);
    writer.visit(exprctx);

    final BlockContext blockctx = ctx.block();
    boolean allLast = false;

    if (blockctx != null) {
        final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx);
        allLast = blocksmd.allLast;
        // Always charge at least 1 per iteration -- writeLoopCounter presumably
        // enforces the script loop limit (definition not visible in this file chunk).
        writeLoopCounter(blocksmd.count > 0 ? blocksmd.count : 1);
        writer.visit(blockctx);
    } else if (ctx.empty() != null) {
        writeLoopCounter(1);
    } else {
        throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
    }

    if (!allLast) {
        execute.goTo(branch.begin);
    }

    execute.mark(branch.end);
    utility.popJump();
}
/**
 * Writes a do/while loop.  The body runs first from {@code start};
 * {@code branch.begin} (the continue target) marks the condition
 * re-evaluation, and a false condition jumps to {@code branch.end}.
 */
void processDo(final DoContext ctx) {
    final ExpressionContext exprctx = ctx.expression();
    final Branch branch = utility.markBranch(ctx, exprctx);
    Label start = new Label();
    branch.begin = new Label();
    branch.end = new Label();
    branch.fals = branch.end;

    final BlockContext blockctx = ctx.block();
    final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx);

    utility.pushJump(branch);
    execute.mark(start);
    writer.visit(blockctx);
    execute.mark(branch.begin);
    writer.visit(exprctx);
    // Charge at least 1 per iteration toward the loop limit.
    writeLoopCounter(blocksmd.count > 0 ? blocksmd.count : 1);
    execute.goTo(start);
    execute.mark(branch.end);
    utility.popJump();
}
/**
 * Writes a for loop.  The continue target ({@code branch.begin}) is the
 * afterthought when one exists, otherwise the top of the loop; the
 * initializer runs once before {@code start} and the condition (when
 * present) jumps to the end label on false.
 */
void processFor(final ForContext ctx) {
    final ExpressionContext exprctx = ctx.expression();
    final AfterthoughtContext atctx = ctx.afterthought();
    final Branch branch = utility.markBranch(ctx, exprctx);
    final Label start = new Label();
    branch.begin = atctx == null ? start : new Label();
    branch.end = new Label();
    branch.fals = branch.end;

    utility.pushJump(branch);

    if (ctx.initializer() != null) {
        writer.visit(ctx.initializer());
    }

    execute.mark(start);

    if (exprctx != null) {
        writer.visit(exprctx);
    }

    final BlockContext blockctx = ctx.block();
    boolean allLast = false;

    if (blockctx != null) {
        StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx);
        allLast = blocksmd.allLast;

        // The afterthought counts as one extra statement per iteration.
        int count = blocksmd.count > 0 ? blocksmd.count : 1;

        if (atctx != null) {
            ++count;
        }

        writeLoopCounter(count);
        writer.visit(blockctx);
    } else if (ctx.empty() != null) {
        writeLoopCounter(1);
    } else {
        throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
    }

    if (atctx != null) {
        execute.mark(branch.begin);
        writer.visit(atctx);
    }

    // Loop back when there is an afterthought or any path can fall through the body.
    if (atctx != null || !allLast) {
        execute.goTo(start);
    }

    execute.mark(branch.end);
    utility.popJump();
}
/** Writes a declaration statement by delegating to its declaration child. */
void processDecl(final DeclContext ctx) {
    final DeclarationContext declaration = ctx.declaration();
    writer.visit(declaration);
}
/** Writes a continue: jump to the begin label of the innermost enclosing loop. */
void processContinue() {
    execute.goTo(utility.peekJump().begin);
}
/** Writes a break: jump to the end label of the innermost enclosing loop. */
void processBreak() {
    execute.goTo(utility.peekJump().end);
}
/** Writes a return: evaluate the returned expression, then emit the return. */
void processReturn(final ReturnContext ctx) {
    final ExpressionContext returned = ctx.expression();
    writer.visit(returned);
    execute.returnValue();
}
/**
 * Writes a try with one or more catch (trap) clauses.  {@code branch.begin}
 * and {@code branch.end} delimit the protected region; each trap emits its
 * own handler.  The shared {@code end} label is only marked when it is
 * actually reachable (a fall-through try body, or multiple traps that need
 * a common join point via {@code branch.tru}).
 */
void processTry(final TryContext ctx) {
    final TrapContext[] trapctxs = new TrapContext[ctx.trap().size()];
    ctx.trap().toArray(trapctxs);
    final Branch branch = utility.markBranch(ctx, trapctxs);

    Label end = new Label();
    branch.begin = new Label();
    branch.end = new Label();
    branch.tru = trapctxs.length > 1 ? end : null;

    execute.mark(branch.begin);

    final BlockContext blockctx = ctx.block();
    final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx);
    writer.visit(blockctx);

    if (!blocksmd.allLast) {
        // Try body can complete normally: jump past the handlers.
        execute.goTo(end);
    }

    execute.mark(branch.end);

    for (final TrapContext trapctx : trapctxs) {
        writer.visit(trapctx);
    }

    if (!blocksmd.allLast || trapctxs.length > 1) {
        execute.mark(end);
    }
}
/** Writes a throw: evaluate the exception expression, then emit athrow. */
void processThrow(final ThrowContext ctx) {
    final ExpressionContext thrown = ctx.expression();
    writer.visit(thrown);
    execute.throwException();
}
/**
 * Writes an expression statement.  When the statement escapes the method
 * (its value is the implicit return) the value is returned; otherwise the
 * unused result is popped off the stack.
 */
void processExpr(final ExprContext ctx) {
    final StatementMetadata statementData = metadata.getStatementMetadata(ctx);
    final ExpressionContext expression = ctx.expression();
    final ExpressionMetadata expressionData = metadata.getExpressionMetadata(expression);

    writer.visit(expression);

    if (!statementData.methodEscape) {
        // Result is unused: discard it (one or two slots depending on type width).
        utility.writePop(expressionData.to.type.getSize());
    } else {
        execute.returnValue();
    }
}
/** Writes a multi-statement block: each child statement in order. */
void processMultiple(final MultipleContext ctx) {
    for (final StatementContext child : ctx.statement()) {
        writer.visit(child);
    }
}
/** Writes a single-statement block by visiting its lone statement. */
void processSingle(final SingleContext ctx) {
    final StatementContext only = ctx.statement();
    writer.visit(only);
}
/**
 * Writes a for-loop initializer: either a declaration, or a bare expression
 * whose unused result is popped.
 */
void processInitializer(InitializerContext ctx) {
    final DeclarationContext declaration = ctx.declaration();

    if (declaration != null) {
        writer.visit(declaration);
        return;
    }

    final ExpressionContext expression = ctx.expression();

    if (expression == null) {
        throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
    }

    final ExpressionMetadata expressionData = metadata.getExpressionMetadata(expression);
    writer.visit(expression);
    // The initializer's value is never consumed: discard it.
    utility.writePop(expressionData.to.type.getSize());
}
/**
 * Writes bytecode for a loop afterthought expression (e.g. the increment
 * clause of a for loop).  The expression is evaluated purely for its side
 * effect, so any value it leaves on the stack is popped.
 */
void processAfterthought(final AfterthoughtContext ctx) {
    final ExpressionContext exprctx = ctx.expression();
    final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx);
    // Visit the cached child context instead of re-querying ctx.expression(),
    // consistent with the other process* methods in this class.
    writer.visit(exprctx);
    // Discard the expression's result; only its side effect matters here.
    utility.writePop(expremd.to.type.getSize());
}
/**
 * Writes bytecode for a declaration statement by visiting each
 * declared variable in turn.
 */
void processDeclaration(DeclarationContext ctx) {
for (final DeclvarContext declctx : ctx.declvar()) {
writer.visit(declctx);
}
}
/**
 * Writes bytecode for a single declared variable.  If an initializer
 * expression is present its value is stored; otherwise the default
 * value for the variable's type (0, 0L, 0.0F, 0.0, or null) is pushed
 * first.  The local variable slot was recorded in postConst during an
 * earlier analysis phase.
 */
void processDeclvar(final DeclvarContext ctx) {
final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx);
final org.objectweb.asm.Type type = declvaremd.to.type;
final Sort sort = declvaremd.to.sort;
int slot = (int)declvaremd.postConst;
final ExpressionContext exprctx = ctx.expression();
// No explicit initializer means a default value must be pushed instead.
final boolean initialize = exprctx == null;
if (!initialize) {
writer.visit(exprctx);
}
switch (sort) {
case VOID: throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
// bool/byte/short/char intentionally fall through: each is stored as an int.
case BOOL:
case BYTE:
case SHORT:
case CHAR:
case INT: if (initialize) execute.push(0); break;
case LONG: if (initialize) execute.push(0L); break;
case FLOAT: if (initialize) execute.push(0.0F); break;
case DOUBLE: if (initialize) execute.push(0.0); break;
default: if (initialize) execute.visitInsn(Opcodes.ACONST_NULL);
}
// getOpcode picks the type-correct store instruction (ISTORE/LSTORE/ASTORE...).
execute.visitVarInsn(type.getOpcode(Opcodes.ISTORE), slot);
}
/**
 * Writes bytecode for a single catch clause.  Marks the handler label,
 * stores the caught exception into its assigned variable slot, emits the
 * catch block (if any), and registers the protected range with the
 * method writer.  When multiple catch clauses share an end label
 * (branch.tru), control jumps there unless this handler always exits.
 */
void processTrap(final TrapContext ctx) {
    final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx);
    final Branch branch = utility.getBranch(ctx);
    final Label jump = new Label();
    final BlockContext blockctx = ctx.block();
    final EmptyscopeContext emptyctx = ctx.emptyscope();
    execute.mark(jump);
    // Store the caught exception into the slot chosen during analysis.
    execute.visitVarInsn(trapsmd.exception.type.getOpcode(Opcodes.ISTORE), trapsmd.slot);
    if (blockctx != null) {
        // Reuse the cached child context instead of re-querying ctx.block().
        writer.visit(blockctx);
    } else if (emptyctx == null) {
        // Grammar guarantees either a block or an empty scope; anything else is a compiler bug.
        throw new IllegalStateException(WriterUtility.error(ctx) + "Unexpected state.");
    }
    execute.visitTryCatchBlock(branch.begin, branch.end, jump, trapsmd.exception.type.getInternalName());
    if (branch.tru != null && !trapsmd.allLast) {
        execute.goTo(branch.tru);
    }
}
/**
 * Writes bytecode that decrements the per-execution loop counter by the
 * given statement count and throws the painless error once the budget
 * is exhausted, guarding against runaway loops.
 */
private void writeLoopCounter(final int count) {
final Label end = new Label();
execute.iinc(metadata.loopCounterSlot, -count);
execute.visitVarInsn(Opcodes.ILOAD, metadata.loopCounterSlot);
execute.push(0);
// Continue only while the remaining budget is still positive.
execute.ifICmp(GeneratorAdapter.GT, end);
execute.throwException(PAINLESS_ERROR_TYPE,
"The maximum number of statements that can be executed in a loop has been reached.");
execute.mark(end);
}
}

View File

@ -19,31 +19,14 @@
package org.elasticsearch.painless;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Transform;
import org.elasticsearch.painless.Definition.Type;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.painless.PainlessParser.ADD;
import static org.elasticsearch.painless.PainlessParser.BWAND;
import static org.elasticsearch.painless.PainlessParser.BWOR;
import static org.elasticsearch.painless.PainlessParser.BWXOR;
import static org.elasticsearch.painless.PainlessParser.DIV;
import static org.elasticsearch.painless.PainlessParser.LSH;
import static org.elasticsearch.painless.PainlessParser.MUL;
import static org.elasticsearch.painless.PainlessParser.REM;
import static org.elasticsearch.painless.PainlessParser.RSH;
import static org.elasticsearch.painless.PainlessParser.SUB;
import static org.elasticsearch.painless.PainlessParser.USH;
import static org.elasticsearch.painless.WriterConstants.ADDEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.ADDEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.ADDWOOVERLOW_DOUBLE;
@ -67,6 +50,7 @@ import static org.elasticsearch.painless.WriterConstants.MULEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.MULEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE;
import static org.elasticsearch.painless.WriterConstants.REMWOOVERLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.REMWOOVERLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_BOOLEAN;
@ -84,304 +68,369 @@ import static org.elasticsearch.painless.WriterConstants.SUBEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.SUBEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_INT;
import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOFLOATWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOINTEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_FLOAT;
import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_LONG;
import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_DOUBLE;
import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_FLOAT;
class WriterUtility {
static class Branch {
final ParserRuleContext source;
/**
* Set of methods used during the writing phase of compilation
* shared by the nodes of the Painless tree.
*/
public final class WriterUtility {
Label begin = null;
Label end = null;
Label tru = null;
Label fals = null;
public static void writeLoopCounter(final GeneratorAdapter adapter, final int slot, final int count) {
if (slot > -1) {
final Label end = new Label();
private Branch(final ParserRuleContext source) {
this.source = source;
adapter.iinc(slot, -count);
adapter.visitVarInsn(Opcodes.ILOAD, slot);
adapter.push(0);
adapter.ifICmp(GeneratorAdapter.GT, end);
adapter.throwException(PAINLESS_ERROR_TYPE,
"The maximum number of statements that can be executed in a loop has been reached.");
adapter.mark(end);
}
}
/**
* A utility method to output consistent error messages.
* @param ctx The ANTLR node the error occurred in.
* @return The error message with tacked on line number and character position.
*/
static String error(final ParserRuleContext ctx) {
return "Writer Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: ";
}
public static void writeCast(final GeneratorAdapter adapter, final Cast cast) {
if (cast instanceof Transform) {
final Transform transform = (Transform)cast;
private final Definition definition;
private final CompilerSettings settings;
private final GeneratorAdapter execute;
private final Map<ParserRuleContext, Branch> branches = new HashMap<>();
private final Deque<Branch> jumps = new ArrayDeque<>();
private final Set<ParserRuleContext> strings = new HashSet<>();
WriterUtility(final Metadata metadata, final GeneratorAdapter execute) {
definition = metadata.definition;
settings = metadata.settings;
this.execute = execute;
}
Branch markBranch(final ParserRuleContext source, final ParserRuleContext... nodes) {
final Branch branch = new Branch(source);
for (final ParserRuleContext node : nodes) {
branches.put(node, branch);
}
return branch;
}
void copyBranch(final Branch branch, final ParserRuleContext... nodes) {
for (final ParserRuleContext node : nodes) {
branches.put(node, branch);
}
}
Branch getBranch(final ParserRuleContext source) {
return branches.get(source);
}
void checkWriteBranch(final ParserRuleContext source) {
final Branch branch = getBranch(source);
if (branch != null) {
if (branch.tru != null) {
execute.visitJumpInsn(Opcodes.IFNE, branch.tru);
} else if (branch.fals != null) {
execute.visitJumpInsn(Opcodes.IFEQ, branch.fals);
if (transform.upcast != null) {
adapter.checkCast(transform.upcast.type);
}
}
}
void pushJump(final Branch branch) {
jumps.push(branch);
}
Branch peekJump() {
return jumps.peek();
}
void popJump() {
jumps.pop();
}
void addStrings(final ParserRuleContext source) {
strings.add(source);
}
boolean containsStrings(final ParserRuleContext source) {
return strings.contains(source);
}
void removeStrings(final ParserRuleContext source) {
strings.remove(source);
}
void writeDup(final int size, final boolean x1, final boolean x2) {
if (size == 1) {
if (x2) {
execute.dupX2();
} else if (x1) {
execute.dupX1();
if (java.lang.reflect.Modifier.isStatic(transform.method.reflect.getModifiers())) {
adapter.invokeStatic(transform.method.owner.type, transform.method.method);
} else if (java.lang.reflect.Modifier.isInterface(transform.method.owner.clazz.getModifiers())) {
adapter.invokeInterface(transform.method.owner.type, transform.method.method);
} else {
execute.dup();
adapter.invokeVirtual(transform.method.owner.type, transform.method.method);
}
} else if (size == 2) {
if (x2) {
execute.dup2X2();
} else if (x1) {
execute.dup2X1();
if (transform.downcast != null) {
adapter.checkCast(transform.downcast.type);
}
} else if (cast != null) {
final Type from = cast.from;
final Type to = cast.to;
if (from.equals(to)) {
return;
}
if (from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive) {
adapter.cast(from.type, to.type);
} else {
execute.dup2();
try {
from.clazz.asSubclass(to.clazz);
} catch (ClassCastException exception) {
adapter.checkCast(to.type);
}
}
}
}
void writePop(final int size) {
if (size == 1) {
execute.pop();
} else if (size == 2) {
execute.pop2();
/**
 * Writes a conditional jump based on the boolean on top of the stack:
 * jumps to tru when the value is non-zero, otherwise to fals when the
 * value is zero.  The tru label takes precedence if both are non-null;
 * if both are null nothing is written.
 */
public static void writeBranch(final GeneratorAdapter adapter, final Label tru, final Label fals) {
if (tru != null) {
adapter.visitJumpInsn(Opcodes.IFNE, tru);
} else if (fals != null) {
adapter.visitJumpInsn(Opcodes.IFEQ, fals);
}
}
void writeConstant(final ParserRuleContext source, final Object constant) {
if (constant instanceof Number) {
writeNumeric(source, constant);
} else if (constant instanceof Character) {
writeNumeric(source, (int)(char)constant);
} else if (constant instanceof String) {
writeString(source, constant);
} else if (constant instanceof Boolean) {
writeBoolean(source, constant);
} else if (constant != null) {
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
}
/**
 * Writes the standard new/dup/invoke-constructor sequence that leaves a
 * freshly constructed StringBuilder on top of the stack, used to begin
 * a string concatenation.
 */
public static void writeNewStrings(final GeneratorAdapter adapter) {
adapter.newInstance(STRINGBUILDER_TYPE);
adapter.dup();
adapter.invokeConstructor(STRINGBUILDER_TYPE, STRINGBUILDER_CONSTRUCTOR);
}
void writeNumeric(final ParserRuleContext source, final Object numeric) {
if (numeric instanceof Double) {
execute.push((double)numeric);
} else if (numeric instanceof Float) {
execute.push((float)numeric);
} else if (numeric instanceof Long) {
execute.push((long)numeric);
} else if (numeric instanceof Number) {
execute.push(((Number)numeric).intValue());
} else {
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
}
}
void writeString(final ParserRuleContext source, final Object string) {
if (string instanceof String) {
execute.push((String)string);
} else {
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
}
}
void writeBoolean(final ParserRuleContext source, final Object bool) {
if (bool instanceof Boolean) {
execute.push((boolean)bool);
} else {
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
}
}
void writeNewStrings() {
execute.newInstance(STRINGBUILDER_TYPE);
execute.dup();
execute.invokeConstructor(STRINGBUILDER_TYPE, STRINGBUILDER_CONSTRUCTOR);
}
void writeAppendStrings(final Sort sort) {
public static void writeAppendStrings(final GeneratorAdapter adapter, final Sort sort) {
switch (sort) {
case BOOL: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN); break;
case CHAR: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR); break;
case BOOL: adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN); break;
case CHAR: adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR); break;
case BYTE:
case SHORT:
case INT: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_INT); break;
case LONG: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG); break;
case FLOAT: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT); break;
case DOUBLE: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE); break;
case STRING: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING); break;
default: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT);
case INT: adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_INT); break;
case LONG: adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG); break;
case FLOAT: adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT); break;
case DOUBLE: adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE); break;
case STRING: adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING); break;
default: adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT);
}
}
void writeToStrings() {
execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_TOSTRING);
public static void writeToStrings(final GeneratorAdapter adapter) {
adapter.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_TOSTRING);
}
void writeBinaryInstruction(final ParserRuleContext source, final Type type, final int token) {
public static void writeBinaryInstruction(final CompilerSettings settings, final Definition definition,
final GeneratorAdapter adapter, final String location,
final Type type, final Operation operation) {
final Sort sort = type.sort;
final boolean exact = !settings.getNumericOverflow() &&
boolean exact = !settings.getNumericOverflow() &&
((sort == Sort.INT || sort == Sort.LONG) &&
(token == MUL || token == DIV || token == ADD || token == SUB) ||
(operation == Operation.MUL || operation == Operation.DIV ||
operation == Operation.ADD || operation == Operation.SUB) ||
(sort == Sort.FLOAT || sort == Sort.DOUBLE) &&
(token == MUL || token == DIV || token == REM || token == ADD || token == SUB));
// If it's a 64-bit shift, fix-up the last argument to truncate to 32-bits.
// Note that unlike java, this means we still do binary promotion of shifts,
// but it keeps things simple, and this check works because we promote shifts.
if (sort == Sort.LONG && (token == LSH || token == USH || token == RSH)) {
execute.cast(org.objectweb.asm.Type.LONG_TYPE, org.objectweb.asm.Type.INT_TYPE);
}
(operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM ||
operation == Operation.ADD || operation == Operation.SUB));
if (exact) {
switch (sort) {
case INT:
switch (token) {
case MUL: execute.invokeStatic(definition.mathType.type, MULEXACT_INT); break;
case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_INT); break;
case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_INT); break;
case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_INT); break;
default:
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
switch (operation) {
case MUL: adapter.invokeStatic(definition.mathType.type, MULEXACT_INT); break;
case DIV: adapter.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_INT); break;
case ADD: adapter.invokeStatic(definition.mathType.type, ADDEXACT_INT); break;
case SUB: adapter.invokeStatic(definition.mathType.type, SUBEXACT_INT); break;
}
break;
case LONG:
switch (token) {
case MUL: execute.invokeStatic(definition.mathType.type, MULEXACT_LONG); break;
case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_LONG); break;
case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_LONG); break;
case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_LONG); break;
default:
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
switch (operation) {
case MUL: adapter.invokeStatic(definition.mathType.type, MULEXACT_LONG); break;
case DIV: adapter.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_LONG); break;
case ADD: adapter.invokeStatic(definition.mathType.type, ADDEXACT_LONG); break;
case SUB: adapter.invokeStatic(definition.mathType.type, SUBEXACT_LONG); break;
}
break;
case FLOAT:
switch (token) {
case MUL: execute.invokeStatic(definition.utilityType.type, MULWOOVERLOW_FLOAT); break;
case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_FLOAT); break;
case REM: execute.invokeStatic(definition.utilityType.type, REMWOOVERLOW_FLOAT); break;
case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_FLOAT); break;
case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_FLOAT); break;
switch (operation) {
case MUL: adapter.invokeStatic(definition.utilityType.type, MULWOOVERLOW_FLOAT); break;
case DIV: adapter.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_FLOAT); break;
case REM: adapter.invokeStatic(definition.utilityType.type, REMWOOVERLOW_FLOAT); break;
case ADD: adapter.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_FLOAT); break;
case SUB: adapter.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_FLOAT); break;
default:
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
throw new IllegalStateException("Error " + location + ": Illegal tree structure.");
}
break;
case DOUBLE:
switch (token) {
case MUL: execute.invokeStatic(definition.utilityType.type, MULWOOVERLOW_DOUBLE); break;
case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_DOUBLE); break;
case REM: execute.invokeStatic(definition.utilityType.type, REMWOOVERLOW_DOUBLE); break;
case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_DOUBLE); break;
case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_DOUBLE); break;
switch (operation) {
case MUL: adapter.invokeStatic(definition.utilityType.type, MULWOOVERLOW_DOUBLE); break;
case DIV: adapter.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_DOUBLE); break;
case REM: adapter.invokeStatic(definition.utilityType.type, REMWOOVERLOW_DOUBLE); break;
case ADD: adapter.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_DOUBLE); break;
case SUB: adapter.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_DOUBLE); break;
default:
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
throw new IllegalStateException("Error " + location + ": Illegal tree structure.");
}
break;
default:
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
throw new IllegalStateException("Error " + location + ": Illegal tree structure.");
}
} else {
if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) &&
(token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) {
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
(operation == Operation.LSH || operation == Operation.USH ||
operation == Operation.RSH || operation == Operation.BWAND ||
operation == Operation.XOR || operation == Operation.BWOR)) {
throw new IllegalStateException("Error " + location + ": Illegal tree structure.");
}
if (sort == Sort.DEF) {
switch (token) {
case MUL: execute.invokeStatic(definition.defobjType.type, DEF_MUL_CALL); break;
case DIV: execute.invokeStatic(definition.defobjType.type, DEF_DIV_CALL); break;
case REM: execute.invokeStatic(definition.defobjType.type, DEF_REM_CALL); break;
case ADD: execute.invokeStatic(definition.defobjType.type, DEF_ADD_CALL); break;
case SUB: execute.invokeStatic(definition.defobjType.type, DEF_SUB_CALL); break;
case LSH: execute.invokeStatic(definition.defobjType.type, DEF_LSH_CALL); break;
case USH: execute.invokeStatic(definition.defobjType.type, DEF_RSH_CALL); break;
case RSH: execute.invokeStatic(definition.defobjType.type, DEF_USH_CALL); break;
case BWAND: execute.invokeStatic(definition.defobjType.type, DEF_AND_CALL); break;
case BWXOR: execute.invokeStatic(definition.defobjType.type, DEF_XOR_CALL); break;
case BWOR: execute.invokeStatic(definition.defobjType.type, DEF_OR_CALL); break;
switch (operation) {
case MUL: adapter.invokeStatic(definition.defobjType.type, DEF_MUL_CALL); break;
case DIV: adapter.invokeStatic(definition.defobjType.type, DEF_DIV_CALL); break;
case REM: adapter.invokeStatic(definition.defobjType.type, DEF_REM_CALL); break;
case ADD: adapter.invokeStatic(definition.defobjType.type, DEF_ADD_CALL); break;
case SUB: adapter.invokeStatic(definition.defobjType.type, DEF_SUB_CALL); break;
case LSH: adapter.invokeStatic(definition.defobjType.type, DEF_LSH_CALL); break;
case USH: adapter.invokeStatic(definition.defobjType.type, DEF_RSH_CALL); break;
case RSH: adapter.invokeStatic(definition.defobjType.type, DEF_USH_CALL); break;
case BWAND: adapter.invokeStatic(definition.defobjType.type, DEF_AND_CALL); break;
case XOR: adapter.invokeStatic(definition.defobjType.type, DEF_XOR_CALL); break;
case BWOR: adapter.invokeStatic(definition.defobjType.type, DEF_OR_CALL); break;
default:
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
throw new IllegalStateException("Error " + location + ": Illegal tree structure.");
}
} else {
switch (token) {
case MUL: execute.math(GeneratorAdapter.MUL, type.type); break;
case DIV: execute.math(GeneratorAdapter.DIV, type.type); break;
case REM: execute.math(GeneratorAdapter.REM, type.type); break;
case ADD: execute.math(GeneratorAdapter.ADD, type.type); break;
case SUB: execute.math(GeneratorAdapter.SUB, type.type); break;
case LSH: execute.math(GeneratorAdapter.SHL, type.type); break;
case USH: execute.math(GeneratorAdapter.USHR, type.type); break;
case RSH: execute.math(GeneratorAdapter.SHR, type.type); break;
case BWAND: execute.math(GeneratorAdapter.AND, type.type); break;
case BWXOR: execute.math(GeneratorAdapter.XOR, type.type); break;
case BWOR: execute.math(GeneratorAdapter.OR, type.type); break;
switch (operation) {
case MUL: adapter.math(GeneratorAdapter.MUL, type.type); break;
case DIV: adapter.math(GeneratorAdapter.DIV, type.type); break;
case REM: adapter.math(GeneratorAdapter.REM, type.type); break;
case ADD: adapter.math(GeneratorAdapter.ADD, type.type); break;
case SUB: adapter.math(GeneratorAdapter.SUB, type.type); break;
case LSH: adapter.math(GeneratorAdapter.SHL, type.type); break;
case USH: adapter.math(GeneratorAdapter.USHR, type.type); break;
case RSH: adapter.math(GeneratorAdapter.SHR, type.type); break;
case BWAND: adapter.math(GeneratorAdapter.AND, type.type); break;
case XOR: adapter.math(GeneratorAdapter.XOR, type.type); break;
case BWOR: adapter.math(GeneratorAdapter.OR, type.type); break;
default:
throw new IllegalStateException(WriterUtility.error(source) + "Unexpected state.");
throw new IllegalStateException("Error " + location + ": Illegal tree structure.");
}
}
}
}
/**
* Called for any compound assignment (including increment/decrement instructions).
* We have to be stricter than writeBinary and do overflow checks against the original type's size
* instead of the promoted type's size, since the result will be implicitly cast back.
*
* @return This will be true if an instruction is written, false otherwise.
*/
public static boolean writeExactInstruction(
final Definition definition, final GeneratorAdapter adapter, final Sort fsort, final Sort tsort) {
// Narrowing from double: every target requires an overflow-checked helper.
if (fsort == Sort.DOUBLE) {
if (tsort == Sort.FLOAT) {
adapter.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE);
// Boxed targets additionally need a checkCast to the wrapper type.
} else if (tsort == Sort.FLOAT_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE);
adapter.checkCast(definition.floatobjType.type);
} else if (tsort == Sort.LONG) {
adapter.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE);
} else if (tsort == Sort.LONG_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE);
adapter.checkCast(definition.longobjType.type);
} else if (tsort == Sort.INT) {
adapter.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE);
} else if (tsort == Sort.INT_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE);
adapter.checkCast(definition.intobjType.type);
} else if (tsort == Sort.CHAR) {
adapter.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE);
} else if (tsort == Sort.CHAR_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE);
adapter.checkCast(definition.charobjType.type);
} else if (tsort == Sort.SHORT) {
adapter.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE);
} else if (tsort == Sort.SHORT_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE);
adapter.checkCast(definition.shortobjType.type);
} else if (tsort == Sort.BYTE) {
adapter.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE);
} else if (tsort == Sort.BYTE_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE);
adapter.checkCast(definition.byteobjType.type);
} else {
return false;
}
// Narrowing from float: same pattern for every target narrower than float.
} else if (fsort == Sort.FLOAT) {
if (tsort == Sort.LONG) {
adapter.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT);
} else if (tsort == Sort.LONG_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT);
adapter.checkCast(definition.longobjType.type);
} else if (tsort == Sort.INT) {
adapter.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT);
} else if (tsort == Sort.INT_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT);
adapter.checkCast(definition.intobjType.type);
} else if (tsort == Sort.CHAR) {
adapter.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT);
} else if (tsort == Sort.CHAR_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT);
adapter.checkCast(definition.charobjType.type);
} else if (tsort == Sort.SHORT) {
adapter.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT);
} else if (tsort == Sort.SHORT_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT);
adapter.checkCast(definition.shortobjType.type);
} else if (tsort == Sort.BYTE) {
adapter.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
} else if (tsort == Sort.BYTE_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
adapter.checkCast(definition.byteobjType.type);
} else {
return false;
}
// Narrowing from long: int uses Math's exact helper, smaller types use utility helpers.
} else if (fsort == Sort.LONG) {
if (tsort == Sort.INT) {
adapter.invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
} else if (tsort == Sort.INT_OBJ) {
adapter.invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
adapter.checkCast(definition.intobjType.type);
} else if (tsort == Sort.CHAR) {
adapter.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
} else if (tsort == Sort.CHAR_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
adapter.checkCast(definition.charobjType.type);
} else if (tsort == Sort.SHORT) {
adapter.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
} else if (tsort == Sort.SHORT_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
adapter.checkCast(definition.shortobjType.type);
} else if (tsort == Sort.BYTE) {
adapter.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
} else if (tsort == Sort.BYTE_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
adapter.checkCast(definition.byteobjType.type);
} else {
return false;
}
// Narrowing from int: only char/short/byte are narrower.
} else if (fsort == Sort.INT) {
if (tsort == Sort.CHAR) {
adapter.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
} else if (tsort == Sort.CHAR_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
adapter.checkCast(definition.charobjType.type);
} else if (tsort == Sort.SHORT) {
adapter.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
} else if (tsort == Sort.SHORT_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
adapter.checkCast(definition.shortobjType.type);
} else if (tsort == Sort.BYTE) {
adapter.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
} else if (tsort == Sort.BYTE_OBJ) {
adapter.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
adapter.checkCast(definition.byteobjType.type);
} else {
return false;
}
// Any other source/target pair needs no overflow-checked instruction.
} else {
return false;
}
return true;
}
/**
 * Writes the correct dup instruction for a value of the given stack
 * size (1 or 2 slots), inserting the duplicate beneath xsize slots of
 * values already on the stack (0, 1, or 2).  Any other size is a no-op.
 */
public static void writeDup(final GeneratorAdapter adapter, final int size, final int xsize) {
    if (size == 2) {
        switch (xsize) {
            case 2:  adapter.dup2X2(); break;
            case 1:  adapter.dup2X1(); break;
            default: adapter.dup2();
        }
    } else if (size == 1) {
        switch (xsize) {
            case 2:  adapter.dupX2(); break;
            case 1:  adapter.dupX1(); break;
            default: adapter.dup();
        }
    }
}
/**
 * Writes the pop instruction matching a value of the given stack size
 * (1 or 2 slots).  Any other size writes nothing.
 */
public static void writePop(final GeneratorAdapter adapter, final int size) {
    switch (size) {
        case 1: adapter.pop();  break;
        case 2: adapter.pop2(); break;
    }
}
private WriterUtility() {}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.painless;
package org.elasticsearch.painless.antlr;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.LexerNoViableAltException;
@ -25,21 +25,26 @@ import org.antlr.v4.runtime.misc.Interval;
import java.text.ParseException;
class ErrorHandlingLexer extends PainlessLexer {
public ErrorHandlingLexer(CharStream charStream) {
/**
* A lexer that will override the default error behavior to fail on the first error.
*/
final class ErrorHandlingLexer extends PainlessLexer {
ErrorHandlingLexer(final CharStream charStream) {
super(charStream);
}
@Override
public void recover(LexerNoViableAltException lnvae) {
CharStream charStream = lnvae.getInputStream();
int startIndex = lnvae.getStartIndex();
String text = charStream.getText(Interval.of(startIndex, charStream.index()));
public void recover(final LexerNoViableAltException lnvae) {
final CharStream charStream = lnvae.getInputStream();
final int startIndex = lnvae.getStartIndex();
final String text = charStream.getText(Interval.of(startIndex, charStream.index()));
ParseException parseException = new ParseException("Error [" + _tokenStartLine + ":" +
final ParseException parseException = new ParseException("Error [" + _tokenStartLine + ":" +
_tokenStartCharPositionInLine + "]: unexpected character [" +
getErrorDisplay(text) + "].", _tokenStartCharIndex);
parseException.initCause(lnvae);
throw new RuntimeException(parseException);
}
}

View File

@ -1,5 +1,5 @@
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.painless;
package org.elasticsearch.painless.antlr;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
@ -21,7 +21,7 @@ class PainlessLexer extends Lexer {
COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17,
BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25,
MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34,
LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43,
LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, XOR=43,
BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51,
AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59,
ALSH=60, ARSH=61, AUSH=62, OCTAL=63, HEX=64, INTEGER=65, DECIMAL=66, STRING=67,
@ -36,11 +36,11 @@ class PainlessLexer extends Lexer {
"COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE",
"BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT",
"MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT",
"GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND",
"BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL",
"ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL",
"HEX", "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID",
"EXTINTEGER", "EXTID"
"GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR",
"COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV",
"AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX",
"INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID", "EXTINTEGER",
"EXTID"
};
private static final String[] _LITERAL_NAMES = {
@ -58,11 +58,11 @@ class PainlessLexer extends Lexer {
"DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE",
"BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT",
"MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT",
"GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND",
"BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL",
"ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL",
"HEX", "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID",
"EXTINTEGER", "EXTID"
"GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR",
"COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV",
"AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX",
"INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID", "EXTINTEGER",
"EXTID"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);

View File

@ -1,5 +1,5 @@
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.painless;
package org.elasticsearch.painless.antlr;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
/**
@ -186,6 +186,13 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitComp(PainlessParser.CompContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitRead(PainlessParser.ReadContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -256,13 +263,6 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitCast(PainlessParser.CastContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExternal(PainlessParser.ExternalContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -290,70 +290,70 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtstart(PainlessParser.ExtstartContext ctx) { return visitChildren(ctx); }
@Override public T visitChain(PainlessParser.ChainContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtprec(PainlessParser.ExtprecContext ctx) { return visitChildren(ctx); }
@Override public T visitLinkprec(PainlessParser.LinkprecContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtcast(PainlessParser.ExtcastContext ctx) { return visitChildren(ctx); }
@Override public T visitLinkcast(PainlessParser.LinkcastContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtbrace(PainlessParser.ExtbraceContext ctx) { return visitChildren(ctx); }
@Override public T visitLinkbrace(PainlessParser.LinkbraceContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtdot(PainlessParser.ExtdotContext ctx) { return visitChildren(ctx); }
@Override public T visitLinkdot(PainlessParser.LinkdotContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtcall(PainlessParser.ExtcallContext ctx) { return visitChildren(ctx); }
@Override public T visitLinkcall(PainlessParser.LinkcallContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtvar(PainlessParser.ExtvarContext ctx) { return visitChildren(ctx); }
@Override public T visitLinkvar(PainlessParser.LinkvarContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtfield(PainlessParser.ExtfieldContext ctx) { return visitChildren(ctx); }
@Override public T visitLinkfield(PainlessParser.LinkfieldContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtnew(PainlessParser.ExtnewContext ctx) { return visitChildren(ctx); }
@Override public T visitLinknew(PainlessParser.LinknewContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtstring(PainlessParser.ExtstringContext ctx) { return visitChildren(ctx); }
@Override public T visitLinkstring(PainlessParser.LinkstringContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -361,11 +361,4 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitArguments(PainlessParser.ArgumentsContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitIncrement(PainlessParser.IncrementContext ctx) { return visitChildren(ctx); }
}

View File

@ -1,5 +1,5 @@
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.painless;
package org.elasticsearch.painless.antlr;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
/**
@ -174,6 +174,13 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitComp(PainlessParser.CompContext ctx);
/**
* Visit a parse tree produced by the {@code read}
* labeled alternative in {@link PainlessParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitRead(PainlessParser.ReadContext ctx);
/**
* Visit a parse tree produced by the {@code bool}
* labeled alternative in {@link PainlessParser#expression}.
@ -244,13 +251,6 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitCast(PainlessParser.CastContext ctx);
/**
* Visit a parse tree produced by the {@code external}
* labeled alternative in {@link PainlessParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExternal(PainlessParser.ExternalContext ctx);
/**
* Visit a parse tree produced by the {@code null}
* labeled alternative in {@link PainlessParser#expression}.
@ -273,75 +273,69 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitTrue(PainlessParser.TrueContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extstart}.
* Visit a parse tree produced by {@link PainlessParser#chain}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtstart(PainlessParser.ExtstartContext ctx);
T visitChain(PainlessParser.ChainContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extprec}.
* Visit a parse tree produced by {@link PainlessParser#linkprec}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtprec(PainlessParser.ExtprecContext ctx);
T visitLinkprec(PainlessParser.LinkprecContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extcast}.
* Visit a parse tree produced by {@link PainlessParser#linkcast}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtcast(PainlessParser.ExtcastContext ctx);
T visitLinkcast(PainlessParser.LinkcastContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extbrace}.
* Visit a parse tree produced by {@link PainlessParser#linkbrace}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtbrace(PainlessParser.ExtbraceContext ctx);
T visitLinkbrace(PainlessParser.LinkbraceContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extdot}.
* Visit a parse tree produced by {@link PainlessParser#linkdot}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtdot(PainlessParser.ExtdotContext ctx);
T visitLinkdot(PainlessParser.LinkdotContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extcall}.
* Visit a parse tree produced by {@link PainlessParser#linkcall}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtcall(PainlessParser.ExtcallContext ctx);
T visitLinkcall(PainlessParser.LinkcallContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extvar}.
* Visit a parse tree produced by {@link PainlessParser#linkvar}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtvar(PainlessParser.ExtvarContext ctx);
T visitLinkvar(PainlessParser.LinkvarContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extfield}.
* Visit a parse tree produced by {@link PainlessParser#linkfield}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtfield(PainlessParser.ExtfieldContext ctx);
T visitLinkfield(PainlessParser.LinkfieldContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extnew}.
* Visit a parse tree produced by {@link PainlessParser#linknew}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtnew(PainlessParser.ExtnewContext ctx);
T visitLinknew(PainlessParser.LinknewContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#extstring}.
* Visit a parse tree produced by {@link PainlessParser#linkstring}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtstring(PainlessParser.ExtstringContext ctx);
T visitLinkstring(PainlessParser.LinkstringContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#arguments}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitArguments(PainlessParser.ArgumentsContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#increment}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitIncrement(PainlessParser.IncrementContext ctx);
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.painless;
package org.elasticsearch.painless.antlr;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.InputMismatchException;
@ -28,10 +28,14 @@ import org.antlr.v4.runtime.Token;
import java.text.ParseException;
class ParserErrorStrategy extends DefaultErrorStrategy {
/**
* An error strategy that will override the default error behavior to fail on the first parser error.
*/
final class ParserErrorStrategy extends DefaultErrorStrategy {
@Override
public void recover(Parser recognizer, RecognitionException re) {
Token token = re.getOffendingToken();
public void recover(final Parser recognizer, final RecognitionException re) {
final Token token = re.getOffendingToken();
String message;
if (token == null) {
@ -52,23 +56,24 @@ class ParserErrorStrategy extends DefaultErrorStrategy {
" unexpected token near [" + getTokenErrorDisplay(token) + "].";
}
ParseException parseException = new ParseException(message, token == null ? -1 : token.getStartIndex());
final ParseException parseException = new ParseException(message, token == null ? -1 : token.getStartIndex());
parseException.initCause(re);
throw new RuntimeException(parseException);
}
@Override
public Token recoverInline(Parser recognizer) throws RecognitionException {
Token token = recognizer.getCurrentToken();
String message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" +
" unexpected token [" + getTokenErrorDisplay(token) + "]" +
" was expecting one of [" + recognizer.getExpectedTokens().toString(recognizer.getVocabulary()) + "].";
ParseException parseException = new ParseException(message, token.getStartIndex());
public Token recoverInline(final Parser recognizer) throws RecognitionException {
final Token token = recognizer.getCurrentToken();
final String message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" +
" unexpected token [" + getTokenErrorDisplay(token) + "]" +
" was expecting one of [" + recognizer.getExpectedTokens().toString(recognizer.getVocabulary()) + "].";
final ParseException parseException = new ParseException(message, token.getStartIndex());
throw new RuntimeException(parseException);
}
@Override
public void sync(Parser recognizer) {
public void sync(final Parser recognizer) {
}
}

View File

@ -0,0 +1,790 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.antlr;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.Variables.Reserved;
import org.elasticsearch.painless.antlr.PainlessParser.AfterthoughtContext;
import org.elasticsearch.painless.antlr.PainlessParser.ArgumentsContext;
import org.elasticsearch.painless.antlr.PainlessParser.AssignmentContext;
import org.elasticsearch.painless.antlr.PainlessParser.BinaryContext;
import org.elasticsearch.painless.antlr.PainlessParser.BreakContext;
import org.elasticsearch.painless.antlr.PainlessParser.CastContext;
import org.elasticsearch.painless.antlr.PainlessParser.ConditionalContext;
import org.elasticsearch.painless.antlr.PainlessParser.ContinueContext;
import org.elasticsearch.painless.antlr.PainlessParser.DeclContext;
import org.elasticsearch.painless.antlr.PainlessParser.DeclarationContext;
import org.elasticsearch.painless.antlr.PainlessParser.DecltypeContext;
import org.elasticsearch.painless.antlr.PainlessParser.DeclvarContext;
import org.elasticsearch.painless.antlr.PainlessParser.DoContext;
import org.elasticsearch.painless.antlr.PainlessParser.EmptyContext;
import org.elasticsearch.painless.antlr.PainlessParser.EmptyscopeContext;
import org.elasticsearch.painless.antlr.PainlessParser.ExprContext;
import org.elasticsearch.painless.antlr.PainlessParser.ExpressionContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinkbraceContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinkcallContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinkcastContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinkdotContext;
import org.elasticsearch.painless.antlr.PainlessParser.ReadContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinkfieldContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinknewContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinkprecContext;
import org.elasticsearch.painless.antlr.PainlessParser.ChainContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinkstringContext;
import org.elasticsearch.painless.antlr.PainlessParser.LinkvarContext;
import org.elasticsearch.painless.antlr.PainlessParser.FalseContext;
import org.elasticsearch.painless.antlr.PainlessParser.ForContext;
import org.elasticsearch.painless.antlr.PainlessParser.GenericContext;
import org.elasticsearch.painless.antlr.PainlessParser.IdentifierContext;
import org.elasticsearch.painless.antlr.PainlessParser.IfContext;
import org.elasticsearch.painless.antlr.PainlessParser.InitializerContext;
import org.elasticsearch.painless.antlr.PainlessParser.MultipleContext;
import org.elasticsearch.painless.antlr.PainlessParser.NullContext;
import org.elasticsearch.painless.antlr.PainlessParser.NumericContext;
import org.elasticsearch.painless.antlr.PainlessParser.PostincContext;
import org.elasticsearch.painless.antlr.PainlessParser.PrecedenceContext;
import org.elasticsearch.painless.antlr.PainlessParser.PreincContext;
import org.elasticsearch.painless.antlr.PainlessParser.ReturnContext;
import org.elasticsearch.painless.antlr.PainlessParser.SingleContext;
import org.elasticsearch.painless.antlr.PainlessParser.SourceContext;
import org.elasticsearch.painless.antlr.PainlessParser.StatementContext;
import org.elasticsearch.painless.antlr.PainlessParser.ThrowContext;
import org.elasticsearch.painless.antlr.PainlessParser.TrapContext;
import org.elasticsearch.painless.antlr.PainlessParser.TrueContext;
import org.elasticsearch.painless.antlr.PainlessParser.TryContext;
import org.elasticsearch.painless.antlr.PainlessParser.UnaryContext;
import org.elasticsearch.painless.antlr.PainlessParser.WhileContext;
import org.elasticsearch.painless.node.AExpression;
import org.elasticsearch.painless.node.ALink;
import org.elasticsearch.painless.node.ANode;
import org.elasticsearch.painless.node.AStatement;
import org.elasticsearch.painless.node.EBinary;
import org.elasticsearch.painless.node.EBool;
import org.elasticsearch.painless.node.EBoolean;
import org.elasticsearch.painless.node.EExplicit;
import org.elasticsearch.painless.node.EChain;
import org.elasticsearch.painless.node.EComp;
import org.elasticsearch.painless.node.EConditional;
import org.elasticsearch.painless.node.EDecimal;
import org.elasticsearch.painless.node.ENull;
import org.elasticsearch.painless.node.ENumeric;
import org.elasticsearch.painless.node.EUnary;
import org.elasticsearch.painless.node.LBrace;
import org.elasticsearch.painless.node.LCall;
import org.elasticsearch.painless.node.LCast;
import org.elasticsearch.painless.node.LField;
import org.elasticsearch.painless.node.LNewArray;
import org.elasticsearch.painless.node.LNewObj;
import org.elasticsearch.painless.node.LString;
import org.elasticsearch.painless.node.LVariable;
import org.elasticsearch.painless.node.SBlock;
import org.elasticsearch.painless.node.SBreak;
import org.elasticsearch.painless.node.SContinue;
import org.elasticsearch.painless.node.SDeclBlock;
import org.elasticsearch.painless.node.SDeclaration;
import org.elasticsearch.painless.node.SDo;
import org.elasticsearch.painless.node.SExpression;
import org.elasticsearch.painless.node.SFor;
import org.elasticsearch.painless.node.SIfElse;
import org.elasticsearch.painless.node.SReturn;
import org.elasticsearch.painless.node.SSource;
import org.elasticsearch.painless.node.SThrow;
import org.elasticsearch.painless.node.STrap;
import org.elasticsearch.painless.node.STry;
import org.elasticsearch.painless.node.SWhile;
import java.util.ArrayList;
import java.util.List;
/**
* Converts the ANTLR tree to a Painless tree.
*/
public final class Walker extends PainlessParserBaseVisitor<ANode> {
/**
 * Parses the given Painless source text into the root of the Painless AST.
 * @param source the raw script text
 * @param reserved tracker for reserved-word/feature usage discovered during the walk
 * @return the root {@link SSource} node
 */
public static SSource buildPainlessTree(final String source, final Reserved reserved) {
    final Walker walker = new Walker(source, reserved);
    return walker.source;
}
private final Reserved reserved;
private final SSource source;
private Walker(final String source, final Reserved reserved) {
    // Assign reserved before walking: visit methods invoked below (e.g. the loop
    // visitors) call reserved.usesLoop() while the tree is being built.
    this.reserved = reserved;
    // Build the ANTLR parse tree and immediately convert it; the root must be SSource.
    this.source = (SSource)visit(buildAntlrTree(source));
}
/**
 * Runs the ANTLR lexer and parser over the script text, using fail-fast error
 * handling so the first lex/parse error aborts compilation.
 */
private SourceContext buildAntlrTree(final String source) {
    final ANTLRInputStream input = new ANTLRInputStream(source);
    final PainlessLexer lexer = new ErrorHandlingLexer(input);
    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    final PainlessParser parser = new PainlessParser(tokens);
    // Strip the default console listeners and install the throwing strategy.
    lexer.removeErrorListeners();
    parser.removeErrorListeners();
    parser.setErrorHandler(new ParserErrorStrategy());
    return parser.source();
}
/** Formats a rule's start position as "[ line : column ]" for error messages and nodes. */
private String location(final ParserRuleContext ctx) {
    final StringBuilder location = new StringBuilder();
    location.append("[ ");
    location.append(ctx.getStart().getLine());
    location.append(" : ");
    location.append(ctx.getStart().getCharPositionInLine());
    location.append(" ]");
    return location.toString();
}
@Override
public ANode visitSource(final SourceContext ctx) {
    // Translate every top-level statement in order and wrap them in the root node.
    final List<AStatement> statements = new ArrayList<>();
    for (final StatementContext statementCtx : ctx.statement()) {
        final AStatement statement = (AStatement)visit(statementCtx);
        statements.add(statement);
    }
    return new SSource(location(ctx), statements);
}
@Override
public ANode visitIf(final IfContext ctx) {
    // block(0) is the mandatory if-branch; block(1) is the optional else-branch.
    final AExpression condition = (AExpression)visit(ctx.expression());
    final AStatement ifblock = (AStatement)visit(ctx.block(0));
    final AStatement elseblock;
    if (ctx.block(1) == null) {
        elseblock = null;
    } else {
        elseblock = (AStatement)visit(ctx.block(1));
    }
    return new SIfElse(location(ctx), condition, ifblock, elseblock);
}
@Override
public ANode visitWhile(final WhileContext ctx) {
    // Record loop usage first; the Reserved tracker only accumulates flags.
    reserved.usesLoop();
    final AExpression condition = (AExpression)visit(ctx.expression());
    final AStatement body;
    if (ctx.block() == null) {
        body = null; // e.g. "while (cond);"
    } else {
        body = (AStatement)visit(ctx.block());
    }
    return new SWhile(location(ctx), condition, body);
}
@Override
public ANode visitDo(final DoContext ctx) {
    // Record loop usage; translate the (optional) body before the condition,
    // matching source order of a do-while.
    reserved.usesLoop();
    final AStatement body;
    if (ctx.block() == null) {
        body = null;
    } else {
        body = (AStatement)visit(ctx.block());
    }
    final AExpression condition = (AExpression)visit(ctx.expression());
    return new SDo(location(ctx), body, condition);
}
/**
 * Builds a for loop. Every clause (initializer, condition, afterthought, body)
 * is optional in the grammar, so each child is translated only when present.
 */
@Override
public ANode visitFor(final ForContext ctx) {
    // Fix: local variable was previously misspelled "intializer".
    final ANode initializer = ctx.initializer() == null ? null : visit(ctx.initializer());
    final AExpression condition = ctx.expression() == null ? null : (AExpression)visit(ctx.expression());
    final AExpression afterthought = ctx.afterthought() == null ? null : (AExpression)visit(ctx.afterthought());
    final AStatement block = ctx.block() == null ? null : (AStatement)visit(ctx.block());

    reserved.usesLoop();

    return new SFor(location(ctx), initializer, condition, afterthought, block);
}
@Override
public ANode visitDecl(final DeclContext ctx) {
    // A declaration statement is a thin wrapper; delegate straight to the declaration rule.
    return visitDeclaration(ctx.declaration());
}
@Override
public ANode visitContinue(final ContinueContext ctx) {
    // A continue carries no operands, only its source location.
    final String location = location(ctx);
    return new SContinue(location);
}
@Override
public ANode visitBreak(final BreakContext ctx) {
    // A break carries no operands, only its source location.
    final String location = location(ctx);
    return new SBreak(location);
}
@Override
public ANode visitReturn(final ReturnContext ctx) {
    // A return in this grammar always carries a value expression.
    final AExpression value = (AExpression)visit(ctx.expression());
    return new SReturn(location(ctx), value);
}
@Override
public ANode visitTry(final TryContext ctx) {
    // Translate the protected block, then each catch ("trap") clause in order.
    final AStatement block = (AStatement)visit(ctx.block());
    final List<STrap> traps = new ArrayList<>();
    for (final TrapContext trapCtx : ctx.trap()) {
        final STrap trap = (STrap)visit(trapCtx);
        traps.add(trap);
    }
    return new STry(location(ctx), block, traps);
}
@Override
public ANode visitThrow(final ThrowContext ctx) {
    // The thrown value is a mandatory expression.
    final AExpression value = (AExpression)visit(ctx.expression());
    return new SThrow(location(ctx), value);
}
@Override
public ANode visitExpr(final ExprContext ctx) {
    // An expression used as a statement.
    final AExpression value = (AExpression)visit(ctx.expression());
    return new SExpression(location(ctx), value);
}
@Override
public ANode visitMultiple(final MultipleContext ctx) {
    // A braced block containing multiple statements; translate each in order.
    final List<AStatement> statements = new ArrayList<>();
    for (final StatementContext statementCtx : ctx.statement()) {
        final AStatement statement = (AStatement)visit(statementCtx);
        statements.add(statement);
    }
    return new SBlock(location(ctx), statements);
}
@Override
public ANode visitSingle(final SingleContext ctx) {
    // A braceless block holds exactly one statement; wrap it in a singleton list.
    final AStatement statement = (AStatement)visit(ctx.statement());
    final List<AStatement> statements = new ArrayList<>();
    statements.add(statement);
    return new SBlock(location(ctx), statements);
}
@Override
public ANode visitEmpty(final EmptyContext ctx) {
    // Empty statements are not converted into Painless nodes; reaching here is a bug.
    final String error = "Error " + location(ctx) + ": Unexpected state.";
    throw new IllegalStateException(error);
}
@Override
public ANode visitEmptyscope(final EmptyscopeContext ctx) {
    // Empty scopes are not converted into Painless nodes; reaching here is a bug.
    final String error = "Error " + location(ctx) + ": Unexpected state.";
    throw new IllegalStateException(error);
}
@Override
public ANode visitInitializer(final InitializerContext ctx) {
    // A for-initializer is either a declaration or a bare expression.
    if (ctx.declaration() != null) {
        return visit(ctx.declaration());
    }
    if (ctx.expression() != null) {
        return visit(ctx.expression());
    }
    throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
@Override
public ANode visitAfterthought(final AfterthoughtContext ctx) {
    // The afterthought is a plain expression; no wrapper node is needed.
    final ExpressionContext expression = ctx.expression();
    return visit(expression);
}
@Override
public ANode visitDeclaration(final DeclarationContext ctx) {
    // All variables declared in one statement share a single declared type.
    final String type = ctx.decltype().getText();
    final List<SDeclaration> declarations = new ArrayList<>();
    for (final DeclvarContext declvar : ctx.declvar()) {
        final String name = declvar.identifier().getText();
        // The initializer is optional; "int x;" yields a null expression.
        final AExpression expression = declvar.expression() == null ? null : (AExpression)visit(declvar.expression());
        // NOTE(review): each SDeclaration reuses the whole statement's location
        // rather than the individual declvar's — confirm this is intentional.
        declarations.add(new SDeclaration(location(ctx), type, name, expression));
    }
    return new SDeclBlock(location(ctx), declarations);
}
@Override
public ANode visitDecltype(final DecltypeContext ctx) {
    // Decltypes are consumed as text by their parents; visiting one directly is a bug.
    final String error = "Error " + location(ctx) + ": Unexpected state.";
    throw new IllegalStateException(error);
}
@Override
public ANode visitDeclvar(final DeclvarContext ctx) {
    // Declvars are handled inline by visitDeclaration; visiting one directly is a bug.
    final String error = "Error " + location(ctx) + ": Unexpected state.";
    throw new IllegalStateException(error);
}
@Override
public ANode visitTrap(final TrapContext ctx) {
    // identifier(0) is the exception type, identifier(1) the variable binding it.
    final String type = ctx.identifier(0).getText();
    final String name = ctx.identifier(1).getText();
    final AStatement block;
    if (ctx.block() == null) {
        block = null; // empty catch body
    } else {
        block = (AStatement)visit(ctx.block());
    }
    return new STrap(location(ctx), type, name, block);
}
@Override
public ANode visitIdentifier(final IdentifierContext ctx) {
    // Identifiers are consumed as text by their parents; visiting one directly is a bug.
    final String error = "Error " + location(ctx) + ": Unexpected state.";
    throw new IllegalStateException(error);
}
@Override
public ANode visitGeneric(final GenericContext ctx) {
    // Generic type contexts are consumed as text by their parents; visiting one is a bug.
    final String error = "Error " + location(ctx) + ": Unexpected state.";
    throw new IllegalStateException(error);
}
@Override
public ANode visitPrecedence(final PrecedenceContext ctx) {
    // Parentheses only affect parse precedence; no extra node is produced.
    final ExpressionContext expression = ctx.expression();
    return visit(expression);
}
@Override
public ANode visitNumeric(final NumericContext ctx) {
    // If this literal is the direct operand of a unary minus, fold the sign into
    // the literal text here; visitUnary skips emitting a negation node for that case.
    final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null;
    if (ctx.DECIMAL() != null) {
        return new EDecimal(location(ctx), (negate ? "-" : "") + ctx.DECIMAL().getText());
    } else if (ctx.HEX() != null) {
        // substring(2) strips the "0x" prefix; the node stores digits plus an explicit radix.
        return new ENumeric(location(ctx), (negate ? "-" : "") + ctx.HEX().getText().substring(2), 16);
    } else if (ctx.INTEGER() != null) {
        return new ENumeric(location(ctx), (negate ? "-" : "") + ctx.INTEGER().getText(), 10);
    } else if (ctx.OCTAL() != null) {
        // substring(1) strips the leading "0" octal prefix.
        return new ENumeric(location(ctx), (negate ? "-" : "") + ctx.OCTAL().getText().substring(1), 8);
    } else {
        throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
    }
}
@Override
public ANode visitTrue(final TrueContext ctx) {
    // Boolean literal "true".
    final String location = location(ctx);
    return new EBoolean(location, true);
}
/**
 * Builds a boolean literal node for {@code false}.
 */
@Override
public ANode visitFalse(final FalseContext ctx) { // added missing final for consistency with the rest of the class
    return new EBoolean(location(ctx), false);
}
@Override
public ANode visitNull(final NullContext ctx) {
    // Null literal; carries only its location.
    final String location = location(ctx);
    return new ENull(location);
}
@Override
public ANode visitPostinc(final PostincContext ctx) {
    // Post-increment/decrement: flatten the target chain, then pick the operation.
    final List<ALink> links = new ArrayList<>();
    visitChain(ctx.chain(), links);
    final Operation operation;
    if (ctx.INCR() != null) {
        operation = Operation.INCR;
    } else if (ctx.DECR() != null) {
        operation = Operation.DECR;
    } else {
        throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
    }
    // pre=false, post=true marks this as a post-fix mutation.
    return new EChain(location(ctx), links, false, true, operation, null);
}
@Override
public ANode visitPreinc(final PreincContext ctx) {
    // Pre-increment/decrement: flatten the target chain, then pick the operation.
    final List<ALink> links = new ArrayList<>();
    visitChain(ctx.chain(), links);
    final Operation operation;
    if (ctx.INCR() != null) {
        operation = Operation.INCR;
    } else if (ctx.DECR() != null) {
        operation = Operation.DECR;
    } else {
        throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
    }
    // pre=true, post=false marks this as a pre-fix mutation.
    return new EChain(location(ctx), links, true, false, operation, null);
}
@Override
public ANode visitRead(final ReadContext ctx) {
    // A bare chain read: no pre/post mutation and no assignment expression.
    final List<ALink> links = new ArrayList<>();
    visitChain(ctx.chain(), links);
    final EChain chain = new EChain(location(ctx), links, false, false, null, null);
    return chain;
}
@Override
public ANode visitUnary(final UnaryContext ctx) {
    // Negated numeric literals are folded directly into the literal by visitNumeric
    // (which inspects its parent for a SUB token), so no EUnary node is built here.
    if (ctx.SUB() != null && ctx.expression() instanceof NumericContext) {
        return visit(ctx.expression());
    } else {
        // Map the matched prefix token onto the corresponding operation.
        final Operation operation;
        if (ctx.BOOLNOT() != null) {
            operation = Operation.NOT;
        } else if (ctx.BWNOT() != null) {
            operation = Operation.BWNOT;
        } else if (ctx.ADD() != null) {
            operation = Operation.ADD;
        } else if (ctx.SUB() != null) {
            operation = Operation.SUB;
        } else {
            throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
        }
        return new EUnary(location(ctx), operation, (AExpression)visit(ctx.expression()));
    }
}
@Override
public ANode visitCast(final CastContext ctx) {
    // Explicit cast "(Type)expr": the target type is taken as raw text.
    final String type = ctx.decltype().getText();
    final AExpression child = (AExpression)visit(ctx.expression());
    return new EExplicit(location(ctx), type, child);
}
@Override
public ANode visitBinary(final BinaryContext ctx) {
    // Binary arithmetic/bitwise expression: translate both operands, then map the
    // matched operator token (exactly one accessor is non-null) onto an Operation.
    final AExpression left = (AExpression)visit(ctx.expression(0));
    final AExpression right = (AExpression)visit(ctx.expression(1));
    final Operation operation;
    if (ctx.MUL() != null) {
        operation = Operation.MUL;
    } else if (ctx.DIV() != null) {
        operation = Operation.DIV;
    } else if (ctx.REM() != null) {
        operation = Operation.REM;
    } else if (ctx.ADD() != null) {
        operation = Operation.ADD;
    } else if (ctx.SUB() != null) {
        operation = Operation.SUB;
    } else if (ctx.LSH() != null) {
        operation = Operation.LSH;
    } else if (ctx.RSH() != null) {
        operation = Operation.RSH;
    } else if (ctx.USH() != null) {
        operation = Operation.USH;
    } else if (ctx.BWAND() != null) {
        operation = Operation.BWAND;
    } else if (ctx.XOR() != null) {
        operation = Operation.XOR;
    } else if (ctx.BWOR() != null) {
        operation = Operation.BWOR;
    } else {
        throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
    }
    return new EBinary(location(ctx), operation, left, right);
}
/**
 * Builds a comparison node, mapping the matched comparison token (exactly one
 * accessor is non-null) onto its {@link Operation}.
 */
@Override
public ANode visitComp(final PainlessParser.CompContext ctx) { // added missing final for consistency with the rest of the class
    final AExpression left = (AExpression)visit(ctx.expression(0));
    final AExpression right = (AExpression)visit(ctx.expression(1));
    final Operation operation;
    if (ctx.LT() != null) {
        operation = Operation.LT;
    } else if (ctx.LTE() != null) {
        operation = Operation.LTE;
    } else if (ctx.GT() != null) {
        operation = Operation.GT;
    } else if (ctx.GTE() != null) {
        operation = Operation.GTE;
    } else if (ctx.EQ() != null) {
        operation = Operation.EQ;
    } else if (ctx.EQR() != null) {
        operation = Operation.EQR;
    } else if (ctx.NE() != null) {
        operation = Operation.NE;
    } else if (ctx.NER() != null) {
        operation = Operation.NER;
    } else {
        throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
    }
    return new EComp(location(ctx), operation, left, right);
}
/**
 * Builds a boolean and/or node from the matched BOOLAND/BOOLOR token.
 */
@Override
public ANode visitBool(final PainlessParser.BoolContext ctx) { // added missing final for consistency with the rest of the class
    final AExpression left = (AExpression)visit(ctx.expression(0));
    final AExpression right = (AExpression)visit(ctx.expression(1));
    final Operation operation;
    if (ctx.BOOLAND() != null) {
        operation = Operation.AND;
    } else if (ctx.BOOLOR() != null) {
        operation = Operation.OR;
    } else {
        throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
    }
    return new EBool(location(ctx), operation, left, right);
}
@Override
public ANode visitConditional(final ConditionalContext ctx) {
    // Ternary: expression(0) ? expression(1) : expression(2).
    final AExpression cond = (AExpression)visit(ctx.expression(0));
    final AExpression whenTrue = (AExpression)visit(ctx.expression(1));
    final AExpression whenFalse = (AExpression)visit(ctx.expression(2));

    return new EConditional(location(ctx), cond, whenTrue, whenFalse);
}
// Builds an EChain for both compound assignments (e.g. *=, +=) and plain assignment.
@Override
public ANode visitAssignment(final AssignmentContext ctx) {
final List<ALink> links = new ArrayList<>();
final Operation operation;
visitChain(ctx.chain(), links);
if (ctx.AMUL() != null) {
operation = Operation.MUL;
} else if (ctx.ADIV() != null) {
operation = Operation.DIV;
} else if (ctx.AREM() != null) {
operation = Operation.REM;
} else if (ctx.AADD() != null) {
operation = Operation.ADD;
} else if (ctx.ASUB() != null) {
operation = Operation.SUB;
} else if (ctx.ALSH() != null) {
operation = Operation.LSH;
} else if (ctx.ARSH() != null) {
operation = Operation.RSH;
} else if (ctx.AUSH() != null) {
operation = Operation.USH;
} else if (ctx.AAND() != null) {
operation = Operation.BWAND;
} else if (ctx.AXOR() != null) {
operation = Operation.XOR;
} else if (ctx.AOR() != null) {
operation = Operation.BWOR;
} else {
// No compound-assignment operator token matched: this is a plain assignment,
// which EChain represents with a null operation.
operation = null;
}
return new EChain(location(ctx), links, false, false, operation, (AExpression)visit(ctx.expression()));
}
// Dispatches the head of a chain to the matching link visitor, appending the
// resulting links to the given list in chain order.
private void visitChain(final ChainContext ctx, final List<ALink> links) {
if (ctx.linkprec() != null) {
visitLinkprec(ctx.linkprec(), links);
} else if (ctx.linkcast() != null) {
visitLinkcast(ctx.linkcast(), links);
} else if (ctx.linkvar() != null) {
visitLinkvar(ctx.linkvar(), links);
} else if (ctx.linknew() != null) {
visitLinknew(ctx.linknew(), links);
} else if (ctx.linkstring() != null) {
visitLinkstring(ctx.linkstring(), links);
} else {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
}
// Chains are always visited through the private overload above so the accumulated
// link list can be threaded through; reaching this visitor directly is a walker bug.
@Override
public ANode visitChain(final ChainContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Visits the head of a parenthesized (precedence) chain, then any trailing
// brace/dot links, appending each link to the accumulated list.
private void visitLinkprec(final LinkprecContext ctx, final List<ALink> links) {
if (ctx.linkprec() != null) {
visitLinkprec(ctx.linkprec(), links);
} else if (ctx.linkcast() != null) {
visitLinkcast(ctx.linkcast(), links);
} else if (ctx.linkvar() != null) {
visitLinkvar(ctx.linkvar(), links);
} else if (ctx.linknew() != null) {
visitLinknew(ctx.linknew(), links);
} else if (ctx.linkstring() != null) {
visitLinkstring(ctx.linkstring(), links);
} else {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// A parenthesized chain may be followed by an index ([...]) or a dot access.
if (ctx.linkbrace() != null) {
visitLinkbrace(ctx.linkbrace(), links);
} else if (ctx.linkdot() != null) {
visitLinkdot(ctx.linkdot(), links);
}
}
// Links are always visited through the private overloads that thread the link list;
// reaching one of these public visitors directly is a walker bug.
@Override
public ANode visitLinkprec(final LinkprecContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Visits a cast applied to a chain: the target of the cast is visited first so the
// LCast link is appended after (applied to) the casted value.
private void visitLinkcast(final LinkcastContext ctx, final List<ALink> links) {
if (ctx.linkprec() != null) {
visitLinkprec(ctx.linkprec(), links);
} else if (ctx.linkcast() != null) {
visitLinkcast(ctx.linkcast(), links);
} else if (ctx.linkvar() != null) {
visitLinkvar(ctx.linkvar(), links);
} else if (ctx.linknew() != null) {
visitLinknew(ctx.linknew(), links);
} else if (ctx.linkstring() != null) {
visitLinkstring(ctx.linkstring(), links);
} else {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
links.add(new LCast(location(ctx), ctx.decltype().getText()));
}
@Override
public ANode visitLinkcast(final LinkcastContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Visits an array/map-style index access ([expression]) followed by any further links.
private void visitLinkbrace(final LinkbraceContext ctx, final List<ALink> links) {
links.add(new LBrace(location(ctx), (AExpression)visit(ctx.expression())));
if (ctx.linkbrace() != null) {
visitLinkbrace(ctx.linkbrace(), links);
} else if (ctx.linkdot() != null) {
visitLinkdot(ctx.linkdot(), links);
}
}
@Override
public ANode visitLinkbrace(final LinkbraceContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// A dot access is either a method call or a field access; no token means the
// grammar produced neither, so nothing is appended.
private void visitLinkdot(final LinkdotContext ctx, final List<ALink> links) {
if (ctx.linkcall() != null) {
visitLinkcall(ctx.linkcall(), links);
} else if (ctx.linkfield() != null) {
visitLinkfield(ctx.linkfield(), links);
}
}
@Override
public ANode visitLinkdot(final LinkdotContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Visits a method call link: collects its argument expressions, then any
// trailing brace/dot links applied to the call's result.
private void visitLinkcall(final LinkcallContext ctx, final List<ALink> links) {
final List<AExpression> arguments = new ArrayList<>();
for (final ExpressionContext expression : ctx.arguments().expression()) {
arguments.add((AExpression)visit(expression));
}
links.add(new LCall(location(ctx), ctx.EXTID().getText(), arguments));
if (ctx.linkbrace() != null) {
visitLinkbrace(ctx.linkbrace(), links);
} else if (ctx.linkdot() != null) {
visitLinkdot(ctx.linkdot(), links);
}
}
// Links are always visited through the private overloads that thread the link list;
// reaching one of these public visitors directly is a walker bug.
@Override
public ANode visitLinkcall(final LinkcallContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Visits a variable link at the start of a chain.
private void visitLinkvar(final LinkvarContext ctx, final List<ALink> links) {
final String name = ctx.identifier().getText();
// Record the name so the compiler can track usage of reserved identifiers.
reserved.markReserved(name);
links.add(new LVariable(location(ctx), name));
if (ctx.linkbrace() != null) {
visitLinkbrace(ctx.linkbrace(), links);
} else if (ctx.linkdot() != null) {
visitLinkdot(ctx.linkdot(), links);
}
}
@Override
public ANode visitLinkvar(final LinkvarContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Visits a field access link; the field may be a name (EXTID) or an integer
// (EXTINTEGER, e.g. an array-style numeric accessor).
private void visitLinkfield(final LinkfieldContext ctx, final List<ALink> links) {
final String value;
if (ctx.EXTID() != null) {
value = ctx.EXTID().getText();
} else if (ctx.EXTINTEGER() != null) {
value = ctx.EXTINTEGER().getText();
} else {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
links.add(new LField(location(ctx), value));
if (ctx.linkbrace() != null) {
visitLinkbrace(ctx.linkbrace(), links);
} else if (ctx.linkdot() != null) {
visitLinkdot(ctx.linkdot(), links);
}
}
@Override
public ANode visitLinkfield(final LinkfieldContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Visits a 'new' link: an argument list means a constructor call (LNewObj);
// bare expressions mean array dimensions (LNewArray).
private void visitLinknew(final LinknewContext ctx, final List<ALink> links) {
final List<AExpression> arguments = new ArrayList<>();
if (ctx.arguments() != null) {
for (final ExpressionContext expression : ctx.arguments().expression()) {
arguments.add((AExpression)visit(expression));
}
links.add(new LNewObj(location(ctx), ctx.identifier().getText(), arguments));
} else if (ctx.expression().size() > 0) {
for (final ExpressionContext expression : ctx.expression()) {
arguments.add((AExpression)visit(expression));
}
links.add(new LNewArray(location(ctx), ctx.identifier().getText(), arguments));
} else {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Only a dot access may directly follow a 'new' expression.
if (ctx.linkdot() != null) {
visitLinkdot(ctx.linkdot(), links);
}
}
// Links are always visited through the private overload above; reaching this
// public visitor directly is a walker bug.
@Override
public ANode visitLinknew(final LinknewContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Visits a string-constant link at the start of a chain.
private void visitLinkstring(final LinkstringContext ctx, final List<ALink> links) {
    // Hoist the token text so it is only fetched once; strip the surrounding quotes.
    final String token = ctx.STRING().getText();
    links.add(new LString(location(ctx), token.substring(1, token.length() - 1)));

    // A string constant may be followed by an index ([...]) or a dot access.
    if (ctx.linkbrace() != null) {
        visitLinkbrace(ctx.linkbrace(), links);
    } else if (ctx.linkdot() != null) {
        visitLinkdot(ctx.linkdot(), links);
    }
}

// Links are always visited through the private overload above; reaching this
// public visitor directly is a walker bug.
@Override
public ANode visitLinkstring(final LinkstringContext ctx) {
    throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
// Argument lists are consumed in-line by visitLinkcall/visitLinknew rather than
// visited as standalone nodes, so reaching this visitor directly is a walker bug.
@Override
public ANode visitArguments(final ArgumentsContext ctx) {
throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state.");
}
}

View File

@ -0,0 +1,173 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* The superclass for all E* (expression) nodes.
*/
public abstract class AExpression extends ANode {
/**
* Set to false when an expression will not be read from such as
* a basic assignment. Note this variable is always set by the parent
* as input.
*/
protected boolean read = true;
/**
* Set to true when an expression can be considered a stand alone
* statement. Used to prevent extraneous bytecode. This is always
* set by the node as output.
*/
protected boolean statement = false;
/**
* Set to the expected type this node needs to be. Note this variable
* is always set by the parent as input and should never be read from.
*/
protected Type expected = null;
/**
* Set to the actual type this node is. Note this variable is always
* set by the node as output and should only be read from outside of the
* node itself. <b>Also, actual can always be read after a cast is
* called on this node to get the type of the node after the cast.</b>
*/
protected Type actual = null;
/**
* Set by {@link EExplicit} if a cast made on an expression node should be
* explicit.
*/
protected boolean explicit = false;
/**
* Set to the value of the constant this expression node represents if
* and only if the node represents a constant. If this is not null
* this node will be replaced by an {@link EConstant} during casting
* if it's not already one.
*/
protected Object constant = null;
/**
* Set to true by {@link ENull} to represent a null value.
*/
protected boolean isNull = false;
/**
* If an expression represents a branch statement, represents the jump should
* the expression evaluate to a true value. It should always be the case that only
* one of tru and fals are non-null or both are null. Only used during the writing phase.
*/
protected Label tru = null;
/**
* If an expression represents a branch statement, represents the jump should
* the expression evaluate to a false value. It should always be the case that only
* one of tru and fals are non-null or both are null. Only used during the writing phase.
*/
protected Label fals = null;
public AExpression(final String location) {
super(location);
}
/**
* Checks for errors and collects data for the writing phase.
*/
abstract void analyze(final CompilerSettings settings, final Definition definition, final Variables variables);
/**
* Writes ASM based on the data collected during the analysis phase.
*/
abstract void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter);
/**
* Inserts {@link ECast} nodes into the tree for implicit casts. Also replaces
* nodes with the constant variable set to a non-null value with {@link EConstant}.
* @return The new child node for the parent node calling this method.
*/
AExpression cast(final CompilerSettings settings, final Definition definition, final Variables variables) {
final Cast cast = AnalyzerCaster.getLegalCast(definition, location, actual, expected, explicit);
if (cast == null) {
if (constant == null || this instanceof EConstant) {
// No cast is needed and there is no constant to promote: keep this node.
return this;
} else {
// No cast is needed, but analysis folded a constant: replace this node
// with an EConstant carrying that value.
final EConstant econstant = new EConstant(location, constant);
econstant.analyze(settings, definition, variables);
if (!expected.equals(econstant.actual)) {
throw new IllegalStateException(error("Illegal tree structure."));
}
return econstant;
}
} else {
if (constant == null) {
// A cast is needed and there is no constant: wrap this node in an ECast,
// propagating the flags the parent will read after casting.
final ECast ecast = new ECast(location, this, cast);
ecast.statement = statement;
ecast.actual = expected;
ecast.isNull = isNull;
return ecast;
} else {
if (expected.sort.constant) {
// The target type can itself hold a constant: perform the cast at
// compile time and replace this node with the resulting EConstant.
constant = AnalyzerCaster.constCast(location, constant, cast);
final EConstant econstant = new EConstant(location, constant);
econstant.analyze(settings, definition, variables);
if (!expected.equals(econstant.actual)) {
throw new IllegalStateException(error("Illegal tree structure."));
}
return econstant;
} else if (this instanceof EConstant) {
// Already a constant node: just wrap it in the required cast.
final ECast ecast = new ECast(location, this, cast);
ecast.actual = expected;
return ecast;
} else {
// Fold the constant into an EConstant first, then wrap that in the cast
// since the target type cannot represent the constant directly.
final EConstant econstant = new EConstant(location, constant);
econstant.analyze(settings, definition, variables);
if (!actual.equals(econstant.actual)) {
throw new IllegalStateException(error("Illegal tree structure."));
}
final ECast ecast = new ECast(location, econstant, cast);
ecast.actual = expected;
return ecast;
}
}
}
}
}

View File

@ -0,0 +1,121 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* The superclass for all L* (link) nodes.
*/
public abstract class ALink extends ANode {
/**
* Size is set to a value based on this link's size on the stack. This is
* used during the writing phase to dup stack values from this link as
* necessary during certain store operations.
*/
final int size;
/**
* Set to false only if the link is not going to be read from.
*/
boolean load = true;
/**
* Set to true only if the link is going to be written to and
* is the final link in a chain.
*/
boolean store = false;
/**
* Set to true if this link represents a statik type to be accessed.
* (Spelled "statik" to avoid the Java keyword.)
*/
boolean statik = false;
/**
* Set by the parent chain to type of the previous link or null if
* there was no previous link.
*/
Type before = null;
/**
* Set by the link to be the type after the link has been loaded/stored.
*/
Type after = null;
/**
* Set to true if this link could be a stand-alone statement.
*/
boolean statement = false;
/**
* Used by {@link LString} to set the value of the String constant. Also
* used by shortcuts to represent a constant key.
*/
String string = null;
ALink(final String location, final int size) {
super(location);
this.size = size;
}
/**
* Checks for errors and collects data for the writing phase.
* @return Possibly returns a different {@link ALink} node if a type is
* def or a shortcut is used. Otherwise, returns itself. This will be
* updated into the {@link EChain} node's list of links.
*/
abstract ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables);
/**
* Write values before a load/store occurs such as an array index.
*/
abstract void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter);
/**
* Write a load for the specific link type.
*/
abstract void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter);
/**
* Write a store for the specific link type.
*/
abstract void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter);
/**
* Used to copy link data from one to another during analysis in the case of replacement.
* Note {@code size} is not copied; it is fixed at construction time.
* @return This link, with the analysis flags copied from {@code link}.
*/
final ALink copy(final ALink link) {
load = link.load;
store = link.store;
statik = link.statik;
before = link.before;
after = link.after;
statement = link.statement;
string = link.string;
return this;
}
}

View File

@ -0,0 +1,39 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
/**
 * The superclass for all other nodes.
 */
public abstract class ANode {
    /**
     * The location in the original source to be printed in error messages.
     */
    final String location;

    ANode(final String location) {
        this.location = location;
    }

    /**
     * Formats an error message with this node's source location prepended.
     */
    public String error(final String message) {
        return new StringBuilder("Error ").append(location).append(": ").append(message).toString();
    }
}

View File

@ -0,0 +1,125 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* The superclass for all S* (statement) nodes.
*/
public abstract class AStatement extends ANode {
/**
* Set to true when the final statement in an {@link SSource} is reached.
* Used to determine whether or not an auto-return is necessary.
*/
boolean lastSource = false;
/**
* Set to true when a loop begins. Used by {@link SBlock} to help determine
* when the final statement of a loop is reached.
*/
boolean beginLoop = false;
/**
* Set to true when inside a loop. Used by {@link SBreak} and {@link SContinue}
* to determine if a break/continue statement is legal.
*/
boolean inLoop = false;
/**
* Set to true when on the last statement of a loop. Used by {@link SContinue}
* to prevent extraneous continue statements.
*/
boolean lastLoop = false;
/**
* Set to true if a statement would cause the method to exit. Used to
* determine whether or not an auto-return is necessary.
*/
boolean methodEscape = false;
/**
* Set to true if a statement would cause a loop to exit. Used to
* prevent unreachable statements.
*/
boolean loopEscape = false;
/**
* Set to true if all current paths escape from the current {@link SBlock}.
* Used during the analysis phase to prevent unreachable statements and
* the writing phase to prevent extraneous bytecode gotos from being written.
*/
boolean allEscape = false;
/**
* Set to true if any continue statement occurs in a loop. Used to prevent
* unnecessary infinite loops.
*/
boolean anyContinue = false;
/**
* Set to true if any break statement occurs in a loop. Used to prevent
* extraneous loops.
*/
boolean anyBreak = false;
/**
* Set to the loop counter variable slot as a shortcut if loop statements
* are being counted. Remains -1 when loop counting is disabled.
*/
int loopCounterSlot = -1;
/**
* Set to the approximate number of statements in a loop block to prevent
* infinite loops during runtime.
*/
int statementCount = 0;
/**
* Set to the beginning of a loop so a continue statement knows where to
* jump to. Only used during the writing phase.
* (Spelled "continu" to avoid the Java keyword.)
*/
Label continu = null;
/**
* Set to the beginning of a loop so a break statement knows where to
* jump to. Only used during the writing phase.
* (Spelled "brake" to avoid the Java keyword.)
*/
Label brake = null;
AStatement(final String location) {
super(location);
}
/**
* Checks for errors and collects data for the writing phase.
*/
abstract void analyze(final CompilerSettings settings, final Definition definition, final Variables variables);
/**
* Writes ASM based on the data collected during the analysis phase.
*/
abstract void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter);
}

View File

@ -0,0 +1,528 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a binary math expression.
*/
public final class EBinary extends AExpression {
final Operation operation;
AExpression left;
AExpression right;
boolean cat = false;
/**
* @param location the source location used in error messages
* @param operation the binary math operation this node performs
* @param left the left-hand operand expression
* @param right the right-hand operand expression
*/
public EBinary(final String location, final Operation operation, final AExpression left, final AExpression right) {
super(location);
this.operation = operation;
this.left = left;
this.right = right;
}
// Dispatches to the analyzer for this node's specific operation.
@Override
void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
if (operation == Operation.MUL) {
analyzeMul(settings, definition, variables);
} else if (operation == Operation.DIV) {
analyzeDiv(settings, definition, variables);
} else if (operation == Operation.REM) {
analyzeRem(settings, definition, variables);
} else if (operation == Operation.ADD) {
analyzeAdd(settings, definition, variables);
} else if (operation == Operation.SUB) {
analyzeSub(settings, definition, variables);
} else if (operation == Operation.LSH) {
analyzeLSH(settings, definition, variables);
} else if (operation == Operation.RSH) {
analyzeRSH(settings, definition, variables);
} else if (operation == Operation.USH) {
analyzeUSH(settings, definition, variables);
} else if (operation == Operation.BWAND) {
analyzeBWAnd(settings, definition, variables);
} else if (operation == Operation.XOR) {
analyzeXor(settings, definition, variables);
} else if (operation == Operation.BWOR) {
analyzeBWOr(settings, definition, variables);
} else {
throw new IllegalStateException(error("Illegal tree structure."));
}
}
// Analyzes a multiply: promotes both operands to a common numeric type, inserts
// implicit casts, and constant-folds when both operands are constants. The
// numeric-overflow setting selects between wrapping and exact (throwing) arithmetic.
private void analyzeMul(final CompilerSettings settings, final Definition definition, final Variables variables) {
left.analyze(settings, definition, variables);
right.analyze(settings, definition, variables);
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true);
if (promote == null) {
throw new ClassCastException(error("Cannot apply multiply [*] to types " +
"[" + left.actual.name + "] and [" + right.actual.name + "]."));
}
left.expected = promote;
right.expected = promote;
left = left.cast(settings, definition, variables);
right = right.cast(settings, definition, variables);
if (left.constant != null && right.constant != null) {
final boolean overflow = settings.getNumericOverflow();
final Sort sort = promote.sort;
if (sort == Sort.INT) {
constant = overflow ? (int)left.constant * (int)right.constant :
Math.multiplyExact((int)left.constant, (int)right.constant);
} else if (sort == Sort.LONG) {
constant = overflow ? (long)left.constant * (long)right.constant :
Math.multiplyExact((long)left.constant, (long)right.constant);
} else if (sort == Sort.FLOAT) {
constant = overflow ? (float)left.constant * (float)right.constant :
org.elasticsearch.painless.Utility.multiplyWithoutOverflow((float)left.constant, (float)right.constant);
} else if (sort == Sort.DOUBLE) {
constant = overflow ? (double)left.constant * (double)right.constant :
org.elasticsearch.painless.Utility.multiplyWithoutOverflow((double)left.constant, (double)right.constant);
} else {
throw new IllegalStateException(error("Illegal tree structure."));
}
}
actual = promote;
}
// Analyzes a divide: same promote/cast/constant-fold shape as analyzeMul,
// with the overflow setting selecting wrapping vs. checked division.
private void analyzeDiv(final CompilerSettings settings, final Definition definition, final Variables variables) {
left.analyze(settings, definition, variables);
right.analyze(settings, definition, variables);
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true);
if (promote == null) {
throw new ClassCastException(error("Cannot apply divide [/] to types " +
"[" + left.actual.name + "] and [" + right.actual.name + "]."));
}
left.expected = promote;
right.expected = promote;
left = left.cast(settings, definition, variables);
right = right.cast(settings, definition, variables);
if (left.constant != null && right.constant != null) {
final boolean overflow = settings.getNumericOverflow();
final Sort sort = promote.sort;
if (sort == Sort.INT) {
constant = overflow ? (int)left.constant / (int)right.constant :
org.elasticsearch.painless.Utility.divideWithoutOverflow((int)left.constant, (int)right.constant);
} else if (sort == Sort.LONG) {
constant = overflow ? (long)left.constant / (long)right.constant :
org.elasticsearch.painless.Utility.divideWithoutOverflow((long)left.constant, (long)right.constant);
} else if (sort == Sort.FLOAT) {
constant = overflow ? (float)left.constant / (float)right.constant :
org.elasticsearch.painless.Utility.divideWithoutOverflow((float)left.constant, (float)right.constant);
} else if (sort == Sort.DOUBLE) {
constant = overflow ? (double)left.constant / (double)right.constant :
org.elasticsearch.painless.Utility.divideWithoutOverflow((double)left.constant, (double)right.constant);
} else {
throw new IllegalStateException(error("Illegal tree structure."));
}
}
actual = promote;
}
// Analyzes a remainder. Note the overflow setting is only consulted for
// floating-point remainders; int/long remainders are computed directly.
private void analyzeRem(final CompilerSettings settings, final Definition definition, final Variables variables) {
left.analyze(settings, definition, variables);
right.analyze(settings, definition, variables);
final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true);
if (promote == null) {
throw new ClassCastException(error("Cannot apply remainder [%] to types " +
"[" + left.actual.name + "] and [" + right.actual.name + "]."));
}
left.expected = promote;
right.expected = promote;
left = left.cast(settings, definition, variables);
right = right.cast(settings, definition, variables);
if (left.constant != null && right.constant != null) {
final boolean overflow = settings.getNumericOverflow();
final Sort sort = promote.sort;
if (sort == Sort.INT) {
constant = (int)left.constant % (int)right.constant;
} else if (sort == Sort.LONG) {
constant = (long)left.constant % (long)right.constant;
} else if (sort == Sort.FLOAT) {
constant = overflow ? (float)left.constant % (float)right.constant :
org.elasticsearch.painless.Utility.remainderWithoutOverflow((float)left.constant, (float)right.constant);
} else if (sort == Sort.DOUBLE) {
constant = overflow ? (double)left.constant % (double)right.constant :
org.elasticsearch.painless.Utility.remainderWithoutOverflow((double)left.constant, (double)right.constant);
} else {
throw new IllegalStateException(error("Illegal tree structure."));
}
}
actual = promote;
}
// Analyzes an add, which is either numeric addition or string concatenation
// depending on the promoted type.
private void analyzeAdd(final CompilerSettings settings, final Definition definition, final Variables variables) {
left.analyze(settings, definition, variables);
right.analyze(settings, definition, variables);
final Type promote = AnalyzerCaster.promoteAdd(definition, left.actual, right.actual);
if (promote == null) {
throw new ClassCastException(error("Cannot apply add [+] to types " +
"[" + left.actual.name + "] and [" + right.actual.name + "]."));
}
final Sort sort = promote.sort;
if (sort == Sort.STRING) {
// String concatenation: operands keep their own types. The cat flag is set on
// nested string adds — presumably so the writer can chain one concatenation
// instead of building intermediate strings; confirm against the writer.
left.expected = left.actual;
if (left instanceof EBinary && ((EBinary)left).operation == Operation.ADD && left.actual.sort == Sort.STRING) {
((EBinary)left).cat = true;
}
right.expected = right.actual;
if (right instanceof EBinary && ((EBinary)right).operation == Operation.ADD && right.actual.sort == Sort.STRING) {
((EBinary)right).cat = true;
}
} else {
left.expected = promote;
right.expected = promote;
}
left = left.cast(settings, definition, variables);
right = right.cast(settings, definition, variables);
if (left.constant != null && right.constant != null) {
final boolean overflow = settings.getNumericOverflow();
if (sort == Sort.INT) {
constant = overflow ? (int)left.constant + (int)right.constant :
Math.addExact((int)left.constant, (int)right.constant);
} else if (sort == Sort.LONG) {
constant = overflow ? (long)left.constant + (long)right.constant :
Math.addExact((long)left.constant, (long)right.constant);
} else if (sort == Sort.FLOAT) {
constant = overflow ? (float)left.constant + (float)right.constant :
org.elasticsearch.painless.Utility.addWithoutOverflow((float)left.constant, (float)right.constant);
} else if (sort == Sort.DOUBLE) {
constant = overflow ? (double)left.constant + (double)right.constant :
org.elasticsearch.painless.Utility.addWithoutOverflow((double)left.constant, (double)right.constant);
} else if (sort == Sort.STRING) {
constant = "" + left.constant + right.constant;
} else {
throw new IllegalStateException(error("Illegal tree structure."));
}
}
actual = promote;
}
/**
 * Type-checks the subtract [-] operation: promotes both operands to a shared
 * numeric type and constant-folds when both sides are compile-time constants.
 */
private void analyzeSub(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promoted = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true);

    if (promoted == null) {
        throw new ClassCastException(error("Cannot apply subtract [-] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promoted;
    right.expected = promoted;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Fold a constant result; the overflow setting picks raw vs. checked arithmetic.
    if (left.constant != null && right.constant != null) {
        final boolean overflow = settings.getNumericOverflow();

        switch (promoted.sort) {
            case INT:
                constant = overflow ? (int)left.constant - (int)right.constant :
                    Math.subtractExact((int)left.constant, (int)right.constant);
                break;
            case LONG:
                constant = overflow ? (long)left.constant - (long)right.constant :
                    Math.subtractExact((long)left.constant, (long)right.constant);
                break;
            case FLOAT:
                constant = overflow ? (float)left.constant - (float)right.constant :
                    org.elasticsearch.painless.Utility.subtractWithoutOverflow((float)left.constant, (float)right.constant);
                break;
            case DOUBLE:
                constant = overflow ? (double)left.constant - (double)right.constant :
                    org.elasticsearch.painless.Utility.subtractWithoutOverflow((double)left.constant, (double)right.constant);
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = promoted;
}
/**
 * Type-checks the left shift [&lt;&lt;] operation.  Only the left operand drives
 * the promotion; the shift amount is always an explicit int.
 */
private void analyzeLSH(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promoted = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true);

    if (promoted == null) {
        throw new ClassCastException(error("Cannot apply left shift [<<] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promoted;

    // The shift amount is cast down to int regardless of the promoted type.
    right.expected = definition.intType;
    right.explicit = true;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Fold the shift when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promoted.sort) {
            case INT:
                constant = (int)left.constant << (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant << (int)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = promoted;
}
/**
 * Type-checks the arithmetic right shift [&gt;&gt;] operation.  Only the left
 * operand drives the promotion; the shift amount is always an explicit int.
 */
private void analyzeRSH(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promoted = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true);

    if (promoted == null) {
        throw new ClassCastException(error("Cannot apply right shift [>>] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promoted;

    // The shift amount is cast down to int regardless of the promoted type.
    right.expected = definition.intType;
    right.explicit = true;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Fold the shift when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promoted.sort) {
            case INT:
                constant = (int)left.constant >> (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant >> (int)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = promoted;
}
/**
 * Type-checks the unsigned right shift [&gt;&gt;&gt;] operation.  Only the left
 * operand drives the promotion; the shift amount is always an explicit int.
 */
private void analyzeUSH(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promoted = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true);

    if (promoted == null) {
        throw new ClassCastException(error("Cannot apply unsigned shift [>>>] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promoted;

    // The shift amount is cast down to int regardless of the promoted type.
    right.expected = definition.intType;
    right.explicit = true;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Fold the shift when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promoted.sort) {
            case INT:
                constant = (int)left.constant >>> (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant >>> (int)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = promoted;
}
/**
 * Type-checks the bitwise and [&amp;] operation: promotes both operands to a
 * shared integral type and constant-folds when both sides are constants.
 */
private void analyzeBWAnd(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promoted = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, false, true);

    if (promoted == null) {
        throw new ClassCastException(error("Cannot apply and [&] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promoted;
    right.expected = promoted;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Fold a constant result when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promoted.sort) {
            case INT:
                constant = (int)left.constant & (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant & (long)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = promoted;
}
/**
 * Type-checks the xor [^] operation.  Unlike the other bitwise operators,
 * xor also applies to booleans, so promoteXor is used for promotion.
 */
private void analyzeXor(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promoted = AnalyzerCaster.promoteXor(definition, left.actual, right.actual);

    if (promoted == null) {
        throw new ClassCastException(error("Cannot apply xor [^] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promoted;
    right.expected = promoted;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Fold a constant result when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promoted.sort) {
            case BOOL:
                constant = (boolean)left.constant ^ (boolean)right.constant;
                break;
            case INT:
                constant = (int)left.constant ^ (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant ^ (long)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = promoted;
}
/**
 * Type-checks the bitwise or [|] operation: promotes both operands to a
 * shared integral type and constant-folds when both sides are constants.
 */
private void analyzeBWOr(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promoted = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, false, true);

    if (promoted == null) {
        throw new ClassCastException(error("Cannot apply or [|] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promoted;
    right.expected = promoted;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Fold a constant result when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promoted.sort) {
            case INT:
                constant = (int)left.constant | (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant | (long)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = promoted;
}
/**
 * Emits bytecode for this binary operation.  String concatenation is
 * special-cased so that a chain of adds appends into one shared
 * StringBuilder rather than nesting builders.
 */
@Override
void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
    if (actual.sort == Sort.STRING && operation == Operation.ADD) {
        // When 'cat' is set this node is a nested piece of a larger concatenation
        // and the parent has already created the StringBuilder.
        if (!cat) {
            WriterUtility.writeNewStrings(adapter);
        }

        left.write(settings, definition, adapter);

        // Append the left value unless it is itself a nested string add that
        // already appended into the shared builder.
        if (!(left instanceof EBinary) || ((EBinary)left).operation != Operation.ADD || left.actual.sort != Sort.STRING) {
            WriterUtility.writeAppendStrings(adapter, left.actual.sort);
        }

        right.write(settings, definition, adapter);

        if (!(right instanceof EBinary) || ((EBinary)right).operation != Operation.ADD || right.actual.sort != Sort.STRING) {
            WriterUtility.writeAppendStrings(adapter, right.actual.sort);
        }

        // Only the outermost concatenation converts the builder back to a String.
        if (!cat) {
            WriterUtility.writeToStrings(adapter);
        }
    } else {
        left.write(settings, definition, adapter);
        right.write(settings, definition, adapter);

        WriterUtility.writeBinaryInstruction(settings, definition, adapter, location, actual, operation);
    }

    // Jump to the tru/fals labels when the parent supplied a branch context.
    WriterUtility.writeBranch(adapter, tru, fals);
}
}

View File

@ -0,0 +1,139 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a boolean expression.
*/
public final class EBool extends AExpression {
    // Either Operation.AND (&&) or Operation.OR (||).
    final Operation operation;
    AExpression left;
    AExpression right;

    public EBool(final String location, final Operation operation, final AExpression left, final AExpression right) {
        super(location);

        this.operation = operation;
        this.left = left;
        this.right = right;
    }

    /**
     * Casts both operands to boolean and constant-folds the expression when
     * both sides are compile-time constants.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        left.expected = definition.booleanType;
        left.analyze(settings, definition, variables);
        left = left.cast(settings, definition, variables);

        right.expected = definition.booleanType;
        right.analyze(settings, definition, variables);
        right = right.cast(settings, definition, variables);

        // Fold a constant result when both operands are constants.
        if (left.constant != null && right.constant != null) {
            if (operation == Operation.AND) {
                constant = (boolean)left.constant && (boolean)right.constant;
            } else if (operation == Operation.OR) {
                constant = (boolean)left.constant || (boolean)right.constant;
            } else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
        }

        actual = definition.booleanType;
    }

    /**
     * Emits short-circuit bytecode.  When the parent supplied tru/fals jump
     * labels (branch context) the result is a jump; otherwise a boolean value
     * is left on the stack.
     */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        if (tru != null || fals != null) {
            // Branch context: wire the operands' own labels for short-circuiting.
            if (operation == Operation.AND) {
                // If the left operand is false the whole expression is false.
                final Label localfals = fals == null ? new Label() : fals;

                left.fals = localfals;
                right.tru = tru;
                right.fals = fals;

                left.write(settings, definition, adapter);
                right.write(settings, definition, adapter);

                // Mark the synthetic false label only when the parent had none.
                if (fals == null) {
                    adapter.mark(localfals);
                }
            } else if (operation == Operation.OR) {
                // If the left operand is true the whole expression is true.
                final Label localtru = tru == null ? new Label() : tru;

                left.tru = localtru;
                right.tru = tru;
                right.fals = fals;

                left.write(settings, definition, adapter);
                right.write(settings, definition, adapter);

                if (tru == null) {
                    adapter.mark(localtru);
                }
            } else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
        } else {
            // Value context: materialize true/false on the stack with local labels.
            if (operation == Operation.AND) {
                final Label localfals = new Label();
                final Label end = new Label();

                left.fals = localfals;
                right.fals = localfals;

                left.write(settings, definition, adapter);
                right.write(settings, definition, adapter);

                adapter.push(true);
                adapter.goTo(end);
                adapter.mark(localfals);
                adapter.push(false);
                adapter.mark(end);
            } else if (operation == Operation.OR) {
                final Label localtru = new Label();
                final Label localfals = new Label();
                final Label end = new Label();

                left.tru = localtru;
                right.fals = localfals;

                left.write(settings, definition, adapter);
                right.write(settings, definition, adapter);

                adapter.mark(localtru);
                adapter.push(true);
                adapter.goTo(end);
                adapter.mark(localfals);
                adapter.push(false);
                adapter.mark(end);
            } else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
        }
    }
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a boolean constant.
*/
public final class EBoolean extends AExpression {
    public EBoolean(final String location, final boolean constant) {
        super(location);

        // Store the literal value directly as this node's compile-time constant.
        this.constant = constant;
    }

    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        actual = definition.booleanType;
    }

    /**
     * Never reached: since 'constant' is always set, the parent is expected to
     * replace this node before the write phase (presumably with an EConstant —
     * TODO confirm against the tree-building code).
     */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        throw new IllegalArgumentException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,60 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents an implicit cast in most cases, though it will replace
* explicit casts in the tree for simplicity. (Internal only.)
*/
final class ECast extends AExpression {
    // Target type name; always null in this constructor since the cast is pre-resolved.
    final String type;

    AExpression child;

    // The already-resolved cast to apply to the child's value.
    Cast cast = null;

    ECast(final String location, final AExpression child, final Cast cast) {
        super(location);

        this.type = null;
        this.child = child;
        this.cast = cast;
    }

    /**
     * ECast nodes are inserted after analysis has completed, so analyzing one
     * indicates a malformed tree.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        throw new IllegalStateException(error("Illegal tree structure."));
    }

    /**
     * Writes the child's value, applies the cast, then emits a branch if the
     * parent supplied tru/fals labels.
     */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        child.write(settings, definition, adapter);
        WriterUtility.writeCast(adapter, cast);
        WriterUtility.writeBranch(adapter, tru, fals);
    }
}

View File

@ -0,0 +1,310 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.List;
/**
* Represents the entirety of a variable/method chain for read/write operations.
*/
public final class EChain extends AExpression {
    // The chain of links (variables, fields, calls, array accesses) read left-to-right.
    final List<ALink> links;
    // True for a prefix increment/decrement (++x / --x).
    final boolean pre;
    // True for a postfix increment/decrement (x++ / x--).
    final boolean post;
    // Compound-assignment or incr/decr operator; null for a plain read or write.
    Operation operation;
    // Right-hand side expression; null for a plain read.
    AExpression expression;

    // True when a compound add is a string concatenation using a shared StringBuilder.
    boolean cat = false;
    // Promoted type used for a compound operation.
    Type promote = null;
    // True when the compound result must be narrowed back with an exact (overflow-checked) instruction.
    boolean exact = false;
    // Cast from the stored type to the promoted type before the compound operation.
    Cast there = null;
    // Cast from the promoted type back to the stored type after the compound operation.
    Cast back = null;

    public EChain(final String location, final List<ALink> links,
                  final boolean pre, final boolean post, final Operation operation, final AExpression expression) {
        super(location);

        this.links = links;
        this.pre = pre;
        this.post = post;
        this.operation = operation;
        this.expression = expression;
    }

    /**
     * Resolves the link chain, rewrites ++/-- into compound adds, then analyzes
     * the chain as a compound assignment, a plain write, or a plain read.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        analyzeLinks(settings, definition, variables);
        analyzeIncrDecr();

        if (operation != null) {
            analyzeCompound(settings, definition, variables);
        } else if (expression != null) {
            analyzeWrite(settings, definition, variables);
        } else {
            analyzeRead();
        }
    }

    // Analyzes each link in order, threading the previous link's output type
    // ('after') into the next link's input type ('before').
    private void analyzeLinks(final CompilerSettings settings, final Definition definition, final Variables variables) {
        ALink previous = null;
        int index = 0;

        while (index < links.size()) {
            final ALink current = links.get(index);

            if (previous != null) {
                current.before = previous.after;

                // Only the second link can inherit static-ness from a leading type name.
                if (index == 1) {
                    current.statik = previous.statik;
                }
            }

            // Only the final link is actually loaded/stored; intermediate links
            // merely navigate to it.
            if (index == links.size() - 1) {
                current.load = read;
                current.store = expression != null || pre || post;
            }

            final ALink analyzed = current.analyze(settings, definition, variables);

            // A null result means the link is a no-op and is dropped; a different
            // node means the link replaced itself with a more specific form.
            if (analyzed == null) {
                links.remove(index);
            } else {
                if (analyzed != current) {
                    links.set(index, analyzed);
                }

                previous = analyzed;
                ++index;
            }
        }

        // Drop a leading static type reference; presumably it emits no bytecode
        // of its own — TODO confirm against the ALink implementations.
        if (links.get(0).statik) {
            links.remove(0);
        }
    }

    // Rewrites ++/-- into a compound add/subtract of a constant 1 of the
    // appropriate numeric type.
    private void analyzeIncrDecr() {
        final ALink last = links.get(links.size() - 1);

        if (pre && post) {
            throw new IllegalStateException(error("Illegal tree structure."));
        } else if (pre || post) {
            if (expression != null) {
                throw new IllegalStateException(error("Illegal tree structure."));
            }

            final Sort sort = last.after.sort;

            if (operation == Operation.INCR) {
                if (sort == Sort.DOUBLE) {
                    expression = new EConstant(location, 1D);
                } else if (sort == Sort.FLOAT) {
                    expression = new EConstant(location, 1F);
                } else if (sort == Sort.LONG) {
                    expression = new EConstant(location, 1L);
                } else {
                    expression = new EConstant(location, 1);
                }

                operation = Operation.ADD;
            } else if (operation == Operation.DECR) {
                if (sort == Sort.DOUBLE) {
                    expression = new EConstant(location, 1D);
                } else if (sort == Sort.FLOAT) {
                    expression = new EConstant(location, 1F);
                } else if (sort == Sort.LONG) {
                    expression = new EConstant(location, 1L);
                } else {
                    expression = new EConstant(location, 1);
                }

                operation = Operation.SUB;
            } else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
        }
    }

    // Analyzes a compound assignment (x op= y): promotes the stored type and the
    // right-hand side to a shared type and records the casts to and from it.
    private void analyzeCompound(final CompilerSettings settings, final Definition definition, final Variables variables) {
        final ALink last = links.get(links.size() - 1);

        expression.analyze(settings, definition, variables);

        // NOTE(review): BWAND and BWOR promote via promoteXor here while EBinary
        // promotes the same operators via promoteNumeric — confirm this divergence
        // (boolean support for compound &=/|=) is intentional.
        if (operation == Operation.MUL) {
            promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true);
        } else if (operation == Operation.DIV) {
            promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true);
        } else if (operation == Operation.REM) {
            promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true);
        } else if (operation == Operation.ADD) {
            promote = AnalyzerCaster.promoteAdd(definition, last.after, expression.actual);
        } else if (operation == Operation.SUB) {
            promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true);
        } else if (operation == Operation.LSH) {
            promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true);
        } else if (operation == Operation.RSH) {
            promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true);
        } else if (operation == Operation.USH) {
            promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true);
        } else if (operation == Operation.BWAND) {
            promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual);
        } else if (operation == Operation.XOR) {
            promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual);
        } else if (operation == Operation.BWOR) {
            promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual);
        } else {
            throw new IllegalStateException(error("Illegal tree structure."));
        }

        if (promote == null) {
            throw new ClassCastException("Cannot apply compound assignment " +
                "[" + operation.symbol + "=] to types [" + last.after + "] and [" + expression.actual + "].");
        }

        // A compound add that promotes to String is a concatenation (x += "...").
        cat = operation == Operation.ADD && promote.sort == Sort.STRING;

        if (cat) {
            // Mark a nested string add so it appends into the shared StringBuilder.
            if (expression instanceof EBinary && ((EBinary)expression).operation == Operation.ADD &&
                expression.actual.sort == Sort.STRING) {
                ((EBinary)expression).cat = true;
            }

            expression.expected = expression.actual;
        } else if (operation == Operation.LSH || operation == Operation.RSH || operation == Operation.USH) {
            // Shift amounts are always explicit ints.
            expression.expected = definition.intType;
            expression.explicit = true;
        } else {
            expression.expected = promote;
        }

        expression = expression.cast(settings, definition, variables);

        // With overflow disabled, arithmetic results must be narrowed back with
        // an overflow-checked instruction.
        exact = !settings.getNumericOverflow() &&
            (operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM ||
                operation == Operation.ADD || operation == Operation.SUB);
        there = AnalyzerCaster.getLegalCast(definition, location, last.after, promote, false);
        back = AnalyzerCaster.getLegalCast(definition, location, promote, last.after, true);

        statement = true;
        actual = read ? last.after : definition.voidType;
    }

    // Analyzes a plain assignment: the right-hand side must cast to the stored type.
    private void analyzeWrite(final CompilerSettings settings, final Definition definition, final Variables variables) {
        final ALink last = links.get(links.size() - 1);

        expression.expected = last.after;
        expression.analyze(settings, definition, variables);
        expression = expression.cast(settings, definition, variables);

        statement = true;
        actual = read ? last.after : definition.voidType;
    }

    // Analyzes a plain read: the chain's constant/statement/type come from the last link.
    private void analyzeRead() {
        final ALink last = links.get(links.size() - 1);

        constant = last.string;
        statement = last.statement;
        actual = last.after;
    }

    /**
     * Emits bytecode for the chain.  Intermediate links just load; the final
     * link handles the three store shapes: string-concat compound, general
     * compound (with pre/post duplication for ++/--), and plain assignment.
     */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        if (cat) {
            WriterUtility.writeNewStrings(adapter);
        }

        final ALink last = links.get(links.size() - 1);

        for (final ALink link : links) {
            link.write(settings, definition, adapter);

            if (link == last && link.store) {
                if (cat) {
                    // String-concat compound: dup the target, append old value and
                    // the right-hand side, convert back, optionally dup for a read.
                    WriterUtility.writeDup(adapter, link.size, 1);
                    link.load(settings, definition, adapter);
                    WriterUtility.writeAppendStrings(adapter, link.after.sort);

                    expression.write(settings, definition, adapter);

                    if (!(expression instanceof EBinary) ||
                        ((EBinary)expression).operation != Operation.ADD || expression.actual.sort != Sort.STRING) {
                        WriterUtility.writeAppendStrings(adapter, expression.actual.sort);
                    }

                    WriterUtility.writeToStrings(adapter);
                    WriterUtility.writeCast(adapter, back);

                    if (link.load) {
                        WriterUtility.writeDup(adapter, link.after.sort.size, link.size);
                    }

                    link.store(settings, definition, adapter);
                } else if (operation != null) {
                    // General compound: load, (post: dup old value), cast up,
                    // apply op, narrow back, (pre: dup new value), store.
                    WriterUtility.writeDup(adapter, link.size, 0);
                    link.load(settings, definition, adapter);

                    if (link.load && post) {
                        WriterUtility.writeDup(adapter, link.after.sort.size, link.size);
                    }

                    WriterUtility.writeCast(adapter, there);
                    expression.write(settings, definition, adapter);
                    WriterUtility.writeBinaryInstruction(settings, definition, adapter, location, promote, operation);

                    // An exact narrowing instruction replaces the back cast when available.
                    if (!exact || !WriterUtility.writeExactInstruction(definition, adapter, promote.sort, link.after.sort)) {
                        WriterUtility.writeCast(adapter, back);
                    }

                    if (link.load && !post) {
                        WriterUtility.writeDup(adapter, link.after.sort.size, link.size);
                    }

                    link.store(settings, definition, adapter);
                } else {
                    // Plain assignment: write the value, optionally dup it for a read, store.
                    expression.write(settings, definition, adapter);

                    if (link.load) {
                        WriterUtility.writeDup(adapter, link.after.sort.size, link.size);
                    }

                    link.store(settings, definition, adapter);
                }
            } else {
                link.load(settings, definition, adapter);
            }
        }

        WriterUtility.writeBranch(adapter, tru, fals);
    }
}

View File

@ -0,0 +1,528 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
import static org.elasticsearch.painless.WriterConstants.CHECKEQUALS;
import static org.elasticsearch.painless.WriterConstants.DEF_EQ_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_GTE_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_GT_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_LTE_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_LT_CALL;
/**
* Represents a comparison expression.
*/
public final class EComp extends AExpression {
final Operation operation;
AExpression left;
AExpression right;
/**
 * Builds a comparison node for the given operator and its two operands.
 */
public EComp(final String location, final Operation operation, final AExpression left, final AExpression right) {
    super(location);

    this.operation = operation;
    this.left = left;
    this.right = right;
}
/**
 * Dispatches to the operator-specific analyzer; each one promotes the
 * operands, inserts casts, and constant-folds where possible.
 */
@Override
void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
    switch (operation) {
        case EQ:
            analyzeEq(settings, definition, variables);
            break;
        case EQR:
            analyzeEqR(settings, definition, variables);
            break;
        case NE:
            analyzeNE(settings, definition, variables);
            break;
        case NER:
            analyzeNER(settings, definition, variables);
            break;
        case GTE:
            analyzeGTE(settings, definition, variables);
            break;
        case GT:
            analyzeGT(settings, definition, variables);
            break;
        case LTE:
            analyzeLTE(settings, definition, variables);
            break;
        case LT:
            analyzeLT(settings, definition, variables);
            break;
        default:
            throw new IllegalStateException(error("Illegal tree structure."));
    }
}
/**
 * Type-checks value equality [==]: promotes both operands to a shared type
 * and constant-folds when both sides are constants or null literals.
 */
private void analyzeEq(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promote = AnalyzerCaster.promoteEquality(definition, left.actual, right.actual);

    if (promote == null) {
        throw new ClassCastException(error("Cannot apply equals [==] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promote;
    right.expected = promote;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // null == null is trivially true, so the comparison is rejected as pointless.
    if (left.isNull && right.isNull) {
        throw new IllegalArgumentException(error("Extraneous comparison of null constants."));
    }

    // Fold when each side is either a constant or the null literal.
    if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) {
        final Sort sort = promote.sort;

        if (sort == Sort.BOOL) {
            constant = (boolean)left.constant == (boolean)right.constant;
        } else if (sort == Sort.INT) {
            constant = (int)left.constant == (int)right.constant;
        } else if (sort == Sort.LONG) {
            constant = (long)left.constant == (long)right.constant;
        } else if (sort == Sort.FLOAT) {
            constant = (float)left.constant == (float)right.constant;
        } else if (sort == Sort.DOUBLE) {
            constant = (double)left.constant == (double)right.constant;
        } else if (!left.isNull) {
            constant = left.constant.equals(right.constant);
        } else if (!right.isNull) {
            // Left is the null literal: equals(null) is always false by contract,
            // which matches null == non-null-constant.
            constant = right.constant.equals(null);
        } else {
            throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = definition.booleanType;
}
/**
 * Type-checks reference equality [===]: primitives fold by value, everything
 * else folds by reference identity.
 */
private void analyzeEqR(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promoted = AnalyzerCaster.promoteReference(definition, left.actual, right.actual);

    if (promoted == null) {
        throw new ClassCastException(error("Cannot apply reference equals [===] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promoted;
    right.expected = promoted;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // null === null is trivially true, so the comparison is rejected as pointless.
    if (left.isNull && right.isNull) {
        throw new IllegalArgumentException(error("Extraneous comparison of null constants."));
    }

    // Fold when each side is either a constant or the null literal.
    if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) {
        switch (promoted.sort) {
            case BOOL:
                constant = (boolean)left.constant == (boolean)right.constant;
                break;
            case INT:
                constant = (int)left.constant == (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant == (long)right.constant;
                break;
            case FLOAT:
                constant = (float)left.constant == (float)right.constant;
                break;
            case DOUBLE:
                constant = (double)left.constant == (double)right.constant;
                break;
            default:
                // Non-primitive: compare object identity.
                constant = left.constant == right.constant;
        }
    }

    actual = definition.booleanType;
}
/**
 * Analyzes a value inequality [!=] comparison: analyzes and promotes both
 * operands, folds the comparison into a constant when both sides are
 * compile-time constants, and sets the result type to boolean.
 */
private void analyzeNE(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promote = AnalyzerCaster.promoteEquality(definition, left.actual, right.actual);

    if (promote == null) {
        throw new ClassCastException(error("Cannot apply not equals [!=] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promote;
    right.expected = promote;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    if (left.isNull && right.isNull) {
        throw new IllegalArgumentException(error("Extraneous comparison of null constants."));
    }

    // Constant fold when both operands are compile-time constants (or null literals).
    if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) {
        switch (promote.sort) {
            case BOOL:
                constant = (boolean)left.constant != (boolean)right.constant;
                break;
            case INT:
                constant = (int)left.constant != (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant != (long)right.constant;
                break;
            case FLOAT:
                constant = (float)left.constant != (float)right.constant;
                break;
            case DOUBLE:
                constant = (double)left.constant != (double)right.constant;
                break;
            default:
                // Reference sorts compare via equals; a null literal never equals a constant.
                if (!left.isNull) {
                    constant = !left.constant.equals(right.constant);
                } else if (!right.isNull) {
                    constant = !right.constant.equals(null);
                } else {
                    throw new IllegalStateException(error("Illegal tree structure."));
                }
        }
    }

    actual = definition.booleanType;
}
/**
 * Analyzes a reference inequality [!==] comparison: analyzes and promotes both
 * operands, folds the comparison into a constant when both sides are
 * compile-time constants, and sets the result type to boolean.
 */
private void analyzeNER(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    // Reference comparison promotes using reference rules rather than equality rules.
    final Type promote = AnalyzerCaster.promoteReference(definition, left.actual, right.actual);

    if (promote == null) {
        throw new ClassCastException(error("Cannot apply reference not equals [!==] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promote;
    right.expected = promote;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    if (left.isNull && right.isNull) {
        throw new IllegalArgumentException(error("Extraneous comparison of null constants."));
    }

    // Constant fold when both operands are compile-time constants (or null literals).
    if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) {
        switch (promote.sort) {
            case BOOL:
                constant = (boolean)left.constant != (boolean)right.constant;
                break;
            case INT:
                constant = (int)left.constant != (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant != (long)right.constant;
                break;
            case FLOAT:
                constant = (float)left.constant != (float)right.constant;
                break;
            case DOUBLE:
                constant = (double)left.constant != (double)right.constant;
                break;
            default:
                // Reference sorts compare by identity.
                constant = left.constant != right.constant;
        }
    }

    actual = definition.booleanType;
}
/**
 * Analyzes a greater than or equals [>=] comparison: promotes both operands
 * numerically, folds the result when both sides are constants, and sets the
 * result type to boolean.
 */
private void analyzeGTE(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true);

    if (promote == null) {
        throw new ClassCastException(error("Cannot apply greater than or equals [>=] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promote;
    right.expected = promote;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Constant fold when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promote.sort) {
            case INT:
                constant = (int)left.constant >= (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant >= (long)right.constant;
                break;
            case FLOAT:
                constant = (float)left.constant >= (float)right.constant;
                break;
            case DOUBLE:
                constant = (double)left.constant >= (double)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = definition.booleanType;
}
/**
 * Analyzes a greater than [>] comparison: promotes both operands numerically,
 * folds the result when both sides are constants, and sets the result type
 * to boolean.
 */
private void analyzeGT(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true);

    if (promote == null) {
        throw new ClassCastException(error("Cannot apply greater than [>] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promote;
    right.expected = promote;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Constant fold when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promote.sort) {
            case INT:
                constant = (int)left.constant > (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant > (long)right.constant;
                break;
            case FLOAT:
                constant = (float)left.constant > (float)right.constant;
                break;
            case DOUBLE:
                constant = (double)left.constant > (double)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = definition.booleanType;
}
/**
 * Analyzes a less than or equals [<=] comparison: promotes both operands
 * numerically, folds the result when both sides are constants, and sets the
 * result type to boolean.
 */
private void analyzeLTE(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true);

    if (promote == null) {
        throw new ClassCastException(error("Cannot apply less than or equals [<=] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promote;
    right.expected = promote;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Constant fold when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        switch (promote.sort) {
            case INT:
                constant = (int)left.constant <= (int)right.constant;
                break;
            case LONG:
                constant = (long)left.constant <= (long)right.constant;
                break;
            case FLOAT:
                constant = (float)left.constant <= (float)right.constant;
                break;
            case DOUBLE:
                constant = (double)left.constant <= (double)right.constant;
                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = definition.booleanType;
}
/**
 * Analyzes a less than [<] comparison: promotes both operands numerically,
 * folds the result when both sides are constants, and sets the result type
 * to boolean.
 */
private void analyzeLT(final CompilerSettings settings, final Definition definition, final Variables variables) {
    left.analyze(settings, definition, variables);
    right.analyze(settings, definition, variables);

    final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true);

    if (promote == null) {
        // Fixed: the message previously reported the operator as [>=] for a less than comparison.
        throw new ClassCastException(error("Cannot apply less than [<] to types " +
            "[" + left.actual.name + "] and [" + right.actual.name + "]."));
    }

    left.expected = promote;
    right.expected = promote;

    left = left.cast(settings, definition, variables);
    right = right.cast(settings, definition, variables);

    // Constant fold when both operands are compile-time constants.
    if (left.constant != null && right.constant != null) {
        final Sort sort = promote.sort;

        if (sort == Sort.INT) {
            constant = (int)left.constant < (int)right.constant;
        } else if (sort == Sort.LONG) {
            constant = (long)left.constant < (long)right.constant;
        } else if (sort == Sort.FLOAT) {
            constant = (float)left.constant < (float)right.constant;
        } else if (sort == Sort.DOUBLE) {
            constant = (double)left.constant < (double)right.constant;
        } else {
            throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = definition.booleanType;
}
/**
 * Writes the comparison to bytecode.  When a parent supplied jump targets
 * (tru/fals) the comparison branches directly to them; otherwise it
 * materializes a boolean result on the stack.
 */
@Override
void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
    // branch == true means a parent supplied jump targets instead of wanting a value.
    final boolean branch = tru != null || fals != null;
    final org.objectweb.asm.Type rtype = right.actual.type;
    final Sort rsort = right.actual.sort;

    left.write(settings, definition, adapter);

    // A null right operand is tested with ifNull/ifNonNull below, so nothing is pushed for it.
    if (!right.isNull) {
        right.write(settings, definition, adapter);
    }

    // Jump target: prefer the true label, fall back to the false label, otherwise
    // use a local label so a boolean value can be materialized at the end.
    final Label jump = tru != null ? tru : fals != null ? fals : new Label();
    final Label end = new Label();

    // Select the actual comparison to emit.  When only a false target exists the
    // test is inverted so the jump is taken when the original comparison fails.
    final boolean eq = (operation == Operation.EQ || operation == Operation.EQR) && (tru != null || fals == null) ||
        (operation == Operation.NE || operation == Operation.NER) && fals != null;
    final boolean ne = (operation == Operation.NE || operation == Operation.NER) && (tru != null || fals == null) ||
        (operation == Operation.EQ || operation == Operation.EQR) && fals != null;
    final boolean lt = operation == Operation.LT && (tru != null || fals == null) || operation == Operation.GTE && fals != null;
    final boolean lte = operation == Operation.LTE && (tru != null || fals == null) || operation == Operation.GT && fals != null;
    final boolean gt = operation == Operation.GT && (tru != null || fals == null) || operation == Operation.LTE && fals != null;
    final boolean gte = operation == Operation.GTE && (tru != null || fals == null) || operation == Operation.LT && fals != null;

    // Some def/Object paths leave a boolean on the stack instead of jumping; those
    // paths clear writejump so the materialization block below is skipped.
    boolean writejump = true;

    switch (rsort) {
        case VOID:
        case BYTE:
        case SHORT:
        case CHAR:
            // These sorts are promoted away during analysis and never reach here.
            throw new IllegalStateException(error("Illegal tree structure."));
        case BOOL:
            // Booleans support only (in)equality.
            if (eq) adapter.ifZCmp(GeneratorAdapter.EQ, jump);
            else if (ne) adapter.ifZCmp(GeneratorAdapter.NE, jump);
            else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
            break;
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
            // Numeric comparisons map directly onto conditional jumps.
            if (eq) adapter.ifCmp(rtype, GeneratorAdapter.EQ, jump);
            else if (ne) adapter.ifCmp(rtype, GeneratorAdapter.NE, jump);
            else if (lt) adapter.ifCmp(rtype, GeneratorAdapter.LT, jump);
            else if (lte) adapter.ifCmp(rtype, GeneratorAdapter.LE, jump);
            else if (gt) adapter.ifCmp(rtype, GeneratorAdapter.GT, jump);
            else if (gte) adapter.ifCmp(rtype, GeneratorAdapter.GE, jump);
            else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
            break;
        case DEF:
            // def operands: value (in)equality and relational ops go through runtime
            // helper calls that leave a boolean on the stack; reference (in)equality
            // (EQR/NER) and null tests emit jumps directly.
            if (eq) {
                if (right.isNull) {
                    adapter.ifNull(jump);
                } else if (!left.isNull && operation == Operation.EQ) {
                    adapter.invokeStatic(definition.defobjType.type, DEF_EQ_CALL);
                } else {
                    adapter.ifCmp(rtype, GeneratorAdapter.EQ, jump);
                }
            } else if (ne) {
                if (right.isNull) {
                    adapter.ifNonNull(jump);
                } else if (!left.isNull && operation == Operation.NE) {
                    // The helper computes equality; jump when it reports equal == false.
                    adapter.invokeStatic(definition.defobjType.type, DEF_EQ_CALL);
                    adapter.ifZCmp(GeneratorAdapter.EQ, jump);
                } else {
                    adapter.ifCmp(rtype, GeneratorAdapter.NE, jump);
                }
            } else if (lt) {
                adapter.invokeStatic(definition.defobjType.type, DEF_LT_CALL);
            } else if (lte) {
                adapter.invokeStatic(definition.defobjType.type, DEF_LTE_CALL);
            } else if (gt) {
                adapter.invokeStatic(definition.defobjType.type, DEF_GT_CALL);
            } else if (gte) {
                adapter.invokeStatic(definition.defobjType.type, DEF_GTE_CALL);
            } else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
            // Paths that already jumped keep writejump; the helper-call paths left a
            // boolean on the stack and convert it into a jump here when branching.
            writejump = left.isNull || ne || operation == Operation.EQR;
            if (branch && !writejump) {
                adapter.ifZCmp(GeneratorAdapter.NE, jump);
            }
            break;
        default:
            // Other reference sorts (e.g. String/Object): value equality goes through
            // the CHECKEQUALS utility; reference equality uses a direct compare.
            if (eq) {
                if (right.isNull) {
                    adapter.ifNull(jump);
                } else if (operation == Operation.EQ) {
                    adapter.invokeStatic(definition.utilityType.type, CHECKEQUALS);
                    if (branch) {
                        adapter.ifZCmp(GeneratorAdapter.NE, jump);
                    }
                    // CHECKEQUALS left the boolean result on the stack when not branching.
                    writejump = false;
                } else {
                    adapter.ifCmp(rtype, GeneratorAdapter.EQ, jump);
                }
            } else if (ne) {
                if (right.isNull) {
                    adapter.ifNonNull(jump);
                } else if (operation == Operation.NE) {
                    adapter.invokeStatic(definition.utilityType.type, CHECKEQUALS);
                    adapter.ifZCmp(GeneratorAdapter.EQ, jump);
                } else {
                    adapter.ifCmp(rtype, GeneratorAdapter.NE, jump);
                }
            } else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
    }

    // No parent jump targets: materialize the boolean result from the jump.
    if (!branch && writejump) {
        adapter.push(false);
        adapter.goTo(end);
        adapter.mark(jump);
        adapter.push(true);
        adapter.mark(end);
    }
}
}

View File

@ -0,0 +1,94 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
 * Represents a conditional expression.
 */
public final class EConditional extends AExpression {

    AExpression condition;
    AExpression left;
    AExpression right;

    public EConditional(final String location, final AExpression condition, final AExpression left, final AExpression right) {
        super(location);

        this.condition = condition;
        this.left = left;
        this.right = right;
    }

    /**
     * Type checks the condition against boolean and both branches against the
     * expected type; when no type is expected, promotes the two branch types
     * to a common result type instead.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        condition.expected = definition.booleanType;
        condition.analyze(settings, definition, variables);
        condition = condition.cast(settings, definition, variables);

        // A constant condition means one branch can never execute.
        if (condition.constant != null) {
            throw new IllegalArgumentException(error("Extraneous conditional statement."));
        }

        left.expected = expected;
        left.explicit = explicit;
        right.expected = expected;
        right.explicit = explicit;
        actual = expected;

        left.analyze(settings, definition, variables);
        right.analyze(settings, definition, variables);

        // Without an expected type, derive the result type from the two branches.
        if (expected == null) {
            final Type promote = AnalyzerCaster.promoteConditional(definition, left.actual, right.actual, left.constant, right.constant);

            left.expected = promote;
            right.expected = promote;
            actual = promote;
        }

        left = left.cast(settings, definition, variables);
        right = right.cast(settings, definition, variables);
    }

    /**
     * Writes the condition followed by both branches, jumping over the right
     * branch when the condition holds.
     */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final Label conditionFalse = new Label();
        final Label done = new Label();

        // The condition falls through on true and jumps to the right branch on false.
        condition.fals = conditionFalse;

        // Propagate any parent jump targets into both branches.
        left.tru = tru;
        right.tru = tru;
        left.fals = fals;
        right.fals = fals;

        condition.write(settings, definition, adapter);

        left.write(settings, definition, adapter);
        adapter.goTo(done);

        adapter.mark(conditionFalse);
        right.write(settings, definition, adapter);
        adapter.mark(done);
    }
}

View File

@ -0,0 +1,97 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
 * Represents a constant. Note this replaces any other expression
 * node with a constant value set during a cast. (Internal only.)
 */
final class EConstant extends AExpression {

    EConstant(final String location, final Object constant) {
        super(location);

        this.constant = constant;
    }

    /** Derives the actual type from the runtime class of the constant value. */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (constant instanceof Boolean) {
            actual = definition.booleanType;
        } else if (constant instanceof Byte) {
            actual = definition.byteType;
        } else if (constant instanceof Short) {
            actual = definition.shortType;
        } else if (constant instanceof Character) {
            actual = definition.charType;
        } else if (constant instanceof Integer) {
            actual = definition.intType;
        } else if (constant instanceof Long) {
            actual = definition.longType;
        } else if (constant instanceof Float) {
            actual = definition.floatType;
        } else if (constant instanceof Double) {
            actual = definition.doubleType;
        } else if (constant instanceof String) {
            actual = definition.stringType;
        } else {
            throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    /**
     * Pushes the constant onto the stack.  Boolean constants with jump targets
     * branch directly instead of pushing a value.
     */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final Sort sort = actual.sort;

        switch (sort) {
            case STRING:
                adapter.push((String)constant);
                break;
            case DOUBLE:
                adapter.push((double)constant);
                break;
            case FLOAT:
                adapter.push((float)constant);
                break;
            case LONG:
                adapter.push((long)constant);
                break;
            case INT:
                adapter.push((int)constant);
                break;
            case CHAR:
                adapter.push((char)constant);
                break;
            case SHORT:
                adapter.push((short)constant);
                break;
            case BYTE:
                adapter.push((byte)constant);
                break;
            case BOOL:
                // Jump straight to a supplied branch target, or push the value
                // when no targets were supplied.
                if (tru != null && (boolean)constant) {
                    adapter.goTo(tru);
                } else if (fals != null && !(boolean)constant) {
                    adapter.goTo(fals);
                } else if (tru == null && fals == null) {
                    adapter.push((boolean)constant);
                }

                break;
            default:
                throw new IllegalStateException(error("Illegal tree structure."));
        }

        // Booleans already performed their own branching above.
        if (sort != Sort.BOOL) {
            WriterUtility.writeBranch(adapter, tru, fals);
        }
    }
}

View File

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
 * Represents a decimal constant.
 */
public final class EDecimal extends AExpression {

    final String value;

    public EDecimal(final String location, final String value) {
        super(location);

        this.value = value;
    }

    /**
     * Parses the literal text: an 'f'/'F' suffix yields a float constant,
     * anything else is parsed as a double.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        final boolean isFloat = value.endsWith("f") || value.endsWith("F");

        if (isFloat) {
            try {
                constant = Float.parseFloat(value.substring(0, value.length() - 1));
                actual = definition.floatType;
            } catch (final NumberFormatException exception) {
                throw new IllegalArgumentException(error("Invalid float constant [" + value + "]."));
            }
        } else {
            try {
                constant = Double.parseDouble(value);
                actual = definition.doubleType;
            } catch (final NumberFormatException exception) {
                throw new IllegalArgumentException(error("Invalid double constant [" + value + "]."));
            }
        }
    }

    /** Decimal constants are replaced by EConstant during analysis and are never written directly. */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        throw new IllegalArgumentException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,70 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
 * Represents an explicit cast.
 */
public final class EExplicit extends AExpression {

    final String type;
    AExpression child;

    Cast cast = null;

    public EExplicit(final String location, final String type, final AExpression child) {
        super(location);

        this.type = type;
        this.child = child;
    }

    /** Resolves the target type, then analyzes the child with an explicit cast to it. */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        try {
            actual = definition.getType(this.type);
        } catch (final IllegalArgumentException exception) {
            throw new IllegalArgumentException(error("Not a type [" + this.type + "]."));
        }

        child.expected = actual;
        child.explicit = true;

        child.analyze(settings, definition, variables);
        child = child.cast(settings, definition, variables);
    }

    /** Explicit casts are folded into the child during analysis and are never written directly. */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        throw new IllegalArgumentException(error("Illegal tree structure."));
    }

    // An explicit cast node defers casting to its already-analyzed child.
    AExpression cast(final CompilerSettings settings, final Definition definition, final Variables variables) {
        child.expected = expected;
        child.explicit = explicit;

        return child.cast(settings, definition, variables);
    }
}

View File

@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
 * Represents a null constant.
 */
public final class ENull extends AExpression {

    public ENull(final String location) {
        super(location);
    }

    /** Marks this node as the null literal and types it against the expected type. */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        isNull = true;

        if (expected == null) {
            // Without context, null takes the most general reference type.
            actual = definition.objectType;
        } else if (expected.sort.primitive) {
            throw new IllegalArgumentException(error("Cannot cast null to a primitive type [" + expected.name + "]."));
        } else {
            actual = expected;
        }
    }

    /** Writes a single null reference onto the stack. */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.visitInsn(Opcodes.ACONST_NULL);
    }
}

View File

@ -0,0 +1,102 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
 * Represents a non-decimal numeric constant.
 */
public final class ENumeric extends AExpression {

    final String value;
    int radix;

    public ENumeric(final String location, final String value, final int radix) {
        super(location);

        this.value = value;
        this.radix = radix;
    }

    /**
     * Parses the literal text into a constant.  A 'd'/'D' or 'f'/'F' suffix is only
     * meaningful for decimal literals; for other radixes those characters are
     * ordinary digits (e.g. hex).  An 'l'/'L' suffix yields a long; otherwise the
     * literal is parsed as an int and narrowed to byte/char/short when the expected
     * type asks for it and the value fits.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        final boolean decimal = radix == 10;

        // Fixed: the suffix checks previously ran before the radix check and threw an
        // IllegalStateException for non-decimal radixes — but 'd' and 'f' are valid hex
        // digits, so legitimate hex constants such as 0x1d or 0x2f crashed the compiler.
        // The suffix is now only honored for decimal literals.
        if (decimal && (value.endsWith("d") || value.endsWith("D"))) {
            try {
                constant = Double.parseDouble(value.substring(0, value.length() - 1));
                actual = definition.doubleType;
            } catch (final NumberFormatException exception) {
                throw new IllegalArgumentException(error("Invalid double constant [" + value + "]."));
            }
        } else if (decimal && (value.endsWith("f") || value.endsWith("F"))) {
            try {
                constant = Float.parseFloat(value.substring(0, value.length() - 1));
                actual = definition.floatType;
            } catch (final NumberFormatException exception) {
                throw new IllegalArgumentException(error("Invalid float constant [" + value + "]."));
            }
        } else if (value.endsWith("l") || value.endsWith("L")) {
            try {
                constant = Long.parseLong(value.substring(0, value.length() - 1), radix);
                actual = definition.longType;
            } catch (final NumberFormatException exception) {
                throw new IllegalArgumentException(error("Invalid long constant [" + value + "]."));
            }
        } else {
            try {
                // Narrow to byte/char/short only when the caller expects that exact
                // type and the parsed value fits its range.
                final Sort sort = expected == null ? Sort.INT : expected.sort;
                final int integer = Integer.parseInt(value, radix);

                if (sort == Sort.BYTE && integer >= Byte.MIN_VALUE && integer <= Byte.MAX_VALUE) {
                    constant = (byte)integer;
                    actual = definition.byteType;
                } else if (sort == Sort.CHAR && integer >= Character.MIN_VALUE && integer <= Character.MAX_VALUE) {
                    constant = (char)integer;
                    actual = definition.charType;
                } else if (sort == Sort.SHORT && integer >= Short.MIN_VALUE && integer <= Short.MAX_VALUE) {
                    constant = (short)integer;
                    actual = definition.shortType;
                } else {
                    constant = integer;
                    actual = definition.intType;
                }
            } catch (final NumberFormatException exception) {
                throw new IllegalArgumentException(error("Invalid int constant [" + value + "]."));
            }
        }
    }

    /** Numeric constants are replaced by EConstant during analysis and are never written directly. */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        throw new IllegalArgumentException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,234 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Operation;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
import static org.elasticsearch.painless.WriterConstants.DEF_NEG_CALL;
import static org.elasticsearch.painless.WriterConstants.DEF_NOT_CALL;
import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_INT;
import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_LONG;
/**
* Represents a unary math expression.
*/
public final class EUnary extends AExpression {
Operation operation;
AExpression child;
public EUnary(final String location, final Operation operation, final AExpression child) {
super(location);
this.operation = operation;
this.child = child;
}
@Override
void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
if (operation == Operation.NOT) {
analyzeNot(settings, definition, variables);
} else if (operation == Operation.BWNOT) {
analyzeBWNot(settings, definition, variables);
} else if (operation == Operation.ADD) {
analyzerAdd(settings, definition, variables);
} else if (operation == Operation.SUB) {
analyzerSub(settings, definition, variables);
} else {
throw new IllegalStateException(error("Illegal tree structure."));
}
}
void analyzeNot(final CompilerSettings settings, final Definition definition, final Variables variables) {
child.expected = definition.booleanType;
child.analyze(settings, definition, variables);
child = child.cast(settings, definition, variables);
if (child.constant != null) {
constant = !(boolean)child.constant;
}
actual = definition.booleanType;
}
void analyzeBWNot(final CompilerSettings settings, final Definition definition, final Variables variables) {
child.analyze(settings, definition, variables);
final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, false, true);
if (promote == null) {
throw new ClassCastException(error("Cannot apply not [~] to type [" + child.actual.name + "]."));
}
child.expected = promote;
child = child.cast(settings, definition, variables);
if (child.constant != null) {
final Sort sort = promote.sort;
if (sort == Sort.INT) {
constant = ~(int)child.constant;
} else if (sort == Sort.LONG) {
constant = ~(long)child.constant;
} else {
throw new IllegalStateException(error("Illegal tree structure."));
}
}
actual = promote;
}
void analyzerAdd(final CompilerSettings settings, final Definition definition, final Variables variables) {
child.analyze(settings, definition, variables);
final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, true, true);
if (promote == null) {
throw new ClassCastException(error("Cannot apply positive [+] to type [" + child.actual.name + "]."));
}
child.expected = promote;
child = child.cast(settings, definition, variables);
if (child.constant != null) {
final Sort sort = promote.sort;
if (sort == Sort.INT) {
constant = +(int)child.constant;
} else if (sort == Sort.LONG) {
constant = +(long)child.constant;
} else if (sort == Sort.FLOAT) {
constant = +(float)child.constant;
} else if (sort == Sort.DOUBLE) {
constant = +(double)child.constant;
} else {
throw new IllegalStateException(error("Illegal tree structure."));
}
}
actual = promote;
}
/**
 * Analyzes a unary minus (-) operation.
 * Promotes the child numerically (decimals allowed) and constant-folds when
 * possible. Integer negation honors the numeric-overflow compiler setting:
 * with overflow disabled, Math.negateExact is used so Integer/Long.MIN_VALUE throws.
 */
// NOTE(review): named "analyzerSub" while siblings are "analyzeNot"/"analyzeBWNot" —
// presumably a typo for "analyzeSub"; renaming would break the (unseen) caller, so confirm first.
void analyzerSub(final CompilerSettings settings, final Definition definition, final Variables variables) {
    child.analyze(settings, definition, variables);

    final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, true, true);

    if (promote == null) {
        throw new ClassCastException(error("Cannot apply negative [-] to type [" + child.actual.name + "]."));
    }

    child.expected = promote;
    child = child.cast(settings, definition, variables);

    if (child.constant != null) {
        final boolean overflow = settings.getNumericOverflow();
        final Sort sort = promote.sort;

        if (sort == Sort.INT) {
            constant = overflow ? -(int)child.constant : Math.negateExact((int)child.constant);
        } else if (sort == Sort.LONG) {
            constant = overflow ? -(long)child.constant : Math.negateExact((long)child.constant);
        } else if (sort == Sort.FLOAT) {
            // Floating-point negation cannot overflow; no exact variant exists or is needed.
            constant = -(float)child.constant;
        } else if (sort == Sort.DOUBLE) {
            constant = -(double)child.constant;
        } else {
            throw new IllegalStateException(error("Illegal tree structure."));
        }
    }

    actual = promote;
}
/**
 * Emits bytecode for the unary operation that was analyzed above.
 * Boolean not is compiled via branch targets; bitwise not and negation
 * either dispatch to def runtime helpers or emit the numeric bytecode.
 */
@Override
void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
    if (operation == Operation.NOT) {
        // Boolean not: when no jump targets were supplied by the parent,
        // materialize the negated boolean value on the stack.
        if (tru == null && fals == null) {
            final Label localfals = new Label();
            final Label end = new Label();

            child.fals = localfals;
            child.write(settings, definition, adapter);

            adapter.push(false);
            adapter.goTo(end);
            adapter.mark(localfals);
            adapter.push(true);
            adapter.mark(end);
        } else {
            // Negate by swapping the child's true/false branch targets.
            child.tru = fals;
            child.fals = tru;
            child.write(settings, definition, adapter);
        }
    } else {
        final org.objectweb.asm.Type type = actual.type;
        final Sort sort = actual.sort;

        child.write(settings, definition, adapter);

        if (operation == Operation.BWNOT) {
            if (sort == Sort.DEF) {
                // def operand: dispatch to the runtime helper.
                adapter.invokeStatic(definition.defobjType.type, DEF_NOT_CALL);
            } else {
                // No dedicated bytecode for bitwise not; emit (x ^ -1) instead.
                if (sort == Sort.INT) {
                    adapter.push(-1);
                } else if (sort == Sort.LONG) {
                    adapter.push(-1L);
                } else {
                    throw new IllegalStateException(error("Illegal tree structure."));
                }

                adapter.math(GeneratorAdapter.XOR, type);
            }
        } else if (operation == Operation.SUB) {
            if (sort == Sort.DEF) {
                adapter.invokeStatic(definition.defobjType.type, DEF_NEG_CALL);
            } else {
                if (settings.getNumericOverflow()) {
                    adapter.math(GeneratorAdapter.NEG, type);
                } else {
                    // Overflow checking enabled: route integer negation through Math.negateExact.
                    if (sort == Sort.INT) {
                        adapter.invokeStatic(definition.mathType.type, NEGATEEXACT_INT);
                    } else if (sort == Sort.LONG) {
                        adapter.invokeStatic(definition.mathType.type, NEGATEEXACT_LONG);
                    } else {
                        throw new IllegalStateException(error("Illegal tree structure."));
                    }
                }
            }
        } else if (operation != Operation.ADD) {
            // Unary plus (ADD) emits nothing; any other operation is a broken tree.
            throw new IllegalStateException(error("Illegal tree structure."));
        }

        WriterUtility.writeBranch(adapter, tru, fals);
    }
}
}

View File

@ -0,0 +1,71 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents an array length field load.
*/
/**
 * Represents an array length field load. The only legal access on an array
 * "field" is a read of the pseudo-field {@code length}.
 */
public final class LArrayLength extends ALink {

    final String value;

    LArrayLength(final String location, final String value) {
        super(location, -1);
        this.value = value;
    }

    /**
     * Validates that this is a plain read of {@code length}; any other field
     * name, a missing read, or an attempted write is an error.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (!"length".equals(value)) {
            throw new IllegalArgumentException(error("Illegal field access [" + value + "]."));
        }

        if (!load) {
            throw new IllegalArgumentException(error("Must read array field [length]."));
        } else if (store) {
            throw new IllegalArgumentException(error("Cannot write to read-only array field [length]."));
        }

        after = definition.intType;

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // No setup bytecode is required before the load.
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.arrayLength();
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // analyze() rejects stores, so reaching here means the tree is broken.
        throw new IllegalStateException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.List;
import java.util.Map;
/**
* Represents an array load/store or defers to possible shortcuts.
*/
/**
 * Represents an array load/store or defers to possible shortcuts:
 * def types delegate to {@link LDefArray}, {@link java.util.Map} types to a
 * map shortcut, and {@link java.util.List} types to a list shortcut.
 */
public final class LBrace extends ALink {

    AExpression index;

    public LBrace(final String location, final AExpression index) {
        super(location, 2);
        this.index = index;
    }

    /**
     * Resolves the bracket access against the type before this link.
     *
     * @return this link for a true array access, or the analyzed replacement
     *         link when the access is a def/map/list shortcut
     * @throws IllegalArgumentException when the type supports no bracket access
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (before == null) {
            throw new IllegalStateException(error("Illegal tree structure."));
        }

        final Sort sort = before.sort;

        if (sort == Sort.ARRAY) {
            index.expected = definition.intType;
            index.analyze(settings, definition, variables);
            index = index.cast(settings, definition, variables);

            // Element type is the same struct with one fewer array dimension.
            after = definition.getType(before.struct, before.dimensions - 1);

            return this;
        } else if (sort == Sort.DEF) {
            return new LDefArray(location, index).copy(this).analyze(settings, definition, variables);
        } else if (Map.class.isAssignableFrom(before.clazz)) {
            // Direct check instead of the original asSubclass/catch pattern:
            // exceptions are not used for ordinary control flow.
            return new LMapShortcut(location, index).copy(this).analyze(settings, definition, variables);
        } else if (List.class.isAssignableFrom(before.clazz)) {
            return new LListShortcut(location, index).copy(this).analyze(settings, definition, variables);
        }

        throw new IllegalArgumentException(error("Illegal array access on type [" + before.name + "]."));
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Push the index; the owning chain emits the actual load/store.
        index.write(settings, definition, adapter);
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.arrayLoad(after.type);
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.arrayStore(after.type);
    }
}

View File

@ -0,0 +1,120 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.Struct;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.List;
/**
* Represents a method call or deferes to a def call.
*/
/**
 * Represents a method call or defers to a def call.
 */
public final class LCall extends ALink {

    final String name;                  // the method name being called
    final List<AExpression> arguments;  // unanalyzed argument expressions

    Method method = null;               // resolved method; null until analyze succeeds

    public LCall(final String location, final String name, final List<AExpression> arguments) {
        super(location, -1);
        this.name = name;
        this.arguments = arguments;
    }

    /**
     * Resolves the call against the type before this link: static functions
     * when in a static context, member methods otherwise. Unresolved calls on
     * a def type delegate to {@link LDefCall}; anything else is an error.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (before == null) {
            throw new IllegalStateException(error("Illegal tree structure."));
        } else if (before.sort == Definition.Sort.ARRAY) {
            throw new IllegalArgumentException(error("Illegal call [" + name + "] on array type."));
        } else if (store) {
            throw new IllegalArgumentException(error("Cannot assign a value to a call [" + name + "]."));
        }

        final Struct struct = before.struct;
        // statik (inherited from ALink) selects static functions over member methods.
        method = statik ? struct.functions.get(name) : struct.methods.get(name);

        if (method != null) {
            final Definition.Type[] types = new Definition.Type[method.arguments.size()];
            method.arguments.toArray(types);

            if (method.arguments.size() != arguments.size()) {
                throw new IllegalArgumentException(error("When calling [" + name + "] on type [" + struct.name + "]" +
                    " expected [" + method.arguments.size() + "] arguments, but found [" + arguments.size() + "]."));
            }

            // Analyze each argument against the declared parameter type and cast in place.
            for (int argument = 0; argument < arguments.size(); ++argument) {
                final AExpression expression = arguments.get(argument);

                expression.expected = types[argument];
                expression.analyze(settings, definition, variables);
                arguments.set(argument, expression.cast(settings, definition, variables));
            }

            statement = true;
            after = method.rtn;

            return this;
        } else if (before.sort == Definition.Sort.DEF) {
            // Unknown name on a def receiver: defer resolution to runtime via LDefCall.
            final ALink link = new LDefCall(location, name, arguments);
            link.copy(this);

            return link.analyze(settings, definition, variables);
        }

        throw new IllegalArgumentException(error("Unknown call [" + name + "] on type [" + struct.name + "]."));
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Do nothing.
    }

    /**
     * Emits the arguments then the appropriate invoke instruction
     * (static/interface/virtual) for the resolved method.
     */
    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        for (final AExpression argument : arguments) {
            argument.write(settings, definition, adapter);
        }

        if (java.lang.reflect.Modifier.isStatic(method.reflect.getModifiers())) {
            adapter.invokeStatic(method.owner.type, method.method);
        } else if (java.lang.reflect.Modifier.isInterface(method.owner.clazz.getModifiers())) {
            adapter.invokeInterface(method.owner.type, method.method);
        } else {
            adapter.invokeVirtual(method.owner.type, method.method);
        }

        // When erasure widened the declared return, cast back to the precise type.
        if (!method.rtn.clazz.equals(method.handle.type().returnType())) {
            adapter.checkCast(method.rtn.type);
        }
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // analyze() rejects stores, so reaching here means the tree is broken.
        throw new IllegalStateException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,78 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a cast made in a variable/method chain.
*/
/**
 * Represents a cast made in a variable/method chain.
 */
public final class LCast extends ALink {

    final String type;

    Cast cast = null;

    public LCast(final String location, final String type) {
        super(location, -1);
        this.type = type;
    }

    /**
     * Resolves the target type and computes the legal cast from the type
     * before this link. When the cast is a no-op this link is dropped from
     * the chain entirely by returning null.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (before == null) {
            throw new IllegalStateException(error("Illegal tree structure."));
        }

        if (store) {
            throw new IllegalArgumentException(error("Cannot assign a value to a cast."));
        }

        try {
            after = definition.getType(type);
        } catch (final IllegalArgumentException exception) {
            throw new IllegalArgumentException(error("Not a type [" + type + "]."));
        }

        cast = AnalyzerCaster.getLegalCast(definition, location, before, after, true);

        if (cast == null) {
            return null;
        }

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        WriterUtility.writeCast(adapter, cast);
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // The cast itself was emitted in write(); nothing further to load.
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // analyze() rejects stores, so reaching here means the tree is broken.
        throw new IllegalStateException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.DynamicCallSite;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import static org.elasticsearch.painless.WriterConstants.DEF_BOOTSTRAP_HANDLE;
import static org.elasticsearch.painless.WriterConstants.DEF_DYNAMIC_ARRAY_LOAD_DESC;
import static org.elasticsearch.painless.WriterConstants.DEF_DYNAMIC_ARRAY_STORE_DESC;
/**
* Represents an array load/store or shortcut on a def type. (Internal only.)
*/
/**
 * Represents an array load/store or shortcut on a def type. (Internal only.)
 * Both load and store are deferred to runtime via invokedynamic.
 */
final class LDefArray extends ALink {

    AExpression index;

    LDefArray(final String location, final AExpression index) {
        super(location, 0);
        this.index = index;
    }

    /**
     * The index on a def receiver can be anything, so it is analyzed against
     * Object; the result of the access is always def.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        index.expected = definition.objectType;
        index.analyze(settings, definition, variables);
        index = index.cast(settings, definition, variables);

        after = definition.defType;

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Push the index; the actual access is the invokedynamic below.
        index.write(settings, definition, adapter);
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.visitInvokeDynamicInsn(
            "arrayLoad", DEF_DYNAMIC_ARRAY_LOAD_DESC, DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.ARRAY_LOAD });
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.visitInvokeDynamicInsn(
            "arrayStore", DEF_DYNAMIC_ARRAY_STORE_DESC, DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.ARRAY_STORE });
    }
}

View File

@ -0,0 +1,94 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.DynamicCallSite;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.List;
import static org.elasticsearch.painless.WriterConstants.DEF_BOOTSTRAP_HANDLE;
/**
* Represents a method call made on a def type. (Internal only.)
*/
/**
 * Represents a method call made on a def type. (Internal only.)
 * Resolution happens at runtime through an invokedynamic call site.
 */
final class LDefCall extends ALink {

    final String name;
    final List<AExpression> arguments;

    LDefCall(final String location, final String name, final List<AExpression> arguments) {
        super(location, -1);
        this.name = name;
        this.arguments = arguments;
    }

    /**
     * Analyzes each argument with no expected type — whatever type an argument
     * resolves to is accepted as-is, since dispatch is deferred to runtime.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        final int count = arguments.size();

        for (int slot = 0; slot < count; ++slot) {
            final AExpression arg = arguments.get(slot);

            arg.analyze(settings, definition, variables);
            arg.expected = arg.actual;
            arguments.set(slot, arg.cast(settings, definition, variables));
        }

        statement = true;
        after = definition.defType;

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Do nothing.
    }

    /**
     * Builds the invokedynamic method descriptor while emitting each argument.
     * The receiver and the return value are both typed as def (Object).
     */
    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final StringBuilder descriptor = new StringBuilder();

        descriptor.append('(');
        // The first parameter is the receiver; its type is never known statically, so it is always Object.
        descriptor.append(definition.defType.type.getDescriptor());

        // TODO: remove our explicit conversions and feed more type information for return value,
        //       it can avoid some unnecessary boxing etc.
        for (final AExpression arg : arguments) {
            descriptor.append(arg.actual.type.getDescriptor());
            arg.write(settings, definition, adapter);
        }

        descriptor.append(')');
        // Return value.
        descriptor.append(definition.defType.type.getDescriptor());

        adapter.visitInvokeDynamicInsn(name, descriptor.toString(), DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.METHOD_CALL });
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Calls can never be assigned to; reaching here means the tree is broken.
        throw new IllegalStateException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.DynamicCallSite;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import static org.elasticsearch.painless.WriterConstants.DEF_BOOTSTRAP_HANDLE;
import static org.elasticsearch.painless.WriterConstants.DEF_DYNAMIC_LOAD_FIELD_DESC;
import static org.elasticsearch.painless.WriterConstants.DEF_DYNAMIC_STORE_FIELD_DESC;
/**
* Represents a field load/store or shortcut on a def type. (Internal only.)
*/
/**
 * Represents a field load/store or shortcut on a def type. (Internal only.)
 * Both directions dispatch through invokedynamic using the field name.
 */
final class LDefField extends ALink {

    final String value;

    LDefField(final String location, final String value) {
        super(location, 1);
        this.value = value;
    }

    /**
     * Nothing to resolve statically on a def receiver; the result is def.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        after = definition.defType;

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // No setup bytecode is required.
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.visitInvokeDynamicInsn(value, DEF_DYNAMIC_LOAD_FIELD_DESC, DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.LOAD });
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.visitInvokeDynamicInsn(value, DEF_DYNAMIC_STORE_FIELD_DESC, DEF_BOOTSTRAP_HANDLE, new Object[] { DynamicCallSite.STORE });
    }
}

View File

@ -0,0 +1,136 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Field;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Struct;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.List;
import java.util.Map;
/**
* Represents a field load/store or defers to a possible shortcuts.
*/
public final class LField extends ALink {
final String value;
Field field;
public LField(final String location, final String value) {
super(location, 1);
this.value = value;
}
@Override
ALink analyze(CompilerSettings settings, Definition definition, Variables variables) {
if (before == null) {
throw new IllegalStateException(error("Illegal tree structure."));
}
final Sort sort = before.sort;
if (sort == Sort.ARRAY) {
return new LArrayLength(location, value).copy(this).analyze(settings, definition, variables);
} else if (sort == Sort.DEF) {
return new LDefField(location, value).copy(this).analyze(settings, definition, variables);
}
final Struct struct = before.struct;
field = statik ? struct.statics.get(value) : struct.members.get(value);
if (field != null) {
if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) {
throw new IllegalArgumentException(error(
"Cannot write to read-only field [" + value + "] for type [" + struct.name + "]."));
}
after = field.type;
return this;
} else {
final boolean shortcut =
struct.methods.containsKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1)) ||
struct.methods.containsKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1));
if (shortcut) {
return new LShortcut(location, value).copy(this).analyze(settings, definition, variables);
} else {
final EConstant index = new EConstant(location, value);
index.analyze(settings, definition, variables);
try {
before.clazz.asSubclass(Map.class);
return new LMapShortcut(location, index).copy(this).analyze(settings, definition, variables);
} catch (final ClassCastException exception) {
// Do nothing.
}
try {
before.clazz.asSubclass(List.class);
return new LListShortcut(location, index).copy(this).analyze(settings, definition, variables);
} catch (final ClassCastException exception) {
// Do nothing.
}
}
}
throw new IllegalArgumentException(error("Unknown field [" + value + "] for type [" + struct.name + "]."));
}
@Override
void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
// Do nothing.
}
@Override
void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) {
adapter.getStatic(field.owner.type, field.reflect.getName(), field.type.type);
if (!field.generic.clazz.equals(field.type.clazz)) {
adapter.checkCast(field.generic.type);
}
} else {
adapter.getField(field.owner.type, field.reflect.getName(), field.type.type);
if (!field.generic.clazz.equals(field.type.clazz)) {
adapter.checkCast(field.generic.type);
}
}
}
@Override
void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) {
adapter.putStatic(field.owner.type, field.reflect.getName(), field.type.type);
} else {
adapter.putField(field.owner.type, field.reflect.getName(), field.type.type);
}
}
}

View File

@ -0,0 +1,105 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a list load/store shortcut. (Internal only.)
*/
/**
 * Represents a list load/store shortcut. (Internal only.)
 * Bracket access on a List-like type is rewritten to get(int)/set(int, value).
 */
final class LListShortcut extends ALink {

    AExpression index;  // the bracket index expression; coerced to int
    Method getter;      // resolved get(int) method, if any
    Method setter;      // resolved set(int, value) method, if any

    LListShortcut(final String location, final AExpression index) {
        super(location, 2);
        this.index = index;
    }

    /**
     * Validates the get/set pair for list-style access and types the index as int.
     * Created only by LBrace/LField after {@code before} is set — hence no null
     * check here; NOTE(review): confirm no other call paths exist.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        getter = before.struct.methods.get("get");
        setter = before.struct.methods.get("set");

        // get must be non-void, unary, and take an int index.
        if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 ||
            getter.arguments.get(0).sort != Sort.INT)) {
            throw new IllegalArgumentException(error("Illegal list get shortcut for type [" + before.name + "]."));
        }

        // set must be binary with an int index.
        if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) {
            throw new IllegalArgumentException(error("Illegal list set shortcut for type [" + before.name + "]."));
        }

        // When both exist, index types and element types must agree.
        if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0))
            || !getter.rtn.equals(setter.arguments.get(1)))) {
            throw new IllegalArgumentException(error("Shortcut argument types must match."));
        }

        // Require the accessor(s) the requested access actually needs.
        if ((load || store) && (!load || getter != null) && (!store || setter != null)) {
            index.expected = definition.intType;
            index.analyze(settings, definition, variables);
            index = index.cast(settings, definition, variables);

            after = setter != null ? setter.arguments.get(1) : getter.rtn;
        } else {
            throw new IllegalArgumentException(error("Illegal list shortcut for type [" + before.name + "]."));
        }

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Push the index; load/store below emit the actual accessor call.
        index.write(settings, definition, adapter);
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) {
            adapter.invokeInterface(getter.owner.type, getter.method);
        } else {
            adapter.invokeVirtual(getter.owner.type, getter.method);
        }

        // When erasure widened the declared return, cast back to the precise type.
        if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
            adapter.checkCast(getter.rtn.type);
        }
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) {
            adapter.invokeInterface(setter.owner.type, setter.method);
        } else {
            adapter.invokeVirtual(setter.owner.type, setter.method);
        }

        // set returns the previous element; discard it since the store is a statement.
        WriterUtility.writePop(adapter, setter.rtn.sort.size);
    }
}

View File

@ -0,0 +1,104 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a map load/store shortcut. (Internal only.)
*/
final class LMapShortcut extends ALink {
    AExpression index;
    Method getter;
    Method setter;

    LMapShortcut(final String location, final AExpression index) {
        super(location, 2);
        this.index = index;
    }

    /**
     * Resolves the owning struct's get/put methods and validates that they
     * form a usable map shortcut for the requested load and/or store.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        getter = before.struct.methods.get("get");
        setter = before.struct.methods.get("put");

        if (getter != null && (getter.arguments.size() != 1 || getter.rtn.sort == Sort.VOID)) {
            throw new IllegalArgumentException(error("Illegal map get shortcut for type [" + before.name + "]."));
        }

        if (setter != null && setter.arguments.size() != 2) {
            throw new IllegalArgumentException(error("Illegal map set shortcut for type [" + before.name + "]."));
        }

        if (getter != null && setter != null &&
            (!getter.arguments.get(0).equals(setter.arguments.get(0)) || !getter.rtn.equals(setter.arguments.get(1)))) {
            throw new IllegalArgumentException(error("Shortcut argument types must match."));
        }

        // Usable only when at least one of load/store is requested and the
        // corresponding accessor exists.
        final boolean usable = (load || store) && (!load || getter != null) && (!store || setter != null);

        if (!usable) {
            throw new IllegalArgumentException(error("Illegal map shortcut for type [" + before.name + "]."));
        }

        index.expected = setter == null ? getter.arguments.get(0) : setter.arguments.get(0);
        index.analyze(settings, definition, variables);
        index = index.cast(settings, definition, variables);

        after = setter == null ? getter.rtn : setter.arguments.get(1);

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        index.write(settings, definition, adapter);
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        invoke(adapter, getter);

        // Cast when the handle's erased return type differs from the declared one.
        if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
            adapter.checkCast(getter.rtn.type);
        }
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        invoke(adapter, setter);
        WriterUtility.writePop(adapter, setter.rtn.sort.size);
    }

    /** Emits an interface or virtual call depending on the method owner's kind. */
    private static void invoke(final GeneratorAdapter adapter, final Method method) {
        if (java.lang.reflect.Modifier.isInterface(method.owner.clazz.getModifiers())) {
            adapter.invokeInterface(method.owner.type, method.method);
        } else {
            adapter.invokeVirtual(method.owner.type, method.method);
        }
    }
}

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.List;
/**
* Represents an array instantiation.
*/
public final class LNewArray extends ALink {
    final String type;
    final List<AExpression> arguments;

    public LNewArray(final String location, final String type, final List<AExpression> arguments) {
        super(location, -1);
        this.type = type;
        this.arguments = arguments;
    }

    /**
     * Resolves the element type and analyzes each dimension expression as an int.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (before != null) {
            throw new IllegalStateException(error("Illegal tree structure."));
        } else if (store) {
            throw new IllegalArgumentException(error("Cannot assign a value to a new array."));
        } else if (!load) {
            throw new IllegalArgumentException(error("A newly created array must be assigned."));
        }

        final Type element;

        try {
            element = definition.getType(this.type);
        } catch (final IllegalArgumentException exception) {
            throw new IllegalArgumentException(error("Not a type [" + this.type + "]."));
        }

        // Every dimension expression must evaluate to an int.
        for (int dimension = 0; dimension < arguments.size(); ++dimension) {
            final AExpression expression = arguments.get(dimension);

            expression.expected = definition.intType;
            expression.analyze(settings, definition, variables);
            arguments.set(dimension, expression.cast(settings, definition, variables));
        }

        after = definition.getType(element.struct, arguments.size());

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Nothing to emit; load does the work.
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        for (final AExpression dimension : arguments) {
            dimension.write(settings, definition, adapter);
        }

        if (arguments.size() > 1) {
            // Multi-dimensional arrays need the dedicated instruction.
            adapter.visitMultiANewArrayInsn(after.type.getDescriptor(), after.type.getDimensions());
        } else {
            adapter.newArray(definition.getType(after.struct, 0).type);
        }
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        throw new IllegalStateException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,118 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Constructor;
import org.elasticsearch.painless.Definition.Struct;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.List;
/**
 * Represents an object instantiation.
*/
public final class LNewObj extends ALink {
    final String type;
    final List<AExpression> arguments;

    Constructor constructor;

    /**
     * @param location source location used in error messages
     * @param type the name of the type to instantiate
     * @param arguments the constructor argument expressions
     */
    public LNewObj(final String location, final String type, final List<AExpression> arguments) {
        super(location, -1);
        this.type = type;
        this.arguments = arguments;
    }

    /**
     * Resolves the type and its "new" constructor and analyzes each argument
     * against the constructor's parameter types.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (before != null) {
            // Message punctuation made consistent with the other node types.
            throw new IllegalStateException(error("Illegal tree structure."));
        } else if (store) {
            throw new IllegalArgumentException(error("Cannot assign a value to a new call."));
        }

        final Type type;

        try {
            type = definition.getType(this.type);
        } catch (final IllegalArgumentException exception) {
            throw new IllegalArgumentException(error("Not a type [" + this.type + "]."));
        }

        final Struct struct = type.struct;
        constructor = struct.constructors.get("new");

        if (constructor == null) {
            throw new IllegalArgumentException(error("Unknown new call on type [" + struct.name + "]."));
        }

        // Validate the argument count before doing any per-argument work.
        if (constructor.arguments.size() != arguments.size()) {
            throw new IllegalArgumentException(error("When calling constructor on type [" + struct.name + "]" +
                " expected [" + constructor.arguments.size() + "] arguments, but found [" + arguments.size() + "]."));
        }

        final Type[] types = new Type[constructor.arguments.size()];
        constructor.arguments.toArray(types);

        for (int argument = 0; argument < arguments.size(); ++argument) {
            final AExpression expression = arguments.get(argument);

            expression.expected = types[argument];
            expression.analyze(settings, definition, variables);
            arguments.set(argument, expression.cast(settings, definition, variables));
        }

        statement = true;
        after = type;

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Nothing to emit; load does the work.
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.newInstance(after.type);

        // Duplicate the uninitialized reference when the result is consumed.
        if (load) {
            adapter.dup();
        }

        for (final AExpression argument : arguments) {
            argument.write(settings, definition, adapter);
        }

        adapter.invokeConstructor(constructor.owner.type, constructor.method);
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        throw new IllegalStateException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,105 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Definition.Struct;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a field load/store shortcut. (Internal only.)
*/
final class LShortcut extends ALink {
    final String value;

    Method getter;
    Method setter;

    LShortcut(final String location, final String value) {
        super(location, 1);
        this.value = value;
    }

    /**
     * Looks up getX/setX accessors for the field and validates that they form
     * a usable shortcut for the requested load and/or store.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        final Struct struct = before.struct;
        final String property = Character.toUpperCase(value.charAt(0)) + value.substring(1);

        getter = struct.methods.get("get" + property);
        setter = struct.methods.get("set" + property);

        if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) {
            throw new IllegalArgumentException(error(
                "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "]."));
        }

        if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) {
            throw new IllegalArgumentException(error(
                "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."));
        }

        if (getter != null && setter != null && setter.arguments.get(0) != getter.rtn) {
            throw new IllegalArgumentException(error("Shortcut argument types must match."));
        }

        // Usable only when an accessor exists for each requested operation.
        final boolean usable =
            (getter != null || setter != null) && (!load || getter != null) && (!store || setter != null);

        if (!usable) {
            throw new IllegalArgumentException(error("Illegal shortcut on field [" + value + "] for type [" + struct.name + "]."));
        }

        after = setter == null ? getter.rtn : setter.arguments.get(0);

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Nothing to emit; load/store do the work.
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        invoke(adapter, getter);

        // Cast when the handle's erased return type differs from the declared one.
        if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
            adapter.checkCast(getter.rtn.type);
        }
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        invoke(adapter, setter);
        WriterUtility.writePop(adapter, setter.rtn.sort.size);
    }

    /** Emits an interface or virtual call depending on the method owner's kind. */
    private static void invoke(final GeneratorAdapter adapter, final Method method) {
        if (java.lang.reflect.Modifier.isInterface(method.owner.clazz.getModifiers())) {
            adapter.invokeInterface(method.owner.type, method.method);
        } else {
            adapter.invokeVirtual(method.owner.type, method.method);
        }
    }
}

View File

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a string constant.
*/
public final class LString extends ALink {
    // The constant's value, pushed by load(). The field declaration was
    // missing, leaving the constructor's assignment without a target.
    final String string;

    public LString(final String location, final String string) {
        super(location, -1);
        this.string = string;
    }

    /**
     * A string constant may only be read and never written; its type is
     * always the definition's String type.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (before != null) {
            // Wrapped in error(...) to include location info, consistent with
            // the other node types.
            throw new IllegalStateException(error("Illegal tree structure."));
        } else if (store) {
            throw new IllegalArgumentException(error("Cannot write to read-only String constant [" + string + "]."));
        } else if (!load) {
            throw new IllegalArgumentException(error("Must read String constant [" + string + "]."));
        }

        after = definition.stringType;

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Nothing to emit; load does the work.
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.push(string);
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        throw new IllegalStateException(error("Illegal tree structure."));
    }
}

View File

@ -0,0 +1,90 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.Variables.Variable;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a variable load/store.
*/
public final class LVariable extends ALink {
    final String name;

    int slot;

    public LVariable(final String location, final String name) {
        super(location, 0);
        this.name = name;
    }

    /**
     * Resolves the name either as a type (static access) or, failing that,
     * as a local variable in scope.
     */
    @Override
    ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (before != null) {
            throw new IllegalStateException(error("Illegal tree structure."));
        }

        Type staticType = null;

        try {
            staticType = definition.getType(name);
        } catch (final IllegalArgumentException ignored) {
            // The name is not a type; fall through to the variable lookup.
        }

        if (staticType == null) {
            final Variable variable = variables.getVariable(location, name);

            if (store && variable.readonly) {
                throw new IllegalArgumentException(error("Variable [" + variable.name + "] is read-only."));
            }

            slot = variable.slot;
            after = variable.type;
        } else {
            statik = true;
            after = staticType;
        }

        return this;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Nothing to emit; load/store do the work.
    }

    @Override
    void load(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.visitVarInsn(after.type.getOpcode(Opcodes.ILOAD), slot);
    }

    @Override
    void store(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        adapter.visitVarInsn(after.type.getOpcode(Opcodes.ISTORE), slot);
    }
}

View File

@ -0,0 +1,75 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.Collections;
import java.util.List;
/**
* Represents a set of statements as a branch of control-flow.
*/
public final class SBlock extends AStatement {
    final List<AStatement> statements;

    public SBlock(final String location, final List<AStatement> statements) {
        super(location);
        this.statements = Collections.unmodifiableList(statements);
    }

    /**
     * Analyzes each statement in order, flagging anything after a statement
     * that escapes on every path as unreachable.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        // NOTE(review): assumes at least one statement is present — confirm
        // the parser never produces an empty block.
        final AStatement last = statements.get(statements.size() - 1);

        for (int index = 0; index < statements.size(); ++index) {
            // Once a prior statement escapes on every path, nothing after it can run.
            if (allEscape) {
                throw new IllegalArgumentException(error("Unreachable statement."));
            }

            final AStatement statement = statements.get(index);
            final boolean isLast = statement == last;

            statement.inLoop = inLoop;
            statement.lastSource = lastSource && isLast;
            statement.lastLoop = (beginLoop || lastLoop) && isLast;
            statement.analyze(settings, definition, variables);

            methodEscape = statement.methodEscape;
            loopEscape = statement.loopEscape;
            allEscape = statement.allEscape;
            anyContinue |= statement.anyContinue;
            anyBreak |= statement.anyBreak;
            statementCount += statement.statementCount;
        }
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        for (final AStatement statement : statements) {
            // Propagate the enclosing loop's jump targets before writing.
            statement.continu = continu;
            statement.brake = brake;
            statement.write(settings, definition, adapter);
        }
    }
}

View File

@ -0,0 +1,52 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a break statement.
*/
public final class SBreak extends AStatement {

    public SBreak(final String location) {
        super(location);
    }

    /**
     * A break is only legal inside a loop; it ends this control-flow branch.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (inLoop == false) {
            throw new IllegalArgumentException(error("Break statement outside of a loop."));
        }

        loopEscape = true;
        allEscape = true;
        anyBreak = true;
        statementCount = 1;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Jump straight to the enclosing loop's break target.
        adapter.goTo(brake);
    }
}

View File

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a continue statement.
*/
public final class SContinue extends AStatement {

    public SContinue(final String location) {
        super(location);
    }

    /**
     * A continue is only legal inside a loop, and pointless as the loop's
     * final statement.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        if (inLoop == false) {
            throw new IllegalArgumentException(error("Continue statement outside of a loop."));
        }

        if (lastLoop) {
            // The loop would jump back anyway; an explicit continue is a no-op.
            throw new IllegalArgumentException(error("Extraneous continue statement."));
        }

        allEscape = true;
        anyContinue = true;
        statementCount = 1;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Jump straight to the enclosing loop's continue target.
        adapter.goTo(continu);
    }
}

View File

@ -0,0 +1,58 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.Collections;
import java.util.List;
/**
* Represents a series of declarations.
*/
public final class SDeclBlock extends AStatement {
    final List<SDeclaration> declarations;

    public SDeclBlock(final String location, final List<SDeclaration> declarations) {
        super(location);
        this.declarations = Collections.unmodifiableList(declarations);
    }

    /**
     * Analyzes each declaration in order; each one counts as a statement.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        for (int index = 0; index < declarations.size(); ++index) {
            declarations.get(index).analyze(settings, definition, variables);
        }

        statementCount = declarations.size();
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        for (int index = 0; index < declarations.size(); ++index) {
            declarations.get(index).write(settings, definition, adapter);
        }
    }
}

View File

@ -0,0 +1,86 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.Variables.Variable;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a single variable declaration.
*/
public final class SDeclaration extends AStatement {
    final String type; // declared type's name as written in the source
    final String name; // declared variable's name
    AExpression expression; // initializer, or null to use the type's default value
    Variable variable; // resolved variable (type + local slot); set during analyze

    public SDeclaration(final String location, final String type, final String name, final AExpression expression) {
        super(location);
        this.type = type;
        this.name = name;
        this.expression = expression;
    }

    /**
     * Registers the variable in the current scope and, when an initializer is
     * present, analyzes it and casts it to the declared type.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        // NOTE(review): the two boolean arguments to addVariable look like
        // flags (read-only?) — confirm against Variables.addVariable.
        variable = variables.addVariable(location, type, name, false, false);
        if (expression != null) {
            expression.expected = variable.type;
            expression.analyze(settings, definition, variables);
            expression = expression.cast(settings, definition, variables);
        }
    }

    /**
     * Writes the initializer (or the declared type's default value) and
     * stores the result into the variable's local slot.
     */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final org.objectweb.asm.Type type = variable.type.type;
        final Sort sort = variable.type.sort;
        // No initializer means the type's default value must be pushed instead.
        final boolean initialize = expression == null;
        if (!initialize) {
            expression.write(settings, definition, adapter);
        }
        switch (sort) {
            case VOID: throw new IllegalStateException(error("Illegal tree structure."));
            // BOOL through INT intentionally fall through: each is represented
            // as an int on the operand stack, so 0 is the default for all.
            case BOOL:
            case BYTE:
            case SHORT:
            case CHAR:
            case INT: if (initialize) adapter.push(0); break;
            case LONG: if (initialize) adapter.push(0L); break;
            case FLOAT: if (initialize) adapter.push(0.0F); break;
            case DOUBLE: if (initialize) adapter.push(0.0); break;
            // Reference types default to null.
            default: if (initialize) adapter.visitInsn(Opcodes.ACONST_NULL);
        }
        adapter.visitVarInsn(type.getOpcode(Opcodes.ISTORE), variable.slot);
    }
}

View File

@ -0,0 +1,105 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a do-while loop.
*/
public final class SDo extends AStatement {
    final AStatement block;
    AExpression condition;

    public SDo(final String location, final AStatement block, final AExpression condition) {
        super(location);
        this.condition = condition;
        this.block = block;
    }

    /**
     * Analyzes the loop body and condition in a fresh scope, rejecting loops
     * that can never iterate more than once.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        variables.incrementScope();

        block.beginLoop = true;
        block.inLoop = true;
        block.analyze(settings, definition, variables);

        if (block.loopEscape && block.anyContinue == false) {
            throw new IllegalArgumentException(error("Extraneous do while loop."));
        }

        condition.expected = definition.booleanType;
        condition.analyze(settings, definition, variables);
        condition = condition.cast(settings, definition, variables);

        if (condition.constant != null) {
            // A constant-false condition means the body runs exactly once,
            // making the loop construct pointless.
            if ((boolean)condition.constant == false) {
                throw new IllegalArgumentException(error("Extraneous do while loop."));
            }

            // A constant-true condition with no break never exits normally.
            if (block.anyBreak == false) {
                methodEscape = true;
                allEscape = true;
            }
        }

        statementCount = 1;

        if (settings.getMaxLoopCounter() > 0) {
            loopCounterSlot = variables.getVariable(location, "#loop").slot;
        }

        variables.decrementScope();
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final Label top = new Label();
        final Label check = new Label();
        final Label exit = new Label();

        adapter.mark(top);

        block.continu = check;
        block.brake = exit;
        block.write(settings, definition, adapter);

        adapter.mark(check);

        condition.fals = exit;
        condition.write(settings, definition, adapter);

        WriterUtility.writeLoopCounter(adapter, loopCounterSlot, Math.max(1, block.statementCount));
        adapter.goTo(top);
        adapter.mark(exit);
    }
}

View File

@ -0,0 +1,72 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Sort;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents the top-level node for an expression as a statement.
*/
public final class SExpression extends AStatement {
    AExpression expression;

    public SExpression(final String location, final AExpression expression) {
        super(location);
        this.expression = expression;
    }

    /**
     * Analyzes a bare expression; a non-void expression in the last source
     * position becomes the script's return value.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        expression.read = lastSource;
        expression.analyze(settings, definition, variables);

        if (!lastSource && !expression.statement) {
            throw new IllegalArgumentException(error("Not a statement."));
        }

        final boolean methodReturn = lastSource && expression.actual.sort != Sort.VOID;

        if (methodReturn) {
            // Returned values are boxed up to Object.
            expression.expected = definition.objectType;
        } else {
            expression.expected = expression.actual;
        }

        expression = expression.cast(settings, definition, variables);

        methodEscape = methodReturn;
        loopEscape = methodReturn;
        allEscape = methodReturn;
        statementCount = 1;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        expression.write(settings, definition, adapter);

        if (!methodEscape) {
            // Discard the unused value left on the stack.
            WriterUtility.writePop(adapter, expression.expected.sort.size);
        } else {
            adapter.returnValue();
        }
    }
}

View File

@ -0,0 +1,181 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a for loop.
*/
/**
 * Represents a for loop.
 */
public final class SFor extends AStatement {

    // Either an SDeclBlock or an AExpression; null when the initializer is omitted.
    ANode initializer;
    // Loop condition; null behaves like for(;;) -- the loop is continuous.
    AExpression condition;
    // Expression run at the end of each iteration; null when omitted.
    AExpression afterthought;
    // Loop body; null for an empty for loop.
    final AStatement block;

    public SFor(final String location,
        final ANode initializer, final AExpression condition, final AExpression afterthought, final AStatement block) {
        super(location);

        this.initializer = initializer;
        this.condition = condition;
        this.afterthought = afterthought;
        this.block = block;
    }

    /**
     * Type-checks each loop part and computes the control-flow facts
     * (methodEscape/allEscape/statementCount) used during bytecode generation.
     */
    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        // Variables declared by the initializer are scoped to the loop.
        variables.incrementScope();

        boolean continuous = false;

        if (initializer != null) {
            if (initializer instanceof SDeclBlock) {
                ((SDeclBlock)initializer).analyze(settings, definition, variables);
            } else if (initializer instanceof AExpression) {
                final AExpression initializer = (AExpression)this.initializer;

                // The initializer's value is discarded, so it is analyzed as a non-read statement.
                initializer.read = false;
                initializer.analyze(settings, definition, variables);

                if (!initializer.statement) {
                    throw new IllegalArgumentException(initializer.error("Not a statement."));
                }
                // NOTE(review): write() below reads initializer.expected.sort.size for the pop,
                // but no expected type is assigned here (unlike SExpression.analyze) -- confirm
                // AExpression.analyze sets expected when read is false.
            } else {
                throw new IllegalStateException(error("Illegal tree structure."));
            }
        }

        if (condition != null) {
            // The condition must evaluate to a boolean.
            condition.expected = definition.booleanType;
            condition.analyze(settings, definition, variables);
            condition = condition.cast(settings, definition, variables);

            if (condition.constant != null) {
                continuous = (boolean)condition.constant;

                // A constant-false condition means the body can never run.
                if (!continuous) {
                    throw new IllegalArgumentException(error("Extraneous for loop."));
                }

                // A constant-true condition with no body can never terminate.
                if (block == null) {
                    throw new IllegalArgumentException(error("For loop has no escape."));
                }
            }
        } else {
            // No condition behaves like for(;;): the loop is continuous.
            continuous = true;
        }

        if (afterthought != null) {
            // The afterthought's value is also discarded.
            afterthought.read = false;
            afterthought.analyze(settings, definition, variables);

            if (!afterthought.statement) {
                throw new IllegalArgumentException(afterthought.error("Not a statement."));
            }
        }

        int count = 1;

        if (block != null) {
            block.beginLoop = true;
            block.inLoop = true;

            block.analyze(settings, definition, variables);

            // A body that always leaves the loop without any continue makes the loop pointless.
            if (block.loopEscape && !block.anyContinue) {
                throw new IllegalArgumentException(error("Extraneous for loop."));
            }

            // A continuous loop with no break never falls through: it escapes the method.
            if (continuous && !block.anyBreak) {
                methodEscape = true;
                allEscape = true;
            }

            block.statementCount = Math.max(count, block.statementCount);
        }

        statementCount = 1;

        if (settings.getMaxLoopCounter() > 0) {
            // Reserve the slot of the hidden #loop counter guarding against runaway loops.
            loopCounterSlot = variables.getVariable(location, "#loop").slot;
        }

        variables.decrementScope();
    }

    /**
     * Writes the loop bytecode: initializer, condition test, body, afterthought, back-edge.
     */
    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final Label start = new Label();
        // continue jumps to the afterthought when present, otherwise back to the condition.
        final Label begin = afterthought == null ? start : new Label();
        final Label end = new Label();

        if (initializer instanceof SDeclBlock) {
            ((SDeclBlock)initializer).write(settings, definition, adapter);
        } else if (initializer instanceof AExpression) {
            AExpression initializer = (AExpression)this.initializer;

            initializer.write(settings, definition, adapter);
            // Discard the initializer's value from the operand stack.
            WriterUtility.writePop(adapter, initializer.expected.sort.size);
        }

        adapter.mark(start);

        if (condition != null) {
            // A false condition exits the loop.
            condition.fals = end;
            condition.write(settings, definition, adapter);
        }

        boolean allEscape = false;

        if (block != null) {
            allEscape = block.allEscape;

            // Charge the body plus the afterthought against the loop counter.
            int statementCount = Math.max(1, block.statementCount);

            if (afterthought != null) {
                ++statementCount;
            }

            WriterUtility.writeLoopCounter(adapter, loopCounterSlot, statementCount);
            block.write(settings, definition, adapter);
        } else {
            WriterUtility.writeLoopCounter(adapter, loopCounterSlot, 1);
        }

        if (afterthought != null) {
            adapter.mark(begin);
            afterthought.write(settings, definition, adapter);
        }

        // Emit the back-edge unless the body unconditionally escapes with no afterthought.
        if (afterthought != null || !allEscape) {
            adapter.goTo(start);
        }

        adapter.mark(end);
    }
}

View File

@ -0,0 +1,111 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents an if/else block.
*/
/**
 * Represents an if/else block.
 */
public final class SIfElse extends AStatement {

    AExpression condition;
    final AStatement ifblock;
    // Null when there is no else clause.
    final AStatement elseblock;

    public SIfElse(final String location, final AExpression condition, final AStatement ifblock, final AStatement elseblock) {
        super(location);

        this.condition = condition;
        this.ifblock = ifblock;
        this.elseblock = elseblock;
    }

    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        // The condition must evaluate to a boolean.
        condition.expected = definition.booleanType;
        condition.analyze(settings, definition, variables);
        condition = condition.cast(settings, definition, variables);

        // A compile-time constant condition makes one of the branches dead code.
        if (condition.constant != null) {
            throw new IllegalArgumentException(error("Extraneous if statement."));
        }

        // Propagate control-flow context into the if branch and analyze it in its own scope.
        ifblock.lastSource = lastSource;
        ifblock.inLoop = inLoop;
        ifblock.lastLoop = lastLoop;

        variables.incrementScope();
        ifblock.analyze(settings, definition, variables);
        variables.decrementScope();

        anyContinue = ifblock.anyContinue;
        anyBreak = ifblock.anyBreak;
        statementCount = ifblock.statementCount;

        if (elseblock != null) {
            elseblock.lastSource = lastSource;
            elseblock.inLoop = inLoop;
            elseblock.lastLoop = lastLoop;

            variables.incrementScope();
            elseblock.analyze(settings, definition, variables);
            variables.decrementScope();

            // The statement as a whole escapes only if both branches escape.
            // (Without an else clause, fall-through is always possible.)
            methodEscape = ifblock.methodEscape && elseblock.methodEscape;
            loopEscape = ifblock.loopEscape && elseblock.loopEscape;
            allEscape = ifblock.allEscape && elseblock.allEscape;
            anyContinue |= elseblock.anyContinue;
            anyBreak |= elseblock.anyBreak;
            statementCount = Math.max(ifblock.statementCount, elseblock.statementCount);
        }
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final Label end = new Label();
        // With no else clause a false condition jumps straight past the statement.
        final Label fals = elseblock != null ? new Label() : end;

        condition.fals = fals;
        condition.write(settings, definition, adapter);

        // Branches inherit the enclosing loop's continue/break targets.
        ifblock.continu = continu;
        ifblock.brake = brake;
        ifblock.write(settings, definition, adapter);

        if (elseblock != null) {
            // Skip the else branch unless the if branch always escapes anyway.
            if (!ifblock.allEscape) {
                adapter.goTo(end);
            }

            adapter.mark(fals);

            elseblock.continu = continu;
            elseblock.brake = brake;
            elseblock.write(settings, definition, adapter);
        }

        adapter.mark(end);
    }
}

View File

@ -0,0 +1,58 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a return statement.
*/
/**
 * Represents a return statement.
 */
public final class SReturn extends AStatement {

    AExpression expression;

    public SReturn(final String location, final AExpression expression) {
        super(location);

        this.expression = expression;
    }

    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        // Every script returns an Object, so the returned expression is analyzed
        // against the object type and implicitly cast to it.
        expression.expected = definition.objectType;
        expression.analyze(settings, definition, variables);
        expression = expression.cast(settings, definition, variables);

        // A return unconditionally leaves the method, any enclosing loop,
        // and all subsequent control flow.
        statementCount = 1;
        allEscape = true;
        loopEscape = true;
        methodEscape = true;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Push the (already cast) value, then emit the return instruction.
        expression.write(settings, definition, adapter);
        adapter.returnValue();
    }
}

View File

@ -0,0 +1,76 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.Collections;
import java.util.List;
/**
* The root of all Painless trees. Contains a series of statements.
*/
public final class SSource extends AStatement {
final List<AStatement> statements;
public SSource(final String location, final List<AStatement> statements) {
super(location);
this.statements = Collections.unmodifiableList(statements);
}
@Override
public void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
variables.incrementScope();
final AStatement last = statements.get(statements.size() - 1);
for (final AStatement statement : statements) {
if (allEscape) {
throw new IllegalArgumentException(error("Unreachable statement."));
}
statement.lastSource = statement == last;
statement.analyze(settings, definition, variables);
methodEscape = statement.methodEscape;
allEscape = statement.allEscape;
}
variables.decrementScope();
}
@Override
public void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
for (final AStatement statement : statements) {
statement.write(settings, definition, adapter);
}
if (!methodEscape) {
adapter.visitInsn(Opcodes.ACONST_NULL);
adapter.returnValue();
}
}
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a throw statement.
*/
/**
 * Represents a throw statement.
 */
public final class SThrow extends AStatement {

    AExpression expression;

    public SThrow(final String location, final AExpression expression) {
        super(location);

        this.expression = expression;
    }

    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        // The thrown expression must be castable to the exception type.
        expression.expected = definition.exceptionType;
        expression.analyze(settings, definition, variables);
        expression = expression.cast(settings, definition, variables);

        // A throw unconditionally leaves the method, any enclosing loop,
        // and all subsequent control flow.
        statementCount = 1;
        allEscape = true;
        loopEscape = true;
        methodEscape = true;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        // Push the exception, then emit the athrow instruction.
        expression.write(settings, definition, adapter);
        adapter.throwException();
    }
}

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.Variables.Variable;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a catch block as part of a try-catch block.
*/
/**
 * Represents a catch block as part of a try-catch block.
 */
public final class STrap extends AStatement {

    // The declared exception type name.
    final String type;
    // The name of the caught exception variable.
    final String name;
    // The catch body; null for an empty catch block.
    final AStatement block;

    Variable variable;

    // Labels supplied by the owning STry: the guarded range [begin, end)
    // and the join point after all handlers (null when there is only one catch).
    Label begin;
    Label end;
    Label exception;

    public STrap(final String location, final String type, final String name, final AStatement block) {
        super(location);

        this.type = type;
        this.name = name;
        this.block = block;
    }

    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        variable = variables.addVariable(location, type, name, true, false);

        // The declared type must be a subclass of Exception.
        try {
            variable.type.clazz.asSubclass(Exception.class);
        } catch (final ClassCastException cce) {
            throw new ClassCastException(error("Not an exception type [" + variable.type.name + "]."));
        }

        if (block != null) {
            // Propagate control-flow context into the catch body and mirror its results.
            block.lastSource = lastSource;
            block.inLoop = inLoop;
            block.lastLoop = lastLoop;

            block.analyze(settings, definition, variables);

            methodEscape = block.methodEscape;
            loopEscape = block.loopEscape;
            allEscape = block.allEscape;
            anyContinue = block.anyContinue;
            anyBreak = block.anyBreak;
            statementCount = block.statementCount;
        }
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final Label jump = new Label();

        adapter.mark(jump);
        // Store the caught exception into its local variable slot.
        adapter.visitVarInsn(variable.type.type.getOpcode(Opcodes.ISTORE), variable.slot);

        if (block != null) {
            block.continu = continu;
            block.brake = brake;
            block.write(settings, definition, adapter);
        }

        adapter.visitTryCatchBlock(begin, end, jump, variable.type.type.getInternalName());

        // Jump to the join point unless the catch body always escapes. An empty
        // catch block (block == null) never escapes, so it must jump too; the
        // previous code dereferenced block unconditionally here and threw a
        // NullPointerException for empty catch blocks in multi-catch statements.
        if (exception != null && (block == null || !block.allEscape)) {
            adapter.goTo(exception);
        }
    }
}

View File

@ -0,0 +1,114 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
import java.util.Collections;
import java.util.List;
/**
* Represents the try block as part of a try-catch block.
*/
/**
 * Represents the try block as part of a try-catch block.
 */
public final class STry extends AStatement {

    final AStatement block;
    // The catch handlers, in declaration order.
    final List<STrap> traps;

    public STry(final String location, final AStatement block, final List<STrap> traps) {
        super(location);

        this.block = block;
        this.traps = Collections.unmodifiableList(traps);
    }

    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        // Propagate control-flow context into the try body and analyze it in its own scope.
        block.lastSource = lastSource;
        block.inLoop = inLoop;
        block.lastLoop = lastLoop;

        variables.incrementScope();
        block.analyze(settings, definition, variables);
        variables.decrementScope();

        methodEscape = block.methodEscape;
        loopEscape = block.loopEscape;
        allEscape = block.allEscape;
        anyContinue = block.anyContinue;
        anyBreak = block.anyBreak;

        int statementCount = 0;

        for (final STrap trap : traps) {
            trap.lastSource = lastSource;
            trap.inLoop = inLoop;
            trap.lastLoop = lastLoop;

            variables.incrementScope();
            trap.analyze(settings, definition, variables);
            variables.decrementScope();

            // The try statement escapes only if the try body and every catch escape.
            methodEscape &= trap.methodEscape;
            loopEscape &= trap.loopEscape;
            allEscape &= trap.allEscape;
            anyContinue |= trap.anyContinue;
            anyBreak |= trap.anyBreak;

            // Track the most expensive catch handler.
            statementCount = Math.max(statementCount, trap.statementCount);
        }

        // Total cost is the try body plus the most expensive catch.
        this.statementCount = block.statementCount + statementCount;
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final Label begin = new Label();
        final Label end = new Label();
        final Label exception = new Label();

        adapter.mark(begin);

        block.continu = continu;
        block.brake = brake;
        block.write(settings, definition, adapter);

        // If the try body can fall through, jump past the catch handlers.
        if (!block.allEscape) {
            adapter.goTo(exception);
        }

        adapter.mark(end);

        for (final STrap trap : traps) {
            // Hand each catch the guarded range and, with multiple catches, the join point.
            trap.begin = begin;
            trap.end = end;
            trap.exception = traps.size() > 1 ? exception : null;
            trap.write(settings, definition, adapter);
        }

        // The join label is only needed when someone can actually jump to it.
        if (!block.allEscape || traps.size() > 1) {
            adapter.mark(exception);
        }
    }
}

View File

@ -0,0 +1,121 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Variables;
import org.elasticsearch.painless.WriterUtility;
import org.objectweb.asm.Label;
import org.objectweb.asm.commons.GeneratorAdapter;
/**
* Represents a while loop.
*/
/**
 * Represents a while loop.
 */
public final class SWhile extends AStatement {

    AExpression condition;
    // Loop body; null for an empty while loop.
    final AStatement block;

    public SWhile(final String location, final AExpression condition, final AStatement block) {
        super(location);

        this.condition = condition;
        this.block = block;
    }

    @Override
    void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) {
        variables.incrementScope();

        // The condition must evaluate to a boolean.
        condition.expected = definition.booleanType;
        condition.analyze(settings, definition, variables);
        condition = condition.cast(settings, definition, variables);

        boolean continuous = false;

        if (condition.constant != null) {
            continuous = (boolean)condition.constant;

            // A constant-false condition means the body can never run.
            if (!continuous) {
                throw new IllegalArgumentException(error("Extraneous while loop."));
            }

            // A constant-true condition with no body can never terminate.
            if (block == null) {
                throw new IllegalArgumentException(error("While loop has no escape."));
            }
        }

        int count = 1;

        if (block != null) {
            block.beginLoop = true;
            block.inLoop = true;

            block.analyze(settings, definition, variables);

            // A body that always leaves the loop without any continue makes the loop pointless.
            // (Fixed typo: "Extranous" -> "Extraneous" to match the other loop error messages.)
            if (block.loopEscape && !block.anyContinue) {
                throw new IllegalArgumentException(error("Extraneous while loop."));
            }

            // A continuous loop with no break never falls through: it escapes the method.
            if (continuous && !block.anyBreak) {
                methodEscape = true;
                allEscape = true;
            }

            block.statementCount = Math.max(count, block.statementCount);
        }

        statementCount = 1;

        if (settings.getMaxLoopCounter() > 0) {
            // Reserve the slot of the hidden #loop counter guarding against runaway loops.
            loopCounterSlot = variables.getVariable(location, "#loop").slot;
        }

        variables.decrementScope();
    }

    @Override
    void write(final CompilerSettings settings, final Definition definition, final GeneratorAdapter adapter) {
        final Label begin = new Label();
        final Label end = new Label();

        adapter.mark(begin);

        // A false condition exits the loop.
        condition.fals = end;
        condition.write(settings, definition, adapter);

        if (block != null) {
            // Charge the hidden loop counter per iteration to guard against runaway loops.
            WriterUtility.writeLoopCounter(adapter, loopCounterSlot, Math.max(1, block.statementCount));

            block.continu = begin;
            block.brake = end;
            block.write(settings, definition, adapter);
        } else {
            WriterUtility.writeLoopCounter(adapter, loopCounterSlot, 1);
        }

        // Emit the back-edge unless the body unconditionally escapes.
        if (block == null || !block.allEscape) {
            adapter.goTo(begin);
        }

        adapter.mark(end);
    }
}

View File

@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * A painless tree is composed of the node classes found in this package.
 * <p>
 * The following are the types of nodes:
 * A* (abstract) - These are the abstract nodes that are the superclasses for the other types.
 * S* (statement) - These are nodes that represent a statement in Painless.  These are the highest level nodes.
 * E* (expression) - These are nodes that represent an expression in Painless.  These are the middle level nodes.
 * L* (link) - These are nodes that represent a piece of a variable/method chain.  These are the lowest level nodes.
 * <p>
 * The following is a brief description of each node:
 * {@link org.elasticsearch.painless.node.AExpression} - The superclass for all E* (expression) nodes.
 * {@link org.elasticsearch.painless.node.ALink} - The superclass for all L* (link) nodes.
 * {@link org.elasticsearch.painless.node.ANode} - The superclass for all other nodes.
 * {@link org.elasticsearch.painless.node.AStatement} - The superclass for all S* (statement) nodes.
 * {@link org.elasticsearch.painless.node.EBinary} - Represents a binary math expression.
 * {@link org.elasticsearch.painless.node.EBool} - Represents a boolean expression.
 * {@link org.elasticsearch.painless.node.EBoolean} - Represents a boolean constant.
 * {@link org.elasticsearch.painless.node.ECast} - Represents an implicit cast in most cases.  (Internal only.)
 * {@link org.elasticsearch.painless.node.EChain} - Represents the entirety of a variable/method chain for read/write operations.
 * {@link org.elasticsearch.painless.node.EComp} - Represents a comparison expression.
 * {@link org.elasticsearch.painless.node.EConditional} - Represents a conditional expression.
 * {@link org.elasticsearch.painless.node.EConstant} - Represents a constant.  (Internal only.)
 * {@link org.elasticsearch.painless.node.EDecimal} - Represents a decimal constant.
 * {@link org.elasticsearch.painless.node.EExplicit} - Represents an explicit cast.
 * {@link org.elasticsearch.painless.node.ENull} - Represents a null constant.
 * {@link org.elasticsearch.painless.node.ENumeric} - Represents a non-decimal numeric constant.
 * {@link org.elasticsearch.painless.node.EUnary} - Represents a unary math expression.
 * {@link org.elasticsearch.painless.node.LArrayLength} - Represents an array length field load.
 * {@link org.elasticsearch.painless.node.LBrace} - Represents an array load/store or defers to possible shortcuts.
 * {@link org.elasticsearch.painless.node.LCall} - Represents a method call or defers to a def call.
 * {@link org.elasticsearch.painless.node.LCast} - Represents a cast made in a variable/method chain.
 * {@link org.elasticsearch.painless.node.LDefArray} - Represents an array load/store or shortcut on a def type.  (Internal only.)
 * {@link org.elasticsearch.painless.node.LDefCall} - Represents a method call made on a def type.  (Internal only.)
 * {@link org.elasticsearch.painless.node.LDefField} - Represents a field load/store or shortcut on a def type.  (Internal only.)
 * {@link org.elasticsearch.painless.node.LField} - Represents a field load/store or defers to possible shortcuts.
 * {@link org.elasticsearch.painless.node.LListShortcut} - Represents a list load/store shortcut.  (Internal only.)
 * {@link org.elasticsearch.painless.node.LMapShortcut} - Represents a map load/store shortcut.  (Internal only.)
 * {@link org.elasticsearch.painless.node.LNewArray} - Represents an array instantiation.
 * {@link org.elasticsearch.painless.node.LNewObj} - Represents an object instantiation.
 * {@link org.elasticsearch.painless.node.LShortcut} - Represents a field load/store shortcut.  (Internal only.)
 * {@link org.elasticsearch.painless.node.LString} - Represents a string constant.
 * {@link org.elasticsearch.painless.node.LVariable} - Represents a variable load/store.
 * {@link org.elasticsearch.painless.node.SBlock} - Represents a set of statements as a branch of control-flow.
 * {@link org.elasticsearch.painless.node.SBreak} - Represents a break statement.
 * {@link org.elasticsearch.painless.node.SContinue} - Represents a continue statement.
 * {@link org.elasticsearch.painless.node.SDeclaration} - Represents a single variable declaration.
 * {@link org.elasticsearch.painless.node.SDeclBlock} - Represents a series of declarations.
 * {@link org.elasticsearch.painless.node.SDo} - Represents a do-while loop.
 * {@link org.elasticsearch.painless.node.SExpression} - Represents the top-level node for an expression as a statement.
 * {@link org.elasticsearch.painless.node.SFor} - Represents a for loop.
 * {@link org.elasticsearch.painless.node.SIfElse} - Represents an if/else block.
 * {@link org.elasticsearch.painless.node.SReturn} - Represents a return statement.
 * {@link org.elasticsearch.painless.node.SSource} - The root of all Painless trees.  Contains a series of statements.
 * {@link org.elasticsearch.painless.node.SThrow} - Represents a throw statement.
 * {@link org.elasticsearch.painless.node.STrap} - Represents a catch block as part of a try-catch block.
 * {@link org.elasticsearch.painless.node.STry} - Represents the try block as part of a try-catch block.
 * {@link org.elasticsearch.painless.node.SWhile} - Represents a while loop.
 * <p>
 * Note that internal nodes are generated during the analysis phase by modifying the tree on-the-fly
 * for clarity of development and convenience during the writing phase.
 * <p>
 * All Painless trees must start with an SSource node at the root.  Each node has a constructor that requires
 * all of its values and children be passed in at the time of instantiation.  This means that Painless trees
 * are built bottom-up; however, this helps enforce tree structure to be correct and fits naturally with a
 * standard recursive-descent parser.
 * <p>
 * Generally, statement nodes have member data that evaluate legal control-flow during the analysis phase.
 * The typical order for statement nodes is for each node to call analyze on its children during the analysis phase
 * and write on its children during the writing phase.  No modifications are made to the structure of statement nodes.
 * <p>
 * Generally, expression nodes have member data that evaluate static types.  The typical order for an expression node
 * during the analysis phase looks like the following:
 * {@code
 * For known expected types:
 *
 * expression.child.expected = expectedType      // set the known expected type
 *
 * expression.child.analyze(...)                 // analyze the child node to set the child's actual type
 *
 * expression.child = expression.child.cast(...) // add an implicit cast node if the child node's
 *                                               // actual type is not the expected type and set the
 *                                               // expression's child to the implicit cast node
 *
 * For unknown expected types that need promotion:
 *
 * expression.child.analyze(...)                 // analyze the child node to set the child's actual type
 *
 * Type promote = Caster.promote(...)            // get the promotion type for the child based on
 *                                               // the current operation and child's actual type
 *
 * expression.child.expected = promote           // set the expected type to the promotion type
 *
 * expression.child = expression.child.cast(...) // add an implicit cast node if the child node's
 *                                               // actual type is not the expected type and set the
 *                                               // expression's child to the implicit cast node
 * }
 * Expression nodes just call each child during the writing phase.
 * <p>
 * Generally, link nodes have member data that help keep track of items necessary to do a
 * load/store on a variable/field/method.  Analysis of link nodes happens in a chain node
 * where each link node will be analyzed with the chain node acting as a bridge to pass the
 * previous link's after type to the next link's before type.  Upon analysis completion, a link
 * will return either itself or another link node depending on if a shortcut or def type was found.
 * Cast nodes as links will return null and be removed from the chain node if the cast is
 * unnecessary.  Link nodes have three methods for writing -- write, load, and store.  The write
 * method is always called once before a load/store to give links a chance to write any values
 * such as array indices before the load/store happens.  Load is called to read a link node, and
 * store is called to write a link node.  Note that store will only ever be called on the final
 * link node in a chain, all previous links will be considered loads.
 */
package org.elasticsearch.painless.node;

View File

@ -55,13 +55,12 @@ public class BinaryOperatorTests extends ScriptTestCase {
}
public void testLongShiftsConst() {
// note: we always promote the results of shifts too (unlike java)
assertEquals(1L << 2, exec("return 1L << 2;"));
assertEquals(1L << 2L, exec("return 1 << 2L;"));
assertEquals(4L >> 2L, exec("return 4 >> 2L;"));
assertEquals(1 << 2L, exec("return 1 << 2L;"));
assertEquals(4 >> 2L, exec("return 4 >> 2L;"));
assertEquals(4L >> 2, exec("return 4L >> 2;"));
assertEquals(-1L >>> 29, exec("return -1L >>> 29;"));
assertEquals(-1L >>> 29L, exec("return -1 >>> 29L;"));
assertEquals(-1 >>> 29L, exec("return -1 >>> 29L;"));
}
public void testMixedTypes() {

View File

@ -32,21 +32,23 @@ import java.nio.charset.StandardCharsets;
final class Debugger {
/** compiles source to bytecode, and returns debugging output */
static String toString(String source) {
static String toString(final String source) {
return toString(source, new CompilerSettings());
}
/** compiles to bytecode, and returns debugging output */
static String toString(String source, CompilerSettings settings) {
byte[] bytes = Compiler.compile("debugger", source, Definition.INSTANCE, settings);
ByteArrayOutputStream output = new ByteArrayOutputStream();
PrintWriter outputWriter = new PrintWriter(new OutputStreamWriter(output, StandardCharsets.UTF_8));
ClassReader reader = new ClassReader(bytes);
static String toString(final String source, final CompilerSettings settings) {
final byte[] bytes = Compiler.compile(source, settings);
final ByteArrayOutputStream output = new ByteArrayOutputStream();
final PrintWriter outputWriter = new PrintWriter(new OutputStreamWriter(output, StandardCharsets.UTF_8));
final ClassReader reader = new ClassReader(bytes);
reader.accept(new TraceClassVisitor(outputWriter), 0);
outputWriter.flush();
try {
return output.toString("UTF-8");
} catch (UnsupportedEncodingException e) {
} catch (final UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}

View File

@ -401,26 +401,26 @@ public class DefTests extends ScriptTestCase {
assertEquals(2L, exec("def x = (float)1 def y = (int)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (int)1 return x << y"));
assertEquals(2L, exec("def x = (byte)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (short)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (char)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (int)1 def y = (long)1 return x << y"));
assertEquals(2, exec("def x = (byte)1 def y = (long)1 return x << y"));
assertEquals(2, exec("def x = (short)1 def y = (long)1 return x << y"));
assertEquals(2, exec("def x = (char)1 def y = (long)1 return x << y"));
assertEquals(2, exec("def x = (int)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (byte)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (short)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (char)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (int)1 def y = (float)1 return x << y"));
assertEquals(2, exec("def x = (byte)1 def y = (float)1 return x << y"));
assertEquals(2, exec("def x = (short)1 def y = (float)1 return x << y"));
assertEquals(2, exec("def x = (char)1 def y = (float)1 return x << y"));
assertEquals(2, exec("def x = (int)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (byte)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (short)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (char)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (int)1 def y = (double)1 return x << y"));
assertEquals(2, exec("def x = (byte)1 def y = (double)1 return x << y"));
assertEquals(2, exec("def x = (short)1 def y = (double)1 return x << y"));
assertEquals(2, exec("def x = (char)1 def y = (double)1 return x << y"));
assertEquals(2, exec("def x = (int)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (double)1 return x << y"));
@ -467,26 +467,26 @@ public class DefTests extends ScriptTestCase {
assertEquals(2L, exec("def x = (float)4 def y = (int)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (int)1 return x >> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (short)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (char)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (int)4 def y = (long)1 return x >> y"));
assertEquals(2, exec("def x = (byte)4 def y = (long)1 return x >> y"));
assertEquals(2, exec("def x = (short)4 def y = (long)1 return x >> y"));
assertEquals(2, exec("def x = (char)4 def y = (long)1 return x >> y"));
assertEquals(2, exec("def x = (int)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (short)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (char)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (int)4 def y = (float)1 return x >> y"));
assertEquals(2, exec("def x = (byte)4 def y = (float)1 return x >> y"));
assertEquals(2, exec("def x = (short)4 def y = (float)1 return x >> y"));
assertEquals(2, exec("def x = (char)4 def y = (float)1 return x >> y"));
assertEquals(2, exec("def x = (int)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (short)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (char)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (int)4 def y = (double)1 return x >> y"));
assertEquals(2, exec("def x = (byte)4 def y = (double)1 return x >> y"));
assertEquals(2, exec("def x = (short)4 def y = (double)1 return x >> y"));
assertEquals(2, exec("def x = (char)4 def y = (double)1 return x >> y"));
assertEquals(2, exec("def x = (int)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (double)1 return x >> y"));
@ -533,26 +533,26 @@ public class DefTests extends ScriptTestCase {
assertEquals(2L, exec("def x = (float)4 def y = (int)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (int)1 return x >>> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (short)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (char)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (int)4 def y = (long)1 return x >>> y"));
assertEquals(2, exec("def x = (byte)4 def y = (long)1 return x >>> y"));
assertEquals(2, exec("def x = (short)4 def y = (long)1 return x >>> y"));
assertEquals(2, exec("def x = (char)4 def y = (long)1 return x >>> y"));
assertEquals(2, exec("def x = (int)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (short)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (char)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (int)4 def y = (float)1 return x >>> y"));
assertEquals(2, exec("def x = (byte)4 def y = (float)1 return x >>> y"));
assertEquals(2, exec("def x = (short)4 def y = (float)1 return x >>> y"));
assertEquals(2, exec("def x = (char)4 def y = (float)1 return x >>> y"));
assertEquals(2, exec("def x = (int)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (short)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (char)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (int)4 def y = (double)1 return x >>> y"));
assertEquals(2, exec("def x = (byte)4 def y = (double)1 return x >>> y"));
assertEquals(2, exec("def x = (short)4 def y = (double)1 return x >>> y"));
assertEquals(2, exec("def x = (char)4 def y = (double)1 return x >>> y"));
assertEquals(2, exec("def x = (int)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (double)1 return x >>> y"));

View File

@ -24,15 +24,15 @@ import java.util.HashMap;
/** Tests for special reserved words such as _score */
public class ReservedWordTests extends ScriptTestCase {
/** check that we can't declare a variable of _score, its really reserved! */
public void testScoreVar() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
exec("int _score = 5; return _score;");
});
assertTrue(expected.getMessage().contains("Variable name [_score] already defined"));
assertTrue(expected.getMessage().contains("Variable name [_score] is reserved"));
}
/** check that we can't write to _score, its read-only! */
public void testScoreStore() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
@ -40,15 +40,15 @@ public class ReservedWordTests extends ScriptTestCase {
});
assertTrue(expected.getMessage().contains("Variable [_score] is read-only"));
}
/** check that we can't declare a variable of doc, its really reserved! */
public void testDocVar() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
exec("int doc = 5; return doc;");
});
assertTrue(expected.getMessage().contains("Variable name [doc] already defined"));
assertTrue(expected.getMessage().contains("Variable name [doc] is reserved"));
}
/** check that we can't write to doc, its read-only! */
public void testDocStore() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
@ -56,15 +56,15 @@ public class ReservedWordTests extends ScriptTestCase {
});
assertTrue(expected.getMessage().contains("Variable [doc] is read-only"));
}
/** check that we can't declare a variable of ctx, its really reserved! */
public void testCtxVar() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
exec("int ctx = 5; return ctx;");
});
assertTrue(expected.getMessage().contains("Variable name [ctx] already defined"));
assertTrue(expected.getMessage().contains("Variable name [ctx] is reserved"));
}
/** check that we can't write to ctx, its read-only! */
public void testCtxStore() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
@ -72,20 +72,20 @@ public class ReservedWordTests extends ScriptTestCase {
});
assertTrue(expected.getMessage().contains("Variable [ctx] is read-only"));
}
/** check that we can modify its contents though */
public void testCtxStoreMap() {
assertEquals(5, exec("ctx.foo = 5; return ctx.foo;", Collections.singletonMap("ctx", new HashMap<String,Object>())));
}
/** check that we can't declare a variable of _value, its really reserved! */
public void testAggregationValueVar() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
exec("int _value = 5; return _value;");
});
assertTrue(expected.getMessage().contains("Variable name [_value] already defined"));
assertTrue(expected.getMessage().contains("Variable name [_value] is reserved"));
}
/** check that we can't write to _value, its read-only! */
public void testAggregationValueStore() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {

View File

@ -132,26 +132,30 @@ public class StringTests extends ScriptTestCase {
try {
assertEquals("cc", exec("return (String)(char)\"cc\""));
} catch (final IllegalArgumentException ise) {
ise.getMessage().contains("Cannot cast constant from [String] to [char].");
fail();
} catch (final ClassCastException cce) {
assertTrue(cce.getMessage().contains("Cannot cast from [String] to [char]."));
}
try {
assertEquals("cc", exec("return (String)(char)'cc'"));
} catch (final IllegalArgumentException ise) {
ise.getMessage().contains("Cannot cast constant from [String] to [char].");
fail();
} catch (final ClassCastException cce) {
assertTrue(cce.getMessage().contains("Cannot cast from [String] to [char]."));
}
try {
assertEquals('c', exec("String s = \"cc\" (char)s"));
fail();
} catch (final ClassCastException cce) {
cce.getMessage().contains("Cannot cast [String] with length greater than one to [char].");
assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [char]."));
}
try {
assertEquals('c', exec("String s = 'cc' (char)s"));
fail();
} catch (final ClassCastException cce) {
cce.getMessage().contains("Cannot cast [String] with length greater than one to [char].");
assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [char]."));
}
assertEquals('c', exec("return (Character)\"c\""));
@ -164,26 +168,30 @@ public class StringTests extends ScriptTestCase {
try {
assertEquals("cc", exec("return (String)(Character)\"cc\""));
fail();
} catch (final ClassCastException ise) {
ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character].");
assertTrue(ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character]."));
}
try {
assertEquals("cc", exec("return (String)(Character)'cc'"));
fail();
} catch (final ClassCastException ise) {
ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character].");
assertTrue(ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character]."));
}
try {
assertEquals('c', exec("String s = \"cc\" (Character)s"));
fail();
} catch (final ClassCastException cce) {
cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character].");
assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character]."));
}
try {
assertEquals('c', exec("String s = 'cc' (Character)s"));
fail();
} catch (final ClassCastException cce) {
cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character].");
assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character]."));
}
}
}

View File

@ -98,7 +98,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase {
exec("try { int x } catch (PainlessError error) {}");
fail("should have hit ParseException");
});
assertTrue(parseException.getMessage().contains("Invalid type [PainlessError]."));
assertTrue(parseException.getMessage().contains("Not a type [PainlessError]."));
}
public void testLoopLimits() {
@ -126,20 +126,20 @@ public class WhenThingsGoWrongTests extends ScriptTestCase {
// ok
assertEquals(0, exec(new String(exactlyAtLimit)));
}
public void testIllegalDynamicMethod() {
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
exec("def x = 'test'; return x.getClass().toString()");
});
assertTrue(expected.getMessage().contains("Unable to find dynamic method"));
}
public void testDynamicNPE() {
expectThrows(NullPointerException.class, () -> {
exec("def x = null; return x.toString()");
});
}
public void testDynamicWrongArgs() {
expectThrows(WrongMethodTypeException.class, () -> {
exec("def x = new ArrayList(); return x.get('bogus');");