Merge branch 'master' into shave_mustache

Robert Muir 2015-12-10 07:58:24 -05:00
commit e454fadc22
131 changed files with 21490 additions and 1050 deletions

View File

@ -15,8 +15,15 @@ import org.gradle.api.logging.LogLevel
import org.gradle.api.logging.Logger
import org.junit.runner.Description
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.atomic.AtomicInteger
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.Line;
import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineListener;
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.*
import static com.carrotsearch.gradle.junit4.TestLoggingConfiguration.OutputMode
@ -102,9 +109,36 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv
formatTime(e.getCurrentTime()) + ", stalled for " +
formatDurationInSeconds(e.getNoEventDuration()) + " at: " +
(e.getDescription() == null ? "<unknown>" : formatDescription(e.getDescription())))
try {
playBeat();
} catch (Exception nosound) { /* handling exceptions with style */ }
slowTestsFound = true
}
void playBeat() throws Exception {
Clip clip = (Clip)AudioSystem.getLine(new Line.Info(Clip.class));
final AtomicBoolean stop = new AtomicBoolean();
clip.addLineListener(new LineListener() {
@Override
public void update(LineEvent event) {
if (event.getType() == LineEvent.Type.STOP) {
stop.set(true);
}
}
});
InputStream stream = getClass().getResourceAsStream("/beat.wav");
try {
clip.open(AudioSystem.getAudioInputStream(stream));
clip.start();
while (!stop.get()) {
Thread.sleep(20);
}
clip.close();
} finally {
stream.close();
}
}
@Subscribe
void onQuit(AggregatedQuitEvent e) throws IOException {
if (config.showNumFailuresAtEnd > 0 && !failedTests.isEmpty()) {

Binary file not shown.

View File

@ -78,19 +78,19 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
indices(indices);
aliases(aliases);
}
public AliasActions(AliasAction.Type type, String index, String alias) {
aliasAction = new AliasAction(type);
indices(index);
aliases(alias);
}
AliasActions(AliasAction.Type type, String[] index, String alias) {
aliasAction = new AliasAction(type);
indices(index);
aliases(alias);
}
public AliasActions(AliasAction action) {
this.aliasAction = action;
indices(action.index());
@ -110,7 +110,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
aliasAction.filter(filter);
return this;
}
public AliasActions filter(QueryBuilder filter) {
aliasAction.filter(filter);
return this;
@ -197,7 +197,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
aliasAction = readAliasAction(in);
return this;
}
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(indices);
out.writeStringArray(aliases);
@ -225,7 +225,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
addAliasAction(new AliasActions(action));
return this;
}
/**
* Adds an alias to the index.
* @param alias The alias
@ -247,8 +247,8 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
addAliasAction(new AliasActions(AliasAction.Type.ADD, indices, alias).filter(filterBuilder));
return this;
}
/**
* Removes an alias to the index.
*
@ -259,7 +259,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
addAliasAction(new AliasActions(AliasAction.Type.REMOVE, indices, aliases));
return this;
}
/**
* Removes an alias to the index.
*
@ -286,25 +286,14 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
return addValidationError("Must specify at least one alias action", validationException);
}
for (AliasActions aliasAction : allAliasActions) {
if (aliasAction.actionType() == AliasAction.Type.ADD) {
if (aliasAction.aliases.length != 1) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "] requires exactly one [alias] to be set", validationException);
}
if (!Strings.hasText(aliasAction.aliases[0])) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "] requires an [alias] to be set", validationException);
}
} else {
if (aliasAction.aliases.length == 0) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "]: aliases may not be empty", validationException);
}
for (String alias : aliasAction.aliases) {
if (!Strings.hasText(alias)) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "]: [alias] may not be empty string", validationException);
}
}
}
if (aliasAction.aliases.length == 0) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "]: aliases may not be empty", validationException);
}
for (String alias : aliasAction.aliases) {
if (!Strings.hasText(alias)) {
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
+ "]: [alias] may not be empty string", validationException);
}
}
if (CollectionUtils.isEmpty(aliasAction.indices)) {
@ -345,7 +334,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
public IndicesOptions indicesOptions() {
return INDICES_OPTIONS;
}
private static AliasActions readAliasActions(StreamInput in) throws IOException {
AliasActions actions = new AliasActions();
return actions.readFrom(in);
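A rough usage sketch of the relaxed validation above. The helper methods and constructor bodies are taken from the surrounding hunks; the index and alias names are made up. Every action may now carry any number of aliases, but each alias string must be non-empty:
IndicesAliasesRequest request = new IndicesAliasesRequest();
request.addAlias("logs", "logs-2015"); // non-empty alias: passes validation
request.removeAlias(new String[] { "logs-2015" }, ""); // empty alias string: now rejected for every action type
ActionRequestValidationException error = request.validate();
// error contains: Alias action [remove]: [alias] may not be empty string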

View File

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.flush;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
@ -29,8 +30,8 @@ public class ShardFlushRequest extends ReplicationRequest<ShardFlushRequest> {
private FlushRequest request = new FlushRequest();
public ShardFlushRequest(FlushRequest request) {
super(request);
public ShardFlushRequest(FlushRequest request, ShardId shardId) {
super(request, shardId);
this.request = request;
}
@ -53,5 +54,8 @@ public class ShardFlushRequest extends ReplicationRequest<ShardFlushRequest> {
request.writeTo(out);
}
@Override
public String toString() {
return "flush {" + super.toString() + "}";
}
}

View File

@ -53,7 +53,7 @@ public class TransportFlushAction extends TransportBroadcastReplicationAction<Fl
@Override
protected ShardFlushRequest newShardRequest(FlushRequest request, ShardId shardId) {
return new ShardFlushRequest(request).setShardId(shardId);
return new ShardFlushRequest(request, shardId);
}
@Override

View File

@ -23,18 +23,15 @@ import org.elasticsearch.action.ActionWriteResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.replication.TransportReplicationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -61,15 +58,15 @@ public class TransportShardFlushAction extends TransportReplicationAction<ShardF
}
@Override
protected Tuple<ActionWriteResponse, ShardFlushRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id());
indexShard.flush(shardRequest.request.getRequest());
protected Tuple<ActionWriteResponse, ShardFlushRequest> shardOperationOnPrimary(MetaData metaData, ShardFlushRequest shardRequest) throws Throwable {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id());
indexShard.flush(shardRequest.getRequest());
logger.trace("{} flush request executed on primary", indexShard.shardId());
return new Tuple<>(new ActionWriteResponse(), shardRequest.request);
return new Tuple<>(new ActionWriteResponse(), shardRequest);
}
@Override
protected void shardOperationOnReplica(ShardId shardId, ShardFlushRequest request) {
protected void shardOperationOnReplica(ShardFlushRequest request) {
IndexShard indexShard = indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id());
indexShard.flush(request.getRequest());
logger.trace("{} flush request executed on replica", indexShard.shardId());
@ -81,18 +78,13 @@ public class TransportShardFlushAction extends TransportReplicationAction<ShardF
}
@Override
protected ShardIterator shards(ClusterState clusterState, InternalRequest request) {
return clusterState.getRoutingTable().indicesRouting().get(request.concreteIndex()).getShards().get(request.request().shardId().getId()).shardsIt();
protected ClusterBlockLevel globalBlockLevel() {
return ClusterBlockLevel.METADATA_WRITE;
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, InternalRequest request) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, new String[]{request.concreteIndex()});
protected ClusterBlockLevel indexBlockLevel() {
return ClusterBlockLevel.METADATA_WRITE;
}
@Override
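The two overrides above replace the removed checkGlobalBlock/checkRequestBlock methods. A minimal sketch of how the base class is assumed to consume the declared levels (this logic is not shown in the diff; the calls mirror the removed lines):
ClusterBlockException blockException = state.blocks().globalBlockedException(globalBlockLevel());
if (blockException == null) {
blockException = state.blocks().indicesBlockedException(indexBlockLevel(), new String[] { concreteIndex });
}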

View File

@ -54,7 +54,7 @@ public class TransportRefreshAction extends TransportBroadcastReplicationAction<
@Override
protected ReplicationRequest newShardRequest(RefreshRequest request, ShardId shardId) {
return new ReplicationRequest(request).setShardId(shardId);
return new ReplicationRequest(request, shardId);
}
@Override

View File

@ -24,13 +24,11 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.action.support.replication.TransportReplicationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -62,15 +60,16 @@ public class TransportShardRefreshAction extends TransportReplicationAction<Repl
}
@Override
protected Tuple<ActionWriteResponse, ReplicationRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id());
protected Tuple<ActionWriteResponse, ReplicationRequest> shardOperationOnPrimary(MetaData metaData, ReplicationRequest shardRequest) throws Throwable {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id());
indexShard.refresh("api");
logger.trace("{} refresh request executed on primary", indexShard.shardId());
return new Tuple<>(new ActionWriteResponse(), shardRequest.request);
return new Tuple<>(new ActionWriteResponse(), shardRequest);
}
@Override
protected void shardOperationOnReplica(ShardId shardId, ReplicationRequest request) {
protected void shardOperationOnReplica(ReplicationRequest request) {
final ShardId shardId = request.shardId();
IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShard(shardId.id());
indexShard.refresh("api");
logger.trace("{} refresh request executed on replica", indexShard.shardId());
@ -82,18 +81,13 @@ public class TransportShardRefreshAction extends TransportReplicationAction<Repl
}
@Override
protected ShardIterator shards(ClusterState clusterState, InternalRequest request) {
return clusterState.getRoutingTable().indicesRouting().get(request.concreteIndex()).getShards().get(request.request().shardId().getId()).shardsIt();
protected ClusterBlockLevel globalBlockLevel() {
return ClusterBlockLevel.METADATA_WRITE;
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, InternalRequest request) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, new String[]{request.concreteIndex()});
protected ClusterBlockLevel indexBlockLevel() {
return ClusterBlockLevel.METADATA_WRITE;
}
@Override

View File

@ -40,10 +40,8 @@ public class BulkShardRequest extends ReplicationRequest<BulkShardRequest> {
public BulkShardRequest() {
}
BulkShardRequest(BulkRequest bulkRequest, String index, int shardId, boolean refresh, BulkItemRequest[] items) {
super(bulkRequest);
this.index = index;
this.setShardId(new ShardId(index, shardId));
BulkShardRequest(BulkRequest bulkRequest, ShardId shardId, boolean refresh, BulkItemRequest[] items) {
super(bulkRequest, shardId);
this.items = items;
this.refresh = refresh;
}
@ -93,4 +91,9 @@ public class BulkShardRequest extends ReplicationRequest<BulkShardRequest> {
}
refresh = in.readBoolean();
}
@Override
public String toString() {
return "shard bulk {" + super.toString() + "}";
}
}

View File

@ -275,7 +275,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
list.add(new BulkItemRequest(i, new DeleteRequest(deleteRequest)));
}
} else {
ShardId shardId = clusterService.operationRouting().deleteShards(clusterState, concreteIndex, deleteRequest.type(), deleteRequest.id(), deleteRequest.routing()).shardId();
ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, deleteRequest.type(), deleteRequest.id(), deleteRequest.routing()).shardId();
List<BulkItemRequest> list = requestsByShard.get(shardId);
if (list == null) {
list = new ArrayList<>();
@ -312,7 +312,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
for (Map.Entry<ShardId, List<BulkItemRequest>> entry : requestsByShard.entrySet()) {
final ShardId shardId = entry.getKey();
final List<BulkItemRequest> requests = entry.getValue();
BulkShardRequest bulkShardRequest = new BulkShardRequest(bulkRequest, shardId.index().name(), shardId.id(), bulkRequest.refresh(), requests.toArray(new BulkItemRequest[requests.size()]));
BulkShardRequest bulkShardRequest = new BulkShardRequest(bulkRequest, shardId, bulkRequest.refresh(), requests.toArray(new BulkItemRequest[requests.size()]));
bulkShardRequest.consistencyLevel(bulkRequest.consistencyLevel());
bulkShardRequest.timeout(bulkRequest.timeout());
shardBulkAction.execute(bulkShardRequest, new ActionListener<BulkShardResponse>() {

View File

@ -35,12 +35,11 @@ import org.elasticsearch.action.update.UpdateHelper;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
@ -87,11 +86,6 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
this.allowIdGeneration = settings.getAsBoolean("action.allow_id_generation", true);
}
@Override
protected boolean checkWriteConsistency() {
return true;
}
@Override
protected TransportRequestOptions transportOptions() {
return BulkAction.INSTANCE.transportOptions(settings);
@ -108,15 +102,9 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
}
@Override
protected ShardIterator shards(ClusterState clusterState, InternalRequest request) {
return clusterState.routingTable().index(request.concreteIndex()).shard(request.request().shardId().id()).shardsIt();
}
@Override
protected Tuple<BulkShardResponse, BulkShardRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) {
final BulkShardRequest request = shardRequest.request;
protected Tuple<BulkShardResponse, BulkShardRequest> shardOperationOnPrimary(MetaData metaData, BulkShardRequest request) {
final IndexService indexService = indicesService.indexServiceSafe(request.index());
final IndexShard indexShard = indexService.getShard(shardRequest.shardId.id());
final IndexShard indexShard = indexService.getShard(request.shardId().id());
long[] preVersions = new long[request.items().length];
VersionType[] preVersionTypes = new VersionType[request.items().length];
@ -128,7 +116,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
preVersions[requestIndex] = indexRequest.version();
preVersionTypes[requestIndex] = indexRequest.versionType();
try {
WriteResult<IndexResponse> result = shardIndexOperation(request, indexRequest, clusterState, indexShard, true);
WriteResult<IndexResponse> result = shardIndexOperation(request, indexRequest, metaData, indexShard, true);
location = locationToSync(location, result.location);
// add the response
IndexResponse indexResponse = result.response();
@ -143,9 +131,9 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
throw (ElasticsearchException) e;
}
if (ExceptionsHelper.status(e) == RestStatus.CONFLICT) {
logger.trace("{} failed to execute bulk item (index) {}", e, shardRequest.shardId, indexRequest);
logger.trace("{} failed to execute bulk item (index) {}", e, request.shardId(), indexRequest);
} else {
logger.debug("{} failed to execute bulk item (index) {}", e, shardRequest.shardId, indexRequest);
logger.debug("{} failed to execute bulk item (index) {}", e, request.shardId(), indexRequest);
}
// if its a conflict failure, and we already executed the request on a primary (and we execute it
// again, due to primary relocation and only processing up to N bulk items when the shard gets closed)
@ -178,9 +166,9 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
throw (ElasticsearchException) e;
}
if (ExceptionsHelper.status(e) == RestStatus.CONFLICT) {
logger.trace("{} failed to execute bulk item (delete) {}", e, shardRequest.shardId, deleteRequest);
logger.trace("{} failed to execute bulk item (delete) {}", e, request.shardId(), deleteRequest);
} else {
logger.debug("{} failed to execute bulk item (delete) {}", e, shardRequest.shardId, deleteRequest);
logger.debug("{} failed to execute bulk item (delete) {}", e, request.shardId(), deleteRequest);
}
// if its a conflict failure, and we already executed the request on a primary (and we execute it
// again, due to primary relocation and only processing up to N bulk items when the shard gets closed)
@ -200,7 +188,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
for (int updateAttemptsCount = 0; updateAttemptsCount <= updateRequest.retryOnConflict(); updateAttemptsCount++) {
UpdateResult updateResult;
try {
updateResult = shardUpdateOperation(clusterState, request, updateRequest, indexShard);
updateResult = shardUpdateOperation(metaData, request, updateRequest, indexShard);
} catch (Throwable t) {
updateResult = new UpdateResult(null, null, false, t, null);
}
@ -219,7 +207,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getIndex(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.isCreated());
if (updateRequest.fields() != null && updateRequest.fields().length > 0) {
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true);
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, shardRequest.request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
}
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), indexRequest);
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, updateResponse));
@ -229,7 +217,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
DeleteResponse response = writeResult.response();
DeleteRequest deleteRequest = updateResult.request();
updateResponse = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), false);
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, shardRequest.request.index(), response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null));
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null));
// Replace the update request to the translated delete request to execute on the replica.
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), deleteRequest);
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, updateResponse));
@ -264,16 +252,16 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
if (item.getPrimaryResponse() != null && isConflictException(t)) {
setResponse(item, item.getPrimaryResponse());
} else if (updateResult.result == null) {
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, new BulkItemResponse.Failure(shardRequest.request.index(), updateRequest.type(), updateRequest.id(), t)));
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, new BulkItemResponse.Failure(request.index(), updateRequest.type(), updateRequest.id(), t)));
} else {
switch (updateResult.result.operation()) {
case UPSERT:
case INDEX:
IndexRequest indexRequest = updateResult.request();
if (ExceptionsHelper.status(t) == RestStatus.CONFLICT) {
logger.trace("{} failed to execute bulk item (index) {}", t, shardRequest.shardId, indexRequest);
logger.trace("{} failed to execute bulk item (index) {}", t, request.shardId(), indexRequest);
} else {
logger.debug("{} failed to execute bulk item (index) {}", t, shardRequest.shardId, indexRequest);
logger.debug("{} failed to execute bulk item (index) {}", t, request.shardId(), indexRequest);
}
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE,
new BulkItemResponse.Failure(request.index(), indexRequest.type(), indexRequest.id(), t)));
@ -281,9 +269,9 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
case DELETE:
DeleteRequest deleteRequest = updateResult.request();
if (ExceptionsHelper.status(t) == RestStatus.CONFLICT) {
logger.trace("{} failed to execute bulk item (delete) {}", t, shardRequest.shardId, deleteRequest);
logger.trace("{} failed to execute bulk item (delete) {}", t, request.shardId(), deleteRequest);
} else {
logger.debug("{} failed to execute bulk item (delete) {}", t, shardRequest.shardId, deleteRequest);
logger.debug("{} failed to execute bulk item (delete) {}", t, request.shardId(), deleteRequest);
}
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_DELETE,
new BulkItemResponse.Failure(request.index(), deleteRequest.type(), deleteRequest.id(), t)));
@ -310,7 +298,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
for (int i = 0; i < items.length; i++) {
responses[i] = items[i].getPrimaryResponse();
}
return new Tuple<>(new BulkShardResponse(shardRequest.shardId, responses), shardRequest.request);
return new Tuple<>(new BulkShardResponse(request.shardId(), responses), request);
}
private void setResponse(BulkItemRequest request, BulkItemResponse response) {
@ -320,11 +308,11 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
}
}
private WriteResult<IndexResponse> shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, ClusterState clusterState,
private WriteResult shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, MetaData metaData,
IndexShard indexShard, boolean processed) throws Throwable {
// validate, if routing is required, that we got routing
MappingMetaData mappingMd = clusterState.metaData().index(request.index()).mappingOrDefault(indexRequest.type());
MappingMetaData mappingMd = metaData.index(request.index()).mappingOrDefault(indexRequest.type());
if (mappingMd != null && mappingMd.routing().required()) {
if (indexRequest.routing() == null) {
throw new RoutingMissingException(request.index(), indexRequest.type(), indexRequest.id());
@ -332,7 +320,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
}
if (!processed) {
indexRequest.process(clusterState.metaData(), mappingMd, allowIdGeneration, request.index());
indexRequest.process(metaData, mappingMd, allowIdGeneration, request.index());
}
return TransportIndexAction.executeIndexRequestOnPrimary(indexRequest, indexShard, mappingUpdatedAction);
}
@ -390,14 +378,14 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
}
private UpdateResult shardUpdateOperation(ClusterState clusterState, BulkShardRequest bulkShardRequest, UpdateRequest updateRequest, IndexShard indexShard) {
private UpdateResult shardUpdateOperation(MetaData metaData, BulkShardRequest bulkShardRequest, UpdateRequest updateRequest, IndexShard indexShard) {
UpdateHelper.Result translate = updateHelper.prepare(updateRequest, indexShard);
switch (translate.operation()) {
case UPSERT:
case INDEX:
IndexRequest indexRequest = translate.action();
try {
WriteResult result = shardIndexOperation(bulkShardRequest, indexRequest, clusterState, indexShard, false);
WriteResult result = shardIndexOperation(bulkShardRequest, indexRequest, metaData, indexShard, false);
return new UpdateResult(translate, indexRequest, result);
} catch (Throwable t) {
t = ExceptionsHelper.unwrapCause(t);
@ -431,7 +419,8 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
@Override
protected void shardOperationOnReplica(ShardId shardId, BulkShardRequest request) {
protected void shardOperationOnReplica(BulkShardRequest request) {
final ShardId shardId = request.shardId();
IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
IndexShard indexShard = indexService.getShard(shardId.id());
Translog.Location location = null;
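Taken together, the hunks above change the TransportReplicationAction contract: primary operations receive MetaData plus the request, and replica operations receive only the request, which now carries its own ShardId. A hedged sketch for a hypothetical subclass (MyRequest and MyResponse are placeholders, not types from the commit):
@Override
protected Tuple<MyResponse, MyRequest> shardOperationOnPrimary(MetaData metaData, MyRequest request) throws Throwable {
IndexShard primary = indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id());
// ... apply the write on the primary ...
return new Tuple<>(new MyResponse(), request); // the same request is then replayed on the replicas
}
@Override
protected void shardOperationOnReplica(MyRequest request) {
ShardId shardId = request.shardId(); // no separate ShardId parameter anymore
// ... apply the write on the replica ...
}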

View File

@ -34,7 +34,7 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -94,45 +94,41 @@ public class TransportDeleteAction extends TransportReplicationAction<DeleteRequ
}
@Override
protected void resolveRequest(final ClusterState state, final InternalRequest request, final ActionListener<DeleteResponse> listener) {
request.request().routing(state.metaData().resolveIndexRouting(request.request().routing(), request.request().index()));
if (state.metaData().hasIndex(request.concreteIndex())) {
protected void resolveRequest(final MetaData metaData, String concreteIndex, DeleteRequest request) {
request.routing(metaData.resolveIndexRouting(request.routing(), request.index()));
if (metaData.hasIndex(concreteIndex)) {
// check if routing is required, if so, do a broadcast delete
MappingMetaData mappingMd = state.metaData().index(request.concreteIndex()).mappingOrDefault(request.request().type());
MappingMetaData mappingMd = metaData.index(concreteIndex).mappingOrDefault(request.type());
if (mappingMd != null && mappingMd.routing().required()) {
if (request.request().routing() == null) {
if (request.request().versionType() != VersionType.INTERNAL) {
if (request.routing() == null) {
if (request.versionType() != VersionType.INTERNAL) {
// TODO: implement this feature
throw new IllegalArgumentException("routing value is required for deleting documents of type [" + request.request().type()
+ "] while using version_type [" + request.request().versionType() + "]");
throw new IllegalArgumentException("routing value is required for deleting documents of type [" + request.type()
+ "] while using version_type [" + request.versionType() + "]");
}
throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id());
throw new RoutingMissingException(concreteIndex, request.type(), request.id());
}
}
}
ShardId shardId = clusterService.operationRouting().shardId(clusterService.state(), concreteIndex, request.id(), request.routing());
request.setShardId(shardId);
}
private void innerExecute(final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
super.doExecute(request, listener);
}
@Override
protected boolean checkWriteConsistency() {
return true;
}
@Override
protected DeleteResponse newResponseInstance() {
return new DeleteResponse();
}
@Override
protected Tuple<DeleteResponse, DeleteRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) {
DeleteRequest request = shardRequest.request;
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id());
protected Tuple<DeleteResponse, DeleteRequest> shardOperationOnPrimary(MetaData metaData, DeleteRequest request) {
IndexShard indexShard = indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id());
final WriteResult<DeleteResponse> result = executeDeleteRequestOnPrimary(request, indexShard);
processAfterWrite(request.refresh(), indexShard, result.location);
return new Tuple<>(result.response, shardRequest.request);
return new Tuple<>(result.response, request);
}
public static WriteResult<DeleteResponse> executeDeleteRequestOnPrimary(DeleteRequest request, IndexShard indexShard) {
@ -154,17 +150,12 @@ public class TransportDeleteAction extends TransportReplicationAction<DeleteRequ
return delete;
}
@Override
protected void shardOperationOnReplica(ShardId shardId, DeleteRequest request) {
protected void shardOperationOnReplica(DeleteRequest request) {
final ShardId shardId = request.shardId();
IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShard(shardId.id());
Engine.Delete delete = executeDeleteRequestOnReplica(request, indexShard);
processAfterWrite(request.refresh(), indexShard, delete.getTranslogLocation());
}
@Override
protected ShardIterator shards(ClusterState clusterState, InternalRequest request) {
return clusterService.operationRouting()
.deleteShards(clusterService.state(), request.concreteIndex(), request.request().type(), request.request().id(), request.request().routing());
}
}

View File

@ -36,7 +36,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -120,62 +119,51 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
}
@Override
protected void resolveRequest(ClusterState state, InternalRequest request, ActionListener<IndexResponse> indexResponseActionListener) {
MetaData metaData = clusterService.state().metaData();
protected void resolveRequest(MetaData metaData, String concreteIndex, IndexRequest request) {
MappingMetaData mappingMd = null;
if (metaData.hasIndex(request.concreteIndex())) {
mappingMd = metaData.index(request.concreteIndex()).mappingOrDefault(request.request().type());
if (metaData.hasIndex(concreteIndex)) {
mappingMd = metaData.index(concreteIndex).mappingOrDefault(request.type());
}
request.request().process(metaData, mappingMd, allowIdGeneration, request.concreteIndex());
request.process(metaData, mappingMd, allowIdGeneration, concreteIndex);
ShardId shardId = clusterService.operationRouting().shardId(clusterService.state(), concreteIndex, request.id(), request.routing());
request.setShardId(shardId);
}
private void innerExecute(final IndexRequest request, final ActionListener<IndexResponse> listener) {
super.doExecute(request, listener);
}
@Override
protected boolean checkWriteConsistency() {
return true;
}
@Override
protected IndexResponse newResponseInstance() {
return new IndexResponse();
}
@Override
protected ShardIterator shards(ClusterState clusterState, InternalRequest request) {
return clusterService.operationRouting()
.indexShards(clusterService.state(), request.concreteIndex(), request.request().type(), request.request().id(), request.request().routing());
}
@Override
protected Tuple<IndexResponse, IndexRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable {
final IndexRequest request = shardRequest.request;
protected Tuple<IndexResponse, IndexRequest> shardOperationOnPrimary(MetaData metaData, IndexRequest request) throws Throwable {
// validate, if routing is required, that we got routing
IndexMetaData indexMetaData = clusterState.metaData().index(shardRequest.shardId.getIndex());
IndexMetaData indexMetaData = metaData.index(request.shardId().getIndex());
MappingMetaData mappingMd = indexMetaData.mappingOrDefault(request.type());
if (mappingMd != null && mappingMd.routing().required()) {
if (request.routing() == null) {
throw new RoutingMissingException(shardRequest.shardId.getIndex(), request.type(), request.id());
throw new RoutingMissingException(request.shardId().getIndex(), request.type(), request.id());
}
}
IndexService indexService = indicesService.indexServiceSafe(shardRequest.shardId.getIndex());
IndexShard indexShard = indexService.getShard(shardRequest.shardId.id());
IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
IndexShard indexShard = indexService.getShard(request.shardId().id());
final WriteResult<IndexResponse> result = executeIndexRequestOnPrimary(request, indexShard, mappingUpdatedAction);
final IndexResponse response = result.response;
final Translog.Location location = result.location;
processAfterWrite(request.refresh(), indexShard, location);
return new Tuple<>(response, shardRequest.request);
return new Tuple<>(response, request);
}
@Override
protected void shardOperationOnReplica(ShardId shardId, IndexRequest request) {
protected void shardOperationOnReplica(IndexRequest request) {
final ShardId shardId = request.shardId();
IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
IndexShard indexShard = indexService.getShard(shardId.id());
final Engine.Index operation = executeIndexRequestOnReplica(request, indexShard);

View File

@ -42,7 +42,12 @@ public class ReplicationRequest<T extends ReplicationRequest> extends ActionRequ
public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES);
ShardId internalShardId;
/**
* Target shard the request should execute on. In case of index and delete requests,
* shard id gets resolved by the transport action before performing request operation
* and at request creation time for shard-level bulk, refresh and flush requests.
*/
protected ShardId shardId;
protected TimeValue timeout = DEFAULT_TIMEOUT;
protected String index;
@ -60,6 +65,15 @@ public class ReplicationRequest<T extends ReplicationRequest> extends ActionRequ
super(request);
}
/**
* Creates a new request with resolved shard id
*/
public ReplicationRequest(ActionRequest request, ShardId shardId) {
super(request);
this.index = shardId.getIndex();
this.shardId = shardId;
}
/**
* Copy constructor that creates a new request that is a copy of the one provided as an argument.
*/
@ -124,12 +138,12 @@ public class ReplicationRequest<T extends ReplicationRequest> extends ActionRequ
/**
* @return the shardId of the shard where this operation should be executed on.
* can be null in case the shardId is determined by a single document (index, type, id) for example for index or delete request.
* can be null if the shardID has not yet been resolved
*/
public
@Nullable
ShardId shardId() {
return internalShardId;
return shardId;
}
/**
@ -154,9 +168,9 @@ public class ReplicationRequest<T extends ReplicationRequest> extends ActionRequ
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
if (in.readBoolean()) {
internalShardId = ShardId.readShardId(in);
shardId = ShardId.readShardId(in);
} else {
internalShardId = null;
shardId = null;
}
consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
timeout = TimeValue.readTimeValue(in);
@ -166,9 +180,9 @@ public class ReplicationRequest<T extends ReplicationRequest> extends ActionRequ
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (internalShardId != null) {
if (shardId != null) {
out.writeBoolean(true);
internalShardId.writeTo(out);
shardId.writeTo(out);
} else {
out.writeBoolean(false);
}
@ -177,9 +191,21 @@ public class ReplicationRequest<T extends ReplicationRequest> extends ActionRequ
out.writeString(index);
}
/**
* Sets the target shard id for the request. The shard id is set when a
* index/delete request is resolved by the transport action
*/
public T setShardId(ShardId shardId) {
this.internalShardId = shardId;
this.index = shardId.getIndex();
this.shardId = shardId;
return (T) this;
}
@Override
public String toString() {
if (shardId != null) {
return shardId.toString();
} else {
return index;
}
}
}
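A small sketch of the two resolution paths described in the javadoc above; the FlushRequest and IndexRequest constructors are assumed from the surrounding hunks, and the index, type and id values are invented:
// shard-level requests carry a resolved ShardId from the moment they are created
ShardId shardId = new ShardId("index-1", 0);
ShardFlushRequest shardFlush = new ShardFlushRequest(new FlushRequest("index-1"), shardId);
// index and delete requests start without one; the transport action fills it in during resolveRequest()
IndexRequest indexRequest = new IndexRequest("index-1", "doc", "1").source("field", "value");
assert indexRequest.shardId() == null; // toString() falls back to the index name until then
// later, inside the transport action:
// indexRequest.setShardId(clusterService.operationRouting().shardId(state, "index-1", "1", null));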

View File

@ -19,7 +19,6 @@
package org.elasticsearch.action.termvectors;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocumentRequest;
import org.elasticsearch.action.support.ActionFilters;
@ -79,8 +78,8 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
new IllegalArgumentException("routing is required for [" + concreteSingleIndex + "]/[" + termVectorsRequest.type() + "]/[" + termVectorsRequest.id() + "]"))));
continue;
}
ShardId shardId = clusterService.operationRouting().getShards(clusterState, concreteSingleIndex,
termVectorsRequest.type(), termVectorsRequest.id(), termVectorsRequest.routing(), null).shardId();
ShardId shardId = clusterService.operationRouting().shardId(clusterState, concreteSingleIndex,
termVectorsRequest.id(), termVectorsRequest.routing());
MultiTermVectorsShardRequest shardRequest = shardRequests.get(shardId);
if (shardRequest == null) {
shardRequest = new MultiTermVectorsShardRequest(request, shardId.index().name(), shardId.id());

View File

@ -25,7 +25,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.math.MathUtils;
@ -55,19 +54,16 @@ public class OperationRouting extends AbstractComponent {
}
public ShardIterator indexShards(ClusterState clusterState, String index, String type, String id, @Nullable String routing) {
return shards(clusterState, index, type, id, routing).shardsIt();
}
public ShardIterator deleteShards(ClusterState clusterState, String index, String type, String id, @Nullable String routing) {
return shards(clusterState, index, type, id, routing).shardsIt();
return shards(clusterState, index, id, routing).shardsIt();
}
public ShardIterator getShards(ClusterState clusterState, String index, String type, String id, @Nullable String routing, @Nullable String preference) {
return preferenceActiveShardIterator(shards(clusterState, index, type, id, routing), clusterState.nodes().localNodeId(), clusterState.nodes(), preference);
return preferenceActiveShardIterator(shards(clusterState, index, id, routing), clusterState.nodes().localNodeId(), clusterState.nodes(), preference);
}
public ShardIterator getShards(ClusterState clusterState, String index, int shardId, @Nullable String preference) {
return preferenceActiveShardIterator(shards(clusterState, index, shardId), clusterState.nodes().localNodeId(), clusterState.nodes(), preference);
final IndexShardRoutingTable indexShard = clusterState.getRoutingTable().shardRoutingTable(index, shardId);
return preferenceActiveShardIterator(indexShard, clusterState.nodes().localNodeId(), clusterState.nodes(), preference);
}
public GroupShardsIterator broadcastDeleteShards(ClusterState clusterState, String index) {
@ -102,7 +98,7 @@ public class OperationRouting extends AbstractComponent {
final Set<String> effectiveRouting = routing.get(index);
if (effectiveRouting != null) {
for (String r : effectiveRouting) {
int shardId = shardId(clusterState, index, null, null, r);
int shardId = generateShardId(clusterState, index, null, r);
IndexShardRoutingTable indexShard = indexRouting.shard(shardId);
if (indexShard == null) {
throw new ShardNotFoundException(new ShardId(index, shardId));
@ -200,14 +196,6 @@ public class OperationRouting extends AbstractComponent {
}
}
public IndexMetaData indexMetaData(ClusterState clusterState, String index) {
IndexMetaData indexMetaData = clusterState.metaData().index(index);
if (indexMetaData == null) {
throw new IndexNotFoundException(index);
}
return indexMetaData;
}
protected IndexRoutingTable indexRoutingTable(ClusterState clusterState, String index) {
IndexRoutingTable indexRouting = clusterState.routingTable().index(index);
if (indexRouting == null) {
@ -216,25 +204,20 @@ public class OperationRouting extends AbstractComponent {
return indexRouting;
}
// either routing is set, or type/id are set
protected IndexShardRoutingTable shards(ClusterState clusterState, String index, String type, String id, String routing) {
int shardId = shardId(clusterState, index, type, id, routing);
return shards(clusterState, index, shardId);
protected IndexShardRoutingTable shards(ClusterState clusterState, String index, String id, String routing) {
int shardId = generateShardId(clusterState, index, id, routing);
return clusterState.getRoutingTable().shardRoutingTable(index, shardId);
}
protected IndexShardRoutingTable shards(ClusterState clusterState, String index, int shardId) {
IndexShardRoutingTable indexShard = indexRoutingTable(clusterState, index).shard(shardId);
if (indexShard == null) {
throw new ShardNotFoundException(new ShardId(index, shardId));
}
return indexShard;
}
@SuppressForbidden(reason = "Math#abs is trappy")
private int shardId(ClusterState clusterState, String index, String type, String id, @Nullable String routing) {
final IndexMetaData indexMetaData = indexMetaData(clusterState, index);
public ShardId shardId(ClusterState clusterState, String index, String id, @Nullable String routing) {
return new ShardId(index, generateShardId(clusterState, index, id, routing));
}
private int generateShardId(ClusterState clusterState, String index, String id, @Nullable String routing) {
IndexMetaData indexMetaData = clusterState.metaData().index(index);
if (indexMetaData == null) {
throw new IndexNotFoundException(index);
}
final int hash;
if (routing == null) {
hash = Murmur3HashFunction.hash(id);
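With the type parameter dropped and deleteShards removed, index and delete operations for the same document id resolve to the same shard through a single code path. A minimal sketch of the new entry points (the cluster state, index name and document id are assumed):
ShardId target = operationRouting.shardId(clusterState, "index-1", "doc-id", null /* routing */);
ShardIterator primaryAndReplicas = operationRouting.indexShards(clusterState, "index-1", "doc", "doc-id", null);
assert target.equals(primaryAndReplicas.shardId());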

View File

@ -33,6 +33,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
@ -95,6 +97,24 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
return indicesRouting();
}
/**
* All shards for the provided index and shard id
* @return All the shard routing entries for the given index and shard id
* @throws IndexNotFoundException if provided index does not exist
* @throws ShardNotFoundException if provided shard id is unknown
*/
public IndexShardRoutingTable shardRoutingTable(String index, int shardId) {
IndexRoutingTable indexRouting = index(index);
if (indexRouting == null) {
throw new IndexNotFoundException(index);
}
IndexShardRoutingTable shard = indexRouting.shard(shardId);
if (shard == null) {
throw new ShardNotFoundException(new ShardId(index, shardId));
}
return shard;
}
public RoutingTable validateRaiseException(MetaData metaData) throws RoutingValidationException {
RoutingTableValidation validation = validate(metaData);
if (!validation.valid()) {

View File

@ -137,8 +137,7 @@ public class NetworkService extends AbstractComponent {
* Resolves {@code publishHosts} to a single publish address. The fact that it returns
* only one address is just a current limitation.
* <p>
* If {@code publishHosts} resolves to more than one address, <b>then one is selected with magic</b>,
* and the user is warned (they can always just be more specific).
* If {@code publishHosts} resolves to more than one address, <b>then one is selected with magic</b>
* @param publishHosts list of hosts to publish as. this may contain special pseudo-hostnames
* such as _local_ (see the documentation). if it is null, it will be populated
* based on global default settings.
@ -186,13 +185,12 @@ public class NetworkService extends AbstractComponent {
}
}
// 3. warn user if we end out with multiple publish addresses
// 3. if we end out with multiple publish addresses, select by preference.
// don't warn the user, or they will get confused by bind_host vs publish_host etc.
if (addresses.length > 1) {
List<InetAddress> sorted = new ArrayList<>(Arrays.asList(addresses));
NetworkUtils.sortAddresses(sorted);
addresses = new InetAddress[] { sorted.get(0) };
logger.warn("publish host: {} resolves to multiple addresses, auto-selecting {{}} as single publish address",
Arrays.toString(publishHosts), NetworkAddress.format(addresses[0]));
}
return addresses[0];
}

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.common.util;
import org.apache.lucene.util.ThreadInterruptedException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
@ -84,7 +85,7 @@ public class CancellableThreads {
RuntimeException throwable = null;
try {
interruptable.run();
} catch (InterruptedException e) {
} catch (InterruptedException | ThreadInterruptedException e) {
// assume this is us and ignore
} catch (RuntimeException t) {
throwable = t;

View File

@ -178,12 +178,6 @@ public interface XContentParser extends Releasable {
NumberType numberType() throws IOException;
/**
* Is the number type estimated or not (i.e. an int might actually be a long, its just low enough
* to be an int).
*/
boolean estimatedNumberType();
short shortValue(boolean coerce) throws IOException;
int intValue(boolean coerce) throws IOException;

View File

@ -68,11 +68,6 @@ public class JsonXContentParser extends AbstractXContentParser {
return convertNumberType(parser.getNumberType());
}
@Override
public boolean estimatedNumberType() {
return true;
}
@Override
public String currentName() throws IOException {
return parser.getCurrentName();

View File

@ -560,44 +560,19 @@ class DocumentParser implements Closeable {
return builder;
} else if (token == XContentParser.Token.VALUE_NUMBER) {
XContentParser.NumberType numberType = context.parser().numberType();
if (numberType == XContentParser.NumberType.INT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = MapperBuilders.longField(currentFieldName);
}
return builder;
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
if (builder == null) {
builder = MapperBuilders.integerField(currentFieldName);
}
return builder;
}
} else if (numberType == XContentParser.NumberType.LONG) {
if (numberType == XContentParser.NumberType.INT || numberType == XContentParser.NumberType.LONG) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = MapperBuilders.longField(currentFieldName);
}
return builder;
} else if (numberType == XContentParser.NumberType.FLOAT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = MapperBuilders.doubleField(currentFieldName);
}
return builder;
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
if (builder == null) {
builder = MapperBuilders.floatField(currentFieldName);
}
return builder;
}
} else if (numberType == XContentParser.NumberType.DOUBLE) {
} else if (numberType == XContentParser.NumberType.FLOAT || numberType == XContentParser.NumberType.DOUBLE) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = MapperBuilders.doubleField(currentFieldName);
// no templates are defined, we use float by default instead of double
// since this is much more space-efficient and should be enough most of
// the time
builder = MapperBuilders.floatField(currentFieldName);
}
return builder;
}
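The collapsed branches above mean that, absent a matching dynamic template, whole JSON numbers are dynamically mapped as long and fractional numbers as float (previously double). A rough illustration, assuming a client handle named client and an unmapped index:
client.prepareIndex("index-1", "doc", "1")
.setSource("count", 7, "ratio", 0.5) // count -> long, ratio -> float after this change
.get();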

View File

@ -148,7 +148,7 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
};
}
class MultiFieldParserContext extends ParserContext {
static class MultiFieldParserContext extends ParserContext {
MultiFieldParserContext(ParserContext in) {
super(in.type(), in.analysisService, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher());
}

View File

@ -46,7 +46,7 @@ import java.util.Map;
import static org.apache.lucene.index.IndexOptions.NONE;
import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {
@ -159,7 +159,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
StringFieldMapper.Builder builder = stringField(name);
parseField(builder, name, node, parserContext);
parseTextField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());

View File

@ -182,9 +182,72 @@ public class TypeParsers {
}
}
public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer();
NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer();
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
final String propName = Strings.toUnderscoreCase(entry.getKey());
final Object propNode = entry.getValue();
if (propName.equals("term_vector")) {
parseTermVector(name, propNode.toString(), builder);
iterator.remove();
} else if (propName.equals("store_term_vectors")) {
builder.storeTermVectors(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_offsets")) {
builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_positions")) {
builder.storeTermVectorPositions(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_payloads")) {
builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0
propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
indexAnalyzer = analyzer;
iterator.remove();
} else if (propName.equals("search_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
searchAnalyzer = analyzer;
iterator.remove();
}
}
if (indexAnalyzer == null) {
if (searchAnalyzer != null) {
throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
}
} else if (searchAnalyzer == null) {
searchAnalyzer = indexAnalyzer;
}
builder.indexAnalyzer(indexAnalyzer);
builder.searchAnalyzer(searchAnalyzer);
}
/**
* Parse text field attributes. In addition to {@link #parseField common attributes}
* this will parse analysis and term-vectors related settings.
*/
public static void parseTextField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
parseField(builder, name, fieldNode, parserContext);
parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
}
/**
* Parse common field attributes such as {@code doc_values} or {@code store}.
*/
public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
Version indexVersionCreated = parserContext.indexVersionCreated();
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@ -202,24 +265,9 @@ public class TypeParsers {
} else if (propName.equals(DOC_VALUES)) {
builder.docValues(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("term_vector")) {
parseTermVector(name, propNode.toString(), builder);
iterator.remove();
} else if (propName.equals("boost")) {
builder.boost(nodeFloatValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vectors")) {
builder.storeTermVectors(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_offsets")) {
builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_positions")) {
builder.storeTermVectorPositions(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("store_term_vector_payloads")) {
builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("omit_norms")) {
builder.omitNorms(nodeBooleanValue(propNode));
iterator.remove();
@ -250,22 +298,6 @@ public class TypeParsers {
} else if (propName.equals("index_options")) {
builder.indexOptions(nodeIndexOptionValue(propNode));
iterator.remove();
} else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0
propName.equals("index_analyzer") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
indexAnalyzer = analyzer;
iterator.remove();
} else if (propName.equals("search_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
searchAnalyzer = analyzer;
iterator.remove();
} else if (propName.equals("include_in_all")) {
builder.includeInAll(nodeBooleanValue(propNode));
iterator.remove();
@ -296,16 +328,11 @@ public class TypeParsers {
iterator.remove();
}
}
if (indexAnalyzer == null) {
if (searchAnalyzer != null) {
throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
}
} else if (searchAnalyzer == null) {
searchAnalyzer = indexAnalyzer;
if (indexVersionCreated.before(Version.V_2_2_0)) {
// analyzer, search_analyzer, term_vectors were accepted on all fields
// before 2.2, even though it made little sense
parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
}
builder.indexAnalyzer(indexAnalyzer);
builder.searchAnalyzer(searchAnalyzer);
}
public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {

View File

@ -49,7 +49,7 @@ import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
/**
*
@ -134,7 +134,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
}
}
parseField(builder, builder.name, node, parserContext);
parseTextField(builder, builder.name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());

View File

@ -83,6 +83,7 @@ public class PluginManager {
"discovery-gce",
"discovery-multicast",
"lang-javascript",
"lang-plan-a",
"lang-python",
"mapper-attachments",
"mapper-murmur3",

View File

@ -316,7 +316,8 @@ public class PluginsService extends AbstractComponent {
// gather urls for jar files
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(module, "*.jar")) {
for (Path jar : jarStream) {
bundle.urls.add(jar.toUri().toURL());
// normalize with toRealPath to get symlinks out of our hair
bundle.urls.add(jar.toRealPath().toUri().toURL());
}
}
bundles.add(bundle);
@ -357,7 +358,8 @@ public class PluginsService extends AbstractComponent {
// a jvm plugin: gather urls for jar files
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
for (Path jar : jarStream) {
urls.add(jar.toUri().toURL());
// normalize with toRealPath to get symlinks out of our hair
urls.add(jar.toRealPath().toUri().toURL());
}
}
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.rest.action.admin.indices.alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.AliasAction;
@ -30,9 +31,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.*;
import org.elasticsearch.rest.action.support.AcknowledgedRestListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.cluster.metadata.AliasAction.newAddAliasAction;
import static org.elasticsearch.rest.RestRequest.Method.POST;
/**
@ -75,8 +77,8 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
} else {
throw new IllegalArgumentException("Alias action [" + action + "] not supported");
}
String index = null;
String alias = null;
String[] indices = null;
String[] aliases = null;
Map<String, Object> filter = null;
String routing = null;
boolean routingSet = false;
@ -90,9 +92,9 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("index".equals(currentFieldName)) {
index = parser.text();
indices = new String[] { parser.text() };
} else if ("alias".equals(currentFieldName)) {
alias = parser.text();
aliases = new String[] { parser.text() };
} else if ("routing".equals(currentFieldName)) {
routing = parser.textOrNull();
routingSet = true;
@ -103,6 +105,23 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
searchRouting = parser.textOrNull();
searchRoutingSet = true;
}
} else if (token == XContentParser.Token.START_ARRAY) {
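// the new array form is handled below; an action body may now look like (illustrative example):
//   { "add" : { "indices" : ["test1", "test2"], "alias" : "alias1" } }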
if ("indices".equals(currentFieldName)) {
List<String> indexNames = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String index = parser.text();
indexNames.add(index);
}
indices = indexNames.toArray(new String[indexNames.size()]);
}
if ("aliases".equals(currentFieldName)) {
List<String> aliasNames = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String alias = parser.text();
aliasNames.add(alias);
}
aliases = aliasNames.toArray(new String[aliasNames.size()]);
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("filter".equals(currentFieldName)) {
filter = parser.mapOrdered();
@ -111,19 +130,19 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
}
if (type == AliasAction.Type.ADD) {
AliasAction aliasAction = newAddAliasAction(index, alias).filter(filter);
AliasActions aliasActions = new AliasActions(type, indices, aliases);
if (routingSet) {
aliasAction.routing(routing);
aliasActions.routing(routing);
}
if (indexRoutingSet) {
aliasAction.indexRouting(indexRouting);
aliasActions.indexRouting(indexRouting);
}
if (searchRoutingSet) {
aliasAction.searchRouting(searchRouting);
aliasActions.searchRouting(searchRouting);
}
indicesAliasesRequest.addAliasAction(aliasAction);
indicesAliasesRequest.addAliasAction(aliasActions);
} else if (type == AliasAction.Type.REMOVE) {
indicesAliasesRequest.removeAlias(index, alias);
indicesAliasesRequest.removeAlias(indices, aliases);
}
}
}

View File

@ -43,7 +43,7 @@ import java.util.Map;
/**
*
*/
public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
public class StatsAggregator extends NumericMetricsAggregator.MultiValue {
final ValuesSource.Numeric valuesSource;
final ValueFormatter formatter;
@ -54,10 +54,10 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
DoubleArray maxes;
public StatsAggegator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
AggregationContext context,
Aggregator parent, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
public StatsAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
AggregationContext context,
Aggregator parent, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
super(name, context, parent, pipelineAggregators, metaData);
this.valuesSource = valuesSource;
if (valuesSource != null) {
@ -164,14 +164,14 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
@Override
protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
return new StatsAggegator(name, null, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData);
return new StatsAggregator(name, null, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
return new StatsAggegator(name, valuesSource, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData);
return new StatsAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData);
}
}

View File

@ -34,6 +34,6 @@ public class StatsParser extends NumericValuesSourceMetricsAggregatorParser<Inte
@Override
protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
return new StatsAggegator.Factory(aggregationName, config);
return new StatsAggregator.Factory(aggregationName, config);
}
}

View File

@ -125,7 +125,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
}
/**
* Set the fragment size in characters, defaults to {@link HighlighterParseElement#DEFAULT_FRAGMENT_CHAR_SIZE}
* Set the fragment size in characters, defaults to {@link HighlightBuilder#DEFAULT_FRAGMENT_CHAR_SIZE}
*/
@SuppressWarnings("unchecked")
public HB fragmentSize(Integer fragmentSize) {
@ -141,7 +141,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
}
/**
* Set the number of fragments, defaults to {@link HighlighterParseElement#DEFAULT_NUMBER_OF_FRAGMENTS}
* Set the number of fragments, defaults to {@link HighlightBuilder#DEFAULT_NUMBER_OF_FRAGMENTS}
*/
@SuppressWarnings("unchecked")
public HB numOfFragments(Integer numOfFragments) {
@ -428,7 +428,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
}
/**
* internal hashCode calculation to overwrite for the implementing classes.
* fields only present in the subclass should contribute to hashCode in the implementation
*/
protected abstract int doHashCode();
@ -462,7 +462,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
}
/**
* internal equals to overwrite for the implementing classes.
* fields only present in the subclass should be checked for equality in the implementation
*/
protected abstract boolean doEquals(HB other);

View File

@ -19,6 +19,8 @@
package org.elasticsearch.search.highlight;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
@ -28,13 +30,20 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions;
import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions.Builder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
* A builder for search highlighting. Settings can control how large fields
@ -48,6 +57,51 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
public static final String HIGHLIGHT_ELEMENT_NAME = "highlight";
/** default for whether to highlight fields based on the source even if stored separately */
public static final boolean DEFAULT_FORCE_SOURCE = false;
/** default for whether a field should be highlighted only if a query matches that field */
public static final boolean DEFAULT_REQUIRE_FIELD_MATCH = true;
/** default for whether <tt>fvh</tt> should provide highlighting on filter clauses */
public static final boolean DEFAULT_HIGHLIGHT_FILTER = false;
/** default for highlight fragments being ordered by score */
public static final boolean DEFAULT_SCORE_ORDERED = false;
/** the default encoder setting */
public static final String DEFAULT_ENCODER = "default";
/** default for the maximum number of phrases the fvh will consider */
public static final int DEFAULT_PHRASE_LIMIT = 256;
/** default for fragment size when there are no matches */
public static final int DEFAULT_NO_MATCH_SIZE = 0;
/** the default number of fragments for highlighting */
public static final int DEFAULT_NUMBER_OF_FRAGMENTS = 5;
/** the default fragment size in characters */
public static final int DEFAULT_FRAGMENT_CHAR_SIZE = 100;
/** the default opening tag */
public static final String[] DEFAULT_PRE_TAGS = new String[]{"<em>"};
/** the default closing tag */
public static final String[] DEFAULT_POST_TAGS = new String[]{"</em>"};
/** the default opening tags when <tt>tag_schema = "styled"</tt> */
public static final String[] DEFAULT_STYLED_PRE_TAG = {
"<em class=\"hlt1\">", "<em class=\"hlt2\">", "<em class=\"hlt3\">",
"<em class=\"hlt4\">", "<em class=\"hlt5\">", "<em class=\"hlt6\">",
"<em class=\"hlt7\">", "<em class=\"hlt8\">", "<em class=\"hlt9\">",
"<em class=\"hlt10\">"
};
/** the default closing tags when <tt>tag_schema = "styled"</tt> */
public static final String[] DEFAULT_STYLED_POST_TAGS = {"</em>"};
/**
* a {@link FieldOptions.Builder} with default settings
*/
public final static Builder defaultFieldOptions() {
return new SearchContextHighlight.FieldOptions.Builder()
.preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED).highlightFilter(DEFAULT_HIGHLIGHT_FILTER)
.requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH).forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE).numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS)
.encoder(DEFAULT_ENCODER).boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN)
.boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS)
.noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT);
}
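// These defaults are merged into any unset global options in build() below and are reused by
// HighlighterParseElement (further down in this change) as its global defaults.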
private final List<Field> fields = new ArrayList<>();
private String encoder;
@ -120,12 +174,12 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
public HighlightBuilder tagsSchema(String schemaName) {
switch (schemaName) {
case "default":
preTags(HighlighterParseElement.DEFAULT_PRE_TAGS);
postTags(HighlighterParseElement.DEFAULT_POST_TAGS);
preTags(DEFAULT_PRE_TAGS);
postTags(DEFAULT_POST_TAGS);
break;
case "styled":
preTags(HighlighterParseElement.STYLED_PRE_TAG);
postTags(HighlighterParseElement.STYLED_POST_TAGS);
preTags(DEFAULT_STYLED_PRE_TAG);
postTags(DEFAULT_STYLED_POST_TAGS);
break;
default:
throw new IllegalArgumentException("Unknown tag schema ["+ schemaName +"]");
@ -289,7 +343,87 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
return highlightBuilder;
}
public SearchContextHighlight build(QueryShardContext context) throws IOException {
// create template global options that are later merged with any partial field options
final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();
globalOptionsBuilder.encoder(this.encoder);
transferOptions(this, globalOptionsBuilder, context);
// overwrite unset global options by default values
globalOptionsBuilder.merge(defaultFieldOptions().build());
// create field options
Collection<org.elasticsearch.search.highlight.SearchContextHighlight.Field> fieldOptions = new ArrayList<>();
for (Field field : this.fields) {
final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();
fieldOptionsBuilder.fragmentOffset(field.fragmentOffset);
if (field.matchedFields != null) {
Set<String> matchedFields = new HashSet<String>(field.matchedFields.length);
Collections.addAll(matchedFields, field.matchedFields);
fieldOptionsBuilder.matchedFields(matchedFields);
}
transferOptions(field, fieldOptionsBuilder, context);
fieldOptions.add(new SearchContextHighlight.Field(field.name(), fieldOptionsBuilder.merge(globalOptionsBuilder.build()).build()));
}
return new SearchContextHighlight(fieldOptions);
}
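// Minimal usage sketch (names assumed for illustration, not part of this change):
//   SearchContextHighlight highlight = new HighlightBuilder().field("title").build(queryShardContext);
// Per-field settings win over the global template via FieldOptions.Builder#merge above.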
/**
* Transfers field options present in the input {@link AbstractHighlighterBuilder} to the receiving
* {@link FieldOptions.Builder}, effectively overwriting existing settings
* @param highlighterBuilder highlight builder with the input options
* @param targetOptionsBuilder the receiving options builder
* @param context needed to convert {@link QueryBuilder} to {@link Query}
* @throws IOException on errors parsing any optional nested highlight query
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
private static void transferOptions(AbstractHighlighterBuilder highlighterBuilder, SearchContextHighlight.FieldOptions.Builder targetOptionsBuilder, QueryShardContext context) throws IOException {
targetOptionsBuilder.preTags(highlighterBuilder.preTags);
targetOptionsBuilder.postTags(highlighterBuilder.postTags);
targetOptionsBuilder.scoreOrdered("score".equals(highlighterBuilder.order));
if (highlighterBuilder.highlightFilter != null) {
targetOptionsBuilder.highlightFilter(highlighterBuilder.highlightFilter);
}
if (highlighterBuilder.fragmentSize != null) {
targetOptionsBuilder.fragmentCharSize(highlighterBuilder.fragmentSize);
}
if (highlighterBuilder.numOfFragments != null) {
targetOptionsBuilder.numberOfFragments(highlighterBuilder.numOfFragments);
}
if (highlighterBuilder.requireFieldMatch != null) {
targetOptionsBuilder.requireFieldMatch(highlighterBuilder.requireFieldMatch);
}
if (highlighterBuilder.boundaryMaxScan != null) {
targetOptionsBuilder.boundaryMaxScan(highlighterBuilder.boundaryMaxScan);
}
targetOptionsBuilder.boundaryChars(convertCharArray(highlighterBuilder.boundaryChars));
targetOptionsBuilder.highlighterType(highlighterBuilder.highlighterType);
targetOptionsBuilder.fragmenter(highlighterBuilder.fragmenter);
if (highlighterBuilder.noMatchSize != null) {
targetOptionsBuilder.noMatchSize(highlighterBuilder.noMatchSize);
}
if (highlighterBuilder.forceSource != null) {
targetOptionsBuilder.forceSource(highlighterBuilder.forceSource);
}
if (highlighterBuilder.phraseLimit != null) {
targetOptionsBuilder.phraseLimit(highlighterBuilder.phraseLimit);
}
targetOptionsBuilder.options(highlighterBuilder.options);
if (highlighterBuilder.highlightQuery != null) {
targetOptionsBuilder.highlightQuery(highlighterBuilder.highlightQuery.toQuery(context));
}
}
private static Character[] convertCharArray(char[] array) {
if (array == null) {
return null;
}
Character[] charArray = new Character[array.length];
for (int i = 0; i < array.length; i++) {
charArray[i] = array[i];
}
return charArray;
}
public void innerXContent(XContentBuilder builder) throws IOException {
// first write common options

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.highlight;
import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
@ -52,39 +51,6 @@ import java.util.Set;
*/
public class HighlighterParseElement implements SearchParseElement {
/** default for whether to highlight fields based on the source even if stored separately */
public static final boolean DEFAULT_FORCE_SOURCE = false;
/** default for whether a field should be highlighted only if a query matches that field */
public static final boolean DEFAULT_REQUIRE_FIELD_MATCH = true;
/** default for whether <tt>fvh</tt> should provide highlighting on filter clauses */
public static final boolean DEFAULT_HIGHLIGHT_FILTER = false;
/** default for highlight fragments being ordered by score */
public static final boolean DEFAULT_SCORE_ORDERED = false;
/** the default encoder setting */
public static final String DEFAULT_ENCODER = "default";
/** default for the maximum number of phrases the fvh will consider */
public static final int DEFAULT_PHRASE_LIMIT = 256;
/** default for fragment size when there are no matches */
public static final int DEFAULT_NO_MATCH_SIZE = 0;
/** the default number of fragments for highlighting */
public static final int DEFAULT_NUMBER_OF_FRAGMENTS = 5;
/** the default number of fragments size in characters */
public static final int DEFAULT_FRAGMENT_CHAR_SIZE = 100;
/** the default opening tag */
public static final String[] DEFAULT_PRE_TAGS = new String[]{"<em>"};
/** the default closing tag */
public static final String[] DEFAULT_POST_TAGS = new String[]{"</em>"};
/** the default opening tags when <tt>tag_schema = "styled"</tt> */
public static final String[] STYLED_PRE_TAG = {
"<em class=\"hlt1\">", "<em class=\"hlt2\">", "<em class=\"hlt3\">",
"<em class=\"hlt4\">", "<em class=\"hlt5\">", "<em class=\"hlt6\">",
"<em class=\"hlt7\">", "<em class=\"hlt8\">", "<em class=\"hlt9\">",
"<em class=\"hlt10\">"
};
/** the default closing tags when <tt>tag_schema = "styled"</tt> */
public static final String[] STYLED_POST_TAGS = {"</em>"};
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
try {
@ -99,12 +65,7 @@ public class HighlighterParseElement implements SearchParseElement {
String topLevelFieldName = null;
final List<Tuple<String, SearchContextHighlight.FieldOptions.Builder>> fieldsOptions = new ArrayList<>();
final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder()
.preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED).highlightFilter(DEFAULT_HIGHLIGHT_FILTER)
.requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH).forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE).numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS)
.encoder(DEFAULT_ENCODER).boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN)
.boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS)
.noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT);
final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = HighlightBuilder.defaultFieldOptions();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -147,8 +108,8 @@ public class HighlighterParseElement implements SearchParseElement {
} else if ("tags_schema".equals(topLevelFieldName) || "tagsSchema".equals(topLevelFieldName)) {
String schema = parser.text();
if ("styled".equals(schema)) {
globalOptionsBuilder.preTags(STYLED_PRE_TAG);
globalOptionsBuilder.postTags(STYLED_POST_TAGS);
globalOptionsBuilder.preTags(HighlightBuilder.DEFAULT_STYLED_PRE_TAG);
globalOptionsBuilder.postTags(HighlightBuilder.DEFAULT_STYLED_POST_TAGS);
}
} else if ("highlight_filter".equals(topLevelFieldName) || "highlightFilter".equals(topLevelFieldName)) {
globalOptionsBuilder.highlightFilter(parser.booleanValue());

View File

@ -53,6 +53,10 @@ public class SearchContextHighlight {
this.globalForceSource = globalForceSource;
}
boolean globalForceSource() {
return this.globalForceSource;
}
public boolean forceSource(Field field) {
if (globalForceSource) {
return true;

View File

@ -44,6 +44,7 @@ OFFICIAL PLUGINS
- discovery-gce
- discovery-multicast
- lang-javascript
- lang-plan-a
- lang-python
- mapper-attachments
- mapper-murmur3

View File

@ -28,7 +28,6 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockException;
@ -46,18 +45,17 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.IndexShardNotStartedException;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cluster.TestClusterService;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseOptions;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.*;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
@ -132,22 +130,22 @@ public class TransportReplicationActionTests extends ESTestCase {
ClusterBlocks.Builder block = ClusterBlocks.builder()
.addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener);
assertFalse("primary phase should stop execution", primaryPhase.checkBlocks());
TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener);
reroutePhase.run();
assertListenerThrows("primary phase should fail operation", listener, ClusterBlockException.class);
block = ClusterBlocks.builder()
.addGlobalBlock(new ClusterBlock(1, "retryable", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
listener = new PlainActionFuture<>();
primaryPhase = action.new PrimaryPhase(new Request().timeout("5ms"), listener);
assertFalse("primary phase should stop execution on retryable block", primaryPhase.checkBlocks());
reroutePhase = action.new ReroutePhase(new Request().timeout("5ms"), listener);
reroutePhase.run();
assertListenerThrows("failed to timeout on retryable block", listener, ClusterBlockException.class);
listener = new PlainActionFuture<>();
primaryPhase = action.new PrimaryPhase(new Request(), listener);
assertFalse("primary phase should stop execution on retryable block", primaryPhase.checkBlocks());
reroutePhase = action.new ReroutePhase(new Request(), listener);
reroutePhase.run();
assertFalse("primary phase should wait on retryable block", listener.isDone());
block = ClusterBlocks.builder()
@ -172,25 +170,47 @@ public class TransportReplicationActionTests extends ESTestCase {
Request request = new Request(shardId).timeout("1ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener);
primaryPhase.run();
TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener);
reroutePhase.run();
assertListenerThrows("unassigned primary didn't cause a timeout", listener, UnavailableShardsException.class);
request = new Request(shardId);
listener = new PlainActionFuture<>();
primaryPhase = action.new PrimaryPhase(request, listener);
primaryPhase.run();
reroutePhase = action.new ReroutePhase(request, listener);
reroutePhase.run();
assertFalse("unassigned primary didn't cause a retry", listener.isDone());
clusterService.setState(state(index, true, ShardRoutingState.STARTED));
logger.debug("--> primary assigned state:\n{}", clusterService.state().prettyPrint());
listener.get();
assertTrue("request wasn't processed on primary, despite of it being assigned", request.processedOnPrimary.get());
final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId();
final List<CapturingTransport.CapturedRequest> capturedRequests = transport.capturedRequestsByTargetNode().get(primaryNodeId);
assertThat(capturedRequests, notNullValue());
assertThat(capturedRequests.size(), equalTo(1));
assertThat(capturedRequests.get(0).action, equalTo("testAction[p]"));
assertIndexShardCounter(1);
}
public void testRoutingToPrimary() {
public void testUnknownIndexOrShardOnReroute() throws InterruptedException {
final String index = "test";
// no replicas in order to skip the replication part
clusterService.setState(state(index, true,
randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
Request request = new Request(new ShardId("unknown_index", 0)).timeout("1ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener);
reroutePhase.run();
assertListenerThrows("must throw index not found exception", listener, IndexNotFoundException.class);
request = new Request(new ShardId(index, 10)).timeout("1ms");
listener = new PlainActionFuture<>();
reroutePhase = action.new ReroutePhase(request, listener);
reroutePhase.run();
assertListenerThrows("must throw shard not found exception", listener, ShardNotFoundException.class);
}
public void testRoutePhaseExecutesRequest() {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
@ -203,25 +223,126 @@ public class TransportReplicationActionTests extends ESTestCase {
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener);
assertTrue(primaryPhase.checkBlocks());
primaryPhase.routeRequestOrPerformLocally(shardRoutingTable.primaryShard(), shardRoutingTable.shardsIt());
if (primaryNodeId.equals(clusterService.localNode().id())) {
logger.info("--> primary is assigned locally, testing for execution");
assertTrue("request failed to be processed on a local primary", request.processedOnPrimary.get());
if (transport.capturedRequests().length > 0) {
assertIndexShardCounter(2);
} else {
assertIndexShardCounter(1);
}
TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener);
reroutePhase.run();
assertThat(request.shardId(), equalTo(shardId));
logger.info("--> primary is assigned to [{}], checking request forwarded", primaryNodeId);
final List<CapturingTransport.CapturedRequest> capturedRequests = transport.capturedRequestsByTargetNode().get(primaryNodeId);
assertThat(capturedRequests, notNullValue());
assertThat(capturedRequests.size(), equalTo(1));
if (clusterService.state().nodes().localNodeId().equals(primaryNodeId)) {
assertThat(capturedRequests.get(0).action, equalTo("testAction[p]"));
} else {
logger.info("--> primary is assigned to [{}], checking request forwarded", primaryNodeId);
final List<CapturingTransport.CapturedRequest> capturedRequests = transport.capturedRequestsByTargetNode().get(primaryNodeId);
assertThat(capturedRequests, notNullValue());
assertThat(capturedRequests.size(), equalTo(1));
assertThat(capturedRequests.get(0).action, equalTo("testAction"));
assertIndexShardUninitialized();
}
assertIndexShardUninitialized();
}
public void testPrimaryPhaseExecutesRequest() throws InterruptedException, ExecutionException {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED));
Request request = new Request(shardId).timeout("1ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener));
primaryPhase.run();
assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true));
final String replicaNodeId = clusterService.state().getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards().get(0).currentNodeId();
final List<CapturingTransport.CapturedRequest> requests = transport.capturedRequestsByTargetNode().get(replicaNodeId);
assertThat(requests, notNullValue());
assertThat(requests.size(), equalTo(1));
assertThat("replica request was not sent", requests.get(0).action, equalTo("testAction[r]"));
}
public void testAddedReplicaAfterPrimaryOperation() {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
// start with no replicas
clusterService.setState(stateWithStartedPrimary(index, true, 0));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
final ClusterState stateWithAddedReplicas = state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED);
final Action actionWithAddedReplicaAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) {
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
final Tuple<Response, Request> operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest);
// add replicas after primary operation
((TestClusterService) clusterService).setState(stateWithAddedReplicas);
logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint());
return operationOnPrimary;
}
};
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = actionWithAddedReplicaAfterPrimaryOp.new PrimaryPhase(request, createTransportChannel(listener));
primaryPhase.run();
assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true));
for (ShardRouting replica : stateWithAddedReplicas.getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards()) {
List<CapturingTransport.CapturedRequest> requests = transport.capturedRequestsByTargetNode().get(replica.currentNodeId());
assertThat(requests, notNullValue());
assertThat(requests.size(), equalTo(1));
assertThat("replica request was not sent", requests.get(0).action, equalTo("testAction[r]"));
}
}
public void testRelocatingReplicaAfterPrimaryOperation() {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
// start with a replica
clusterService.setState(state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
final ClusterState stateWithRelocatingReplica = state(index, true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING);
final Action actionWithRelocatingReplicasAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) {
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
final Tuple<Response, Request> operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest);
// set replica to relocating
((TestClusterService) clusterService).setState(stateWithRelocatingReplica);
logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint());
return operationOnPrimary;
}
};
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = actionWithRelocatingReplicasAfterPrimaryOp.new PrimaryPhase(request, createTransportChannel(listener));
primaryPhase.run();
assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true));
ShardRouting relocatingReplicaShard = stateWithRelocatingReplica.getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards().get(0);
for (String node : new String[] {relocatingReplicaShard.currentNodeId(), relocatingReplicaShard.relocatingNodeId()}) {
List<CapturingTransport.CapturedRequest> requests = transport.capturedRequestsByTargetNode().get(node);
assertThat(requests, notNullValue());
assertThat(requests.size(), equalTo(1));
assertThat("replica request was not sent to replica", requests.get(0).action, equalTo("testAction[r]"));
}
}
public void testIndexDeletedAfterPrimaryOperation() {
final String index = "test";
final ShardId shardId = new ShardId(index, 0);
clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
final ClusterState stateWithDeletedIndex = state(index + "_new", true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING);
final Action actionWithDeletedIndexAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) {
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
final Tuple<Response, Request> operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest);
// delete index after primary op
((TestClusterService) clusterService).setState(stateWithDeletedIndex);
logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint());
return operationOnPrimary;
}
};
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = actionWithDeletedIndexAfterPrimaryOp.new PrimaryPhase(request, createTransportChannel(listener));
primaryPhase.run();
assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true));
assertThat("replication phase should be skipped if index gets deleted after primary operation", transport.capturedRequestsByTargetNode().size(), equalTo(0));
}
public void testWriteConsistency() throws ExecutionException, InterruptedException {
@ -266,10 +387,9 @@ public class TransportReplicationActionTests extends ESTestCase {
final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener);
TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener));
if (passesWriteConsistency) {
assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard()), nullValue());
assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard().shardId()), nullValue());
primaryPhase.run();
assertTrue("operations should have been perform, consistency level is met", request.processedOnPrimary.get());
if (assignedReplicas > 0) {
@ -278,14 +398,18 @@ public class TransportReplicationActionTests extends ESTestCase {
assertIndexShardCounter(1);
}
} else {
assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard()), notNullValue());
assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard().shardId()), notNullValue());
primaryPhase.run();
assertFalse("operations should not have been perform, consistency level is *NOT* met", request.processedOnPrimary.get());
assertListenerThrows("should throw exception to trigger retry", listener, UnavailableShardsException.class);
assertIndexShardUninitialized();
for (int i = 0; i < replicaStates.length; i++) {
replicaStates[i] = ShardRoutingState.STARTED;
}
clusterService.setState(state(index, true, ShardRoutingState.STARTED, replicaStates));
listener = new PlainActionFuture<>();
primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener));
primaryPhase.run();
assertTrue("once the consistency level met, operation should continue", request.processedOnPrimary.get());
assertIndexShardCounter(2);
}
@ -340,23 +464,19 @@ public class TransportReplicationActionTests extends ESTestCase {
protected void runReplicateTest(IndexShardRoutingTable shardRoutingTable, int assignedReplicas, int totalShards) throws InterruptedException, ExecutionException {
final ShardRouting primaryShard = shardRoutingTable.primaryShard();
final ShardIterator shardIt = shardRoutingTable.shardsIt();
final ShardId shardId = shardIt.shardId();
final Request request = new Request();
PlainActionFuture<Response> listener = new PlainActionFuture<>();
final Request request = new Request(shardId);
final PlainActionFuture<Response> listener = new PlainActionFuture<>();
logger.debug("expecting [{}] assigned replicas, [{}] total shards. using state: \n{}", assignedReplicas, totalShards, clusterService.state().prettyPrint());
final TransportReplicationAction<Request, Request, Response>.InternalRequest internalRequest = action.new InternalRequest(request);
internalRequest.concreteIndex(shardId.index().name());
Releasable reference = getOrCreateIndexShardOperationsCounter();
assertIndexShardCounter(2);
// TODO: set a default timeout
TransportReplicationAction<Request, Request, Response>.ReplicationPhase replicationPhase =
action.new ReplicationPhase(shardIt, request,
new Response(), new ClusterStateObserver(clusterService, logger),
primaryShard, internalRequest, listener, reference, null);
action.new ReplicationPhase(request,
new Response(),
request.shardId(), createTransportChannel(listener), reference, null);
assertThat(replicationPhase.totalShards(), equalTo(totalShards));
assertThat(replicationPhase.pending(), equalTo(assignedReplicas));
@ -433,7 +553,7 @@ public class TransportReplicationActionTests extends ESTestCase {
* However, this failure would only become apparent once listener.get is called. Seems a little implicit.
* */
action = new ActionWithDelay(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool);
final TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener);
final TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener));
Thread t = new Thread() {
@Override
public void run() {
@ -464,7 +584,7 @@ public class TransportReplicationActionTests extends ESTestCase {
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
Request request = new Request(shardId).timeout("100ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener);
TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener));
primaryPhase.run();
assertIndexShardCounter(2);
assertThat(transport.capturedRequests().length, equalTo(1));
@ -473,7 +593,7 @@ public class TransportReplicationActionTests extends ESTestCase {
assertIndexShardCounter(1);
transport.clear();
request = new Request(shardId).timeout("100ms");
primaryPhase = action.new PrimaryPhase(request, listener);
primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener));
primaryPhase.run();
assertIndexShardCounter(2);
CapturingTransport.CapturedRequest[] replicationRequests = transport.capturedRequests();
@ -498,7 +618,7 @@ public class TransportReplicationActionTests extends ESTestCase {
@Override
public void run() {
try {
replicaOperationTransportHandler.messageReceived(new Request(), createTransportChannel());
replicaOperationTransportHandler.messageReceived(new Request(), createTransportChannel(new PlainActionFuture<>()));
} catch (Exception e) {
}
}
@ -515,7 +635,7 @@ public class TransportReplicationActionTests extends ESTestCase {
action = new ActionWithExceptions(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool);
final Action.ReplicaOperationTransportHandler replicaOperationTransportHandlerForException = action.new ReplicaOperationTransportHandler();
try {
replicaOperationTransportHandlerForException.messageReceived(new Request(shardId), createTransportChannel());
replicaOperationTransportHandlerForException.messageReceived(new Request(shardId), createTransportChannel(new PlainActionFuture<>()));
fail();
} catch (Throwable t2) {
}
@ -531,7 +651,7 @@ public class TransportReplicationActionTests extends ESTestCase {
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
Request request = new Request(shardId).timeout("100ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
TransportReplicationAction<Request, Request, Response>.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener);
TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener));
primaryPhase.run();
// no replica request should have been sent yet
assertThat(transport.capturedRequests().length, equalTo(0));
@ -559,7 +679,6 @@ public class TransportReplicationActionTests extends ESTestCase {
}
public static class Request extends ReplicationRequest<Request> {
int shardId;
public AtomicBoolean processedOnPrimary = new AtomicBoolean();
public AtomicInteger processedOnReplicas = new AtomicInteger();
@ -568,21 +687,19 @@ public class TransportReplicationActionTests extends ESTestCase {
Request(ShardId shardId) {
this();
this.shardId = shardId.id();
this.index(shardId.index().name());
this.shardId = shardId;
this.index = shardId.getIndex();
// keep things simple
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(shardId);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardId = in.readVInt();
}
}
@ -605,22 +722,17 @@ public class TransportReplicationActionTests extends ESTestCase {
}
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable {
boolean executedBefore = shardRequest.request.processedOnPrimary.getAndSet(true);
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
boolean executedBefore = shardRequest.processedOnPrimary.getAndSet(true);
assert executedBefore == false : "request has already been executed on the primary";
return new Tuple<>(new Response(), shardRequest.request);
return new Tuple<>(new Response(), shardRequest);
}
@Override
protected void shardOperationOnReplica(ShardId shardId, Request request) {
protected void shardOperationOnReplica(Request request) {
request.processedOnReplicas.incrementAndGet();
}
@Override
protected ShardIterator shards(ClusterState clusterState, InternalRequest request) {
return clusterState.getRoutingTable().index(request.concreteIndex()).shard(request.request().shardId).shardsIt();
}
@Override
protected boolean checkWriteConsistency() {
return false;
@ -659,8 +771,8 @@ public class TransportReplicationActionTests extends ESTestCase {
}
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable {
return throwException(shardRequest.shardId);
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
return throwException(shardRequest.shardId());
}
private Tuple<Response, Request> throwException(ShardId shardId) {
@ -681,8 +793,8 @@ public class TransportReplicationActionTests extends ESTestCase {
}
@Override
protected void shardOperationOnReplica(ShardId shardId, Request shardRequest) {
throwException(shardRequest.internalShardId);
protected void shardOperationOnReplica(Request shardRequest) {
throwException(shardRequest.shardId());
}
}
@ -697,9 +809,9 @@ public class TransportReplicationActionTests extends ESTestCase {
}
@Override
protected Tuple<Response, Request> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable {
protected Tuple<Response, Request> shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable {
awaitLatch();
return new Tuple<>(new Response(), shardRequest.request);
return new Tuple<>(new Response(), shardRequest);
}
private void awaitLatch() throws InterruptedException {
@ -708,7 +820,7 @@ public class TransportReplicationActionTests extends ESTestCase {
}
@Override
protected void shardOperationOnReplica(ShardId shardId, Request shardRequest) {
protected void shardOperationOnReplica(Request shardRequest) {
try {
awaitLatch();
} catch (InterruptedException e) {
@ -720,7 +832,7 @@ public class TransportReplicationActionTests extends ESTestCase {
/*
* Transport channel that is needed for replica operation testing.
* */
public TransportChannel createTransportChannel() {
public TransportChannel createTransportChannel(final PlainActionFuture<Response> listener) {
return new TransportChannel() {
@Override
@ -735,14 +847,17 @@ public class TransportReplicationActionTests extends ESTestCase {
@Override
public void sendResponse(TransportResponse response) throws IOException {
listener.onResponse(((Response) response));
}
@Override
public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
listener.onResponse(((Response) response));
}
@Override
public void sendResponse(Throwable error) throws IOException {
listener.onFailure(error);
}
};
}

View File

@ -759,7 +759,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", null)).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("requires an [alias] to be set"));
assertThat(e.getMessage(), containsString("[alias] may not be empty string"));
}
}
@ -768,7 +768,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", "")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("requires an [alias] to be set"));
assertThat(e.getMessage(), containsString("[alias] may not be empty string"));
}
}

View File

@ -26,13 +26,11 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.file.Path;
import java.util.Arrays;
public class RoutingBackwardCompatibilityTests extends ESTestCase {

View File

@ -24,14 +24,16 @@ import org.elasticsearch.test.ESTestCase;
import java.net.InetAddress;
import static org.hamcrest.Matchers.is;
/**
* Tests for the network service... try to keep them safe regardless of configuration;
* please don't actually bind to anything, just test the addresses.
*/
public class NetworkServiceTests extends ESTestCase {
/**
* ensure exception if we bind to multicast ipv4 address
/**
* ensure exception if we bind to multicast ipv4 address
*/
public void testBindMulticastV4() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
@ -42,9 +44,8 @@ public class NetworkServiceTests extends ESTestCase {
assertTrue(e.getMessage().contains("invalid: multicast"));
}
}
/**
* ensure exception if we bind to multicast ipv6 address
/**
* ensure exception if we bind to multicast ipv6 address
*/
public void testBindMulticastV6() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
@ -55,9 +56,9 @@ public class NetworkServiceTests extends ESTestCase {
assertTrue(e.getMessage().contains("invalid: multicast"));
}
}
/**
* ensure exception if we publish to multicast ipv4 address
/**
* ensure exception if we publish to multicast ipv4 address
*/
public void testPublishMulticastV4() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
@ -68,9 +69,9 @@ public class NetworkServiceTests extends ESTestCase {
assertTrue(e.getMessage().contains("invalid: multicast"));
}
}
/**
* ensure exception if we publish to multicast ipv6 address
/**
* ensure exception if we publish to multicast ipv6 address
*/
public void testPublishMulticastV6() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
@ -82,24 +83,24 @@ public class NetworkServiceTests extends ESTestCase {
}
}
/**
* ensure specifying wildcard ipv4 address will bind to all interfaces
/**
* ensure specifying wildcard ipv4 address will bind to all interfaces
*/
public void testBindAnyLocalV4() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
assertEquals(InetAddress.getByName("0.0.0.0"), service.resolveBindHostAddresses(new String[] { "0.0.0.0" })[0]);
}
/**
* ensure specifying wildcard ipv6 address will bind to all interfaces
/**
* ensure specifying wildcard ipv6 address will bind to all interfaces
*/
public void testBindAnyLocalV6() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
assertEquals(InetAddress.getByName("::"), service.resolveBindHostAddresses(new String[] { "::" })[0]);
}
/**
* ensure specifying wildcard ipv4 address selects reasonable publish address
/**
* ensure specifying wildcard ipv4 address selects reasonable publish address
*/
public void testPublishAnyLocalV4() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
@ -107,12 +108,34 @@ public class NetworkServiceTests extends ESTestCase {
assertFalse(address.isAnyLocalAddress());
}
/**
* ensure specifying wildcard ipv6 address selects reasonable publish address
/**
* ensure specifying wildcard ipv6 address selects reasonable publish address
*/
public void testPublishAnyLocalV6() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
InetAddress address = service.resolvePublishHostAddresses(new String[] { "::" });
assertFalse(address.isAnyLocalAddress());
}
/**
* ensure we can bind to multiple addresses
*/
public void testBindMultipleAddresses() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
InetAddress[] addresses = service.resolveBindHostAddresses(new String[]{"127.0.0.1", "127.0.0.2"});
assertThat(addresses.length, is(2));
}
/**
* ensure we can't bind to multiple addresses when using wildcard
*/
public void testBindMultipleAddressesWithWildcard() throws Exception {
NetworkService service = new NetworkService(Settings.EMPTY);
try {
service.resolveBindHostAddresses(new String[]{"0.0.0.0", "127.0.0.1"});
fail("should have hit exception");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("is wildcard, but multiple addresses specified"));
}
}
}

View File

@ -53,7 +53,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase {
fail("can't index, does not match consistency");
} catch (UnavailableShardsException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [QUORUM] (have 1, needed 2). Timeout: [100ms], request: index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}"));
assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [QUORUM] (have 1, needed 2). Timeout: [100ms], request: [index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}]"));
// but really, all is well
}
@ -76,7 +76,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase {
fail("can't index, does not match consistency");
} catch (UnavailableShardsException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [ALL] (have 2, needed 3). Timeout: [100ms], request: index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}"));
assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [ALL] (have 2, needed 3). Timeout: [100ms], request: [index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}]"));
// but really, all is well
}

View File

@ -67,7 +67,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
}
public void testString() {
createIndex("test", Settings.EMPTY, "field", "value", "type=string");
createIndex("test", Settings.EMPTY, "test", "field", "type=string");
for (int value = 0; value <= 10; value++) {
client().prepareIndex("test", "test").setSource("field", String.format(Locale.ENGLISH, "%03d", value)).get();
}
@ -85,7 +85,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
public void testDouble() {
String fieldName = "field";
createIndex("test", Settings.EMPTY, fieldName, "value", "type=double");
createIndex("test", Settings.EMPTY, "test", fieldName, "type=double");
for (double value = -1; value <= 9; value++) {
client().prepareIndex("test", "test").setSource(fieldName, value).get();
}
@ -102,7 +102,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
public void testFloat() {
String fieldName = "field";
createIndex("test", Settings.EMPTY, fieldName, "value", "type=float");
createIndex("test", Settings.EMPTY, "test", fieldName, "type=float");
for (float value = -1; value <= 9; value++) {
client().prepareIndex("test", "test").setSource(fieldName, value).get();
}
@ -112,14 +112,14 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11l));
assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11l));
assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1.0));
assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9.0));
assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1f));
assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9f));
assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Float.toString(-1)));
assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(), equalTo(Float.toString(9)));
}
private void testNumberRange(String fieldName, String fieldType, long min, long max) {
createIndex("test", Settings.EMPTY, fieldName, "value", "type=" + fieldType);
createIndex("test", Settings.EMPTY, "test", fieldName, "type=" + fieldType);
for (long value = min; value <= max; value++) {
client().prepareIndex("test", "test").setSource(fieldName, value).get();
}
@ -180,11 +180,11 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
}
public void testInvalidField() {
createIndex("test1", Settings.EMPTY, "field1", "value", "type=string");
createIndex("test1", Settings.EMPTY, "test", "field1", "type=string");
client().prepareIndex("test1", "test").setSource("field1", "a").get();
client().prepareIndex("test1", "test").setSource("field1", "b").get();
createIndex("test2", Settings.EMPTY, "field2", "value", "type=string");
createIndex("test2", Settings.EMPTY, "test", "field2", "type=string");
client().prepareIndex("test2", "test").setSource("field2", "a").get();
client().prepareIndex("test2", "test").setSource("field2", "b").get();
client().admin().indices().prepareRefresh().get();

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -28,15 +29,21 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
public class DynamicMappingTests extends ESSingleNodeTestCase {
@ -407,4 +414,26 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
// expected
}
}
public void testDefaultFloatingPointMappings() throws IOException {
DocumentMapper mapper = createIndex("test").mapperService().documentMapperWithAutoCreate("type").getDocumentMapper();
doTestDefaultFloatingPointMappings(mapper, XContentFactory.jsonBuilder());
doTestDefaultFloatingPointMappings(mapper, XContentFactory.yamlBuilder());
doTestDefaultFloatingPointMappings(mapper, XContentFactory.smileBuilder());
doTestDefaultFloatingPointMappings(mapper, XContentFactory.cborBuilder());
}
private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentBuilder builder) throws IOException {
BytesReference source = builder.startObject()
.field("foo", 3.2f) // float
.field("bar", 3.2d) // double
.field("baz", (double) 3.2f) // double that can be accurately represented as a float
.endObject().bytes();
ParsedDocument parsedDocument = mapper.parse("index", "type", "id", source);
Mapping update = parsedDocument.dynamicMappingsUpdate();
assertNotNull(update);
assertThat(update.root().getMapper("foo"), instanceOf(FloatFieldMapper.class));
assertThat(update.root().getMapper("bar"), instanceOf(FloatFieldMapper.class));
assertThat(update.root().getMapper("baz"), instanceOf(FloatFieldMapper.class));
}
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.common.compress.CompressedXContent;
@ -117,8 +118,9 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
if (t instanceof ExecutionException) {
t = ((ExecutionException) t).getCause();
}
if (t instanceof IllegalArgumentException) {
assertEquals("It is forbidden to index into the default mapping [_default_]", t.getMessage());
final Throwable throwable = ExceptionsHelper.unwrapCause(t);
if (throwable instanceof IllegalArgumentException) {
assertEquals("It is forbidden to index into the default mapping [_default_]", throwable.getMessage());
} else {
throw t;
}
@ -133,8 +135,9 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
if (t instanceof ExecutionException) {
t = ((ExecutionException) t).getCause();
}
if (t instanceof IllegalArgumentException) {
assertEquals("It is forbidden to index into the default mapping [_default_]", t.getMessage());
final Throwable throwable = ExceptionsHelper.unwrapCause(t);
if (throwable instanceof IllegalArgumentException) {
assertEquals("It is forbidden to index into the default mapping [_default_]", throwable.getMessage());
} else {
throw t;
}

View File

@ -24,6 +24,8 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
@ -41,9 +43,11 @@ import org.elasticsearch.index.mapper.string.SimpleStringMappingTests;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@ -510,4 +514,62 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
assertThat(ts, instanceOf(NumericTokenStream.class));
assertEquals(expected, ((NumericTokenStream)ts).getPrecisionStep());
}
public void testTermVectorsBackCompat() throws Exception {
for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
doTestTermVectorsBackCompat(type);
}
}
private void doTestTermVectorsBackCompat(String type) throws Exception {
DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser();
String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("foo")
.field("type", type)
.field("term_vector", "yes")
.endObject()
.endObject().endObject().endObject().string();
try {
parser.parse(mappingWithTV);
fail();
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [term_vector : yes]"));
}
Settings oldIndexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
.build();
parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
parser.parse(mappingWithTV); // no exception
}
public void testAnalyzerBackCompat() throws Exception {
for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
doTestAnalyzerBackCompat(type);
}
}
private void doTestAnalyzerBackCompat(String type) throws Exception {
DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser();
String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("foo")
.field("type", type)
.field("analyzer", "keyword")
.endObject()
.endObject().endObject().endObject().string();
try {
parser.parse(mappingWithTV);
fail();
} catch (MapperParsingException e) {
assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [analyzer : keyword]"));
}
Settings oldIndexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
.build();
parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
parser.parse(mappingWithTV); // no exception
}
}

View File

@ -21,6 +21,8 @@ package org.elasticsearch.search.highlight;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@ -32,6 +34,13 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.IdsQueryParser;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
@ -39,11 +48,15 @@ import org.elasticsearch.index.query.MatchAllQueryParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.TermQueryParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder.Field;
import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -51,6 +64,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -128,7 +142,7 @@ public class HighlightBuilderTests extends ESTestCase {
}
/**
* Generic test that creates new highlighter from the test highlighter and checks both for equality
* creates random highlighter, renders it to xContent and back to new instance that should be equal to original
*/
public void testFromXContent() throws IOException {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
@ -261,6 +275,70 @@ public class HighlightBuilderTests extends ESTestCase {
} catch (ParsingException e) {
assertEquals("cannot parse object with name [bad_fieldname]", e.getMessage());
}
}
/**
* test that build() outputs a {@link SearchContextHighlight} that is similar to the one
* we would get when parsing the xContent rendered by the test highlight builder
*/
public void testBuildSearchContextHighlight() throws IOException {
Settings indexSettings = Settings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
Index index = new Index(randomAsciiOfLengthBetween(1, 10));
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings);
// shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter
QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry) {
@Override
public MappedFieldType fieldMapper(String name) {
StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType();
}
};
mockShardContext.setMapUnmappedFieldAsString(true);
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
HighlightBuilder highlightBuilder = randomHighlighterBuilder();
SearchContextHighlight highlight = highlightBuilder.build(mockShardContext);
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
builder.prettyPrint();
}
builder.startObject();
highlightBuilder.innerXContent(builder);
builder.endObject();
XContentParser parser = XContentHelper.createParser(builder.bytes());
SearchContextHighlight parsedHighlight = new HighlighterParseElement().parse(parser, mockShardContext);
assertNotSame(highlight, parsedHighlight);
assertEquals(highlight.globalForceSource(), parsedHighlight.globalForceSource());
assertEquals(highlight.fields().size(), parsedHighlight.fields().size());
Iterator<org.elasticsearch.search.highlight.SearchContextHighlight.Field> iterator = parsedHighlight.fields().iterator();
for (org.elasticsearch.search.highlight.SearchContextHighlight.Field field : highlight.fields()) {
org.elasticsearch.search.highlight.SearchContextHighlight.Field otherField = iterator.next();
assertEquals(field.field(), otherField.field());
FieldOptions options = field.fieldOptions();
FieldOptions otherOptions = otherField.fieldOptions();
assertArrayEquals(options.boundaryChars(), otherOptions.boundaryChars());
assertEquals(options.boundaryMaxScan(), otherOptions.boundaryMaxScan());
assertEquals(options.encoder(), otherOptions.encoder());
assertEquals(options.fragmentCharSize(), otherOptions.fragmentCharSize());
assertEquals(options.fragmenter(), otherOptions.fragmenter());
assertEquals(options.fragmentOffset(), otherOptions.fragmentOffset());
assertEquals(options.highlighterType(), otherOptions.highlighterType());
assertEquals(options.highlightFilter(), otherOptions.highlightFilter());
assertEquals(options.highlightQuery(), otherOptions.highlightQuery());
assertEquals(options.matchedFields(), otherOptions.matchedFields());
assertEquals(options.noMatchSize(), otherOptions.noMatchSize());
assertEquals(options.numberOfFragments(), otherOptions.numberOfFragments());
assertEquals(options.options(), otherOptions.options());
assertEquals(options.phraseLimit(), otherOptions.phraseLimit());
assertArrayEquals(options.preTags(), otherOptions.preTags());
assertArrayEquals(options.postTags(), otherOptions.postTags());
assertEquals(options.requireFieldMatch(), otherOptions.requireFieldMatch());
assertEquals(options.scoreOrdered(), otherOptions.scoreOrdered());
}
}
}
/**
@ -277,9 +355,9 @@ public class HighlightBuilderTests extends ESTestCase {
context.reset(parser);
HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(context);
assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlighterParseElement.STYLED_PRE_TAG,
assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG,
highlightBuilder.preTags());
assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlighterParseElement.STYLED_POST_TAGS,
assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS,
highlightBuilder.postTags());
highlightElement = "{\n" +
@ -289,9 +367,9 @@ public class HighlightBuilderTests extends ESTestCase {
context.reset(parser);
highlightBuilder = HighlightBuilder.fromXContent(context);
assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlighterParseElement.DEFAULT_PRE_TAGS,
assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS,
highlightBuilder.preTags());
assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlighterParseElement.DEFAULT_POST_TAGS,
assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS,
highlightBuilder.postTags());
highlightElement = "{\n" +

View File

@ -70,6 +70,7 @@ DEFAULT_PLUGINS = ["analysis-icu",
"lang-expression",
"lang-groovy",
"lang-javascript",
"lang-plan-a",
"lang-python",
"mapper-murmur3",
"mapper-size",

View File

@ -60,19 +60,8 @@
# For more information, see the documentation at:
# <http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-network.html>
#
# ---------------------------------- Gateway -----------------------------------
#
# Block initial recovery after a full cluster restart until N nodes are started:
#
# gateway.recover_after_nodes: 3
#
# For more information, see the documentation at:
# <http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-gateway.html>
#
# --------------------------------- Discovery ----------------------------------
#
# Elasticsearch nodes will find each other via unicast, by default.
#
# Pass an initial list of hosts to perform discovery when new node is started:
# The default list of hosts is ["127.0.0.1", "[::1]"]
#
@ -85,6 +74,15 @@
# For more information, see the documentation at:
# <http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-discovery.html>
#
# ---------------------------------- Gateway -----------------------------------
#
# Block initial recovery after a full cluster restart until N nodes are started:
#
# gateway.recover_after_nodes: 3
#
# For more information, see the documentation at:
# <http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-gateway.html>
#
# ---------------------------------- Various -----------------------------------
#
# Disable starting multiple nodes on a single system:

View File

@ -64,16 +64,19 @@ cloud:
protocol: https
----
In addition, a proxy can be configured with the `proxy_host` and `proxy_port` settings (note that protocol can be
`http` or `https`):
In addition, a proxy can be configured with the `proxy.host`, `proxy.port`, `proxy.username` and `proxy.password` settings
(note that protocol can be `http` or `https`):
[source,yaml]
----
cloud:
aws:
protocol: https
proxy_host: proxy1.company.com
proxy_port: 8083
proxy:
host: proxy1.company.com
port: 8083
username: myself
password: theBestPasswordEver!
----
You can also set different proxies for `ec2` and `s3`:
@ -83,11 +86,17 @@ You can also set different proxies for `ec2` and `s3`:
cloud:
aws:
s3:
proxy_host: proxy1.company.com
proxy_port: 8083
proxy:
host: proxy1.company.com
port: 8083
username: myself1
password: theBestPasswordEver1!
ec2:
proxy_host: proxy2.company.com
proxy_port: 8083
proxy:
host: proxy2.company.com
port: 8083
username: myself2
password: theBestPasswordEver2!
----
[[discovery-ec2-usage-region]]

View File

@ -67,16 +67,19 @@ cloud:
protocol: https
----
In addition, a proxy can be configured with the `proxy_host` and `proxy_port` settings (note that protocol can be
`http` or `https`):
In addition, a proxy can be configured with the `proxy.host`, `proxy.port`, `proxy.username` and `proxy.password` settings
(note that protocol can be `http` or `https`):
[source,yaml]
----
cloud:
aws:
protocol: https
proxy_host: proxy1.company.com
proxy_port: 8083
proxy:
host: proxy1.company.com
port: 8083
username: myself
password: theBestPasswordEver!
----
You can also set different proxies for `ec2` and `s3`:
@ -86,11 +89,17 @@ You can also set different proxies for `ec2` and `s3`:
cloud:
aws:
s3:
proxy_host: proxy1.company.com
proxy_port: 8083
proxy:
host: proxy1.company.com
port: 8083
username: myself1
password: theBestPasswordEver1!
ec2:
proxy_host: proxy2.company.com
proxy_port: 8083
proxy:
host: proxy2.company.com
port: 8083
username: myself2
password: theBestPasswordEver2!
----
[[repository-s3-usage-region]]

View File

@ -360,6 +360,22 @@ are:
`s`:: Second
`ms`:: Milli-second
[[size-units]]
[float]
=== Data size units
Whenever the size of data needs to be specified, eg when setting a buffer size
parameter, the value must specify the unit, like `10kb` for 10 kilobytes. The
supported units are:
[horizontal]
`b`:: Bytes
`kb`:: Kilobytes
`mb`:: Megabytes
`gb`:: Gigabytes
`tb`:: Terabytes
`pb`:: Petabytes
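For illustration, a minimal sketch of reading such a value through the
`Settings` API (`Settings.builder()` also appears in the test code elsewhere in
this change); the setting key used here is only an example:
[source,java]
----
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
public class SizeUnitsExample {
    public static void main(String[] args) {
        // The unit is part of the value itself, e.g. "10kb" for 10 kilobytes.
        Settings settings = Settings.builder()
                .put("network.tcp.send_buffer_size", "10kb")
                .build();
        ByteSizeValue size = settings.getAsBytesSize("network.tcp.send_buffer_size", null);
        System.out.println(size.bytes()); // 10240
    }
}
----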
[[distance-units]]
[float]
=== Distance Units

View File

@ -7,6 +7,7 @@
:jdk: 1.8.0_25
:defguide: https://www.elastic.co/guide/en/elasticsearch/guide/current
:plugins: https://www.elastic.co/guide/en/elasticsearch/plugins/master
:javaclient: https://www.elastic.co/guide/en/elasticsearch/client/java-api/master/
:issue: https://github.com/elastic/elasticsearch/issues/
:pull: https://github.com/elastic/elasticsearch/pull/

View File

@ -63,7 +63,22 @@ curl -XPOST 'http://localhost:9200/_aliases' -d '
}'
--------------------------------------------------
Alternatively, you can use a glob pattern to associate an alias to
Multiple indices can be specified for an action with the `indices` array syntax:
[source,js]
--------------------------------------------------
curl -XPOST 'http://localhost:9200/_aliases' -d '
{
"actions" : [
{ "add" : { "indices" : ["test1", "test2"], "alias" : "alias1" } }
]
}'
--------------------------------------------------
To specify multiple aliases in one action, the corresponding `aliases` array
syntax exists as well.
For the example above, a glob pattern can also be used to associate an alias to
more than one index that share a common name:
[source,js]

View File

@ -206,6 +206,13 @@ cluster settings please use the settings update API and set their superseded key
The `transform` feature from mappings has been removed. It made issues very hard to debug.
==== Default number mappings
When a floating-point number is encountered, it is now dynamically mapped as a
float by default instead of a double. The reasoning is that floats are
precise enough for most use cases while decreasing storage requirements
significantly.
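For illustration, a minimal sketch of the new behaviour, modelled on the
dynamic-mapping test added in this change; the index, type and field names are
placeholders:
[source,java]
----
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
public class DefaultFloatMappingExample extends ESSingleNodeTestCase {
    public void testFloatIsTheDefault() throws Exception {
        DocumentMapper mapper = createIndex("my_index").mapperService()
                .documentMapperWithAutoCreate("my_type").getDocumentMapper();
        BytesReference source = XContentFactory.jsonBuilder().startObject()
                .field("price", 3.2) // a floating-point value in the source document
                .endObject().bytes();
        Mapping update = mapper.parse("my_index", "my_type", "1", source).dynamicMappingsUpdate();
        // The dynamic mapping for "price" is now a float instead of a double.
        assertTrue(update.root().getMapper("price") instanceof FloatFieldMapper);
    }
}
----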
[[breaking_30_plugins]]
=== Plugin changes
@ -237,6 +244,15 @@ Cloud AWS plugin has been split in two plugins:
* {plugins}/discovery-ec2.html[Discovery EC2 plugin]
* {plugins}/repository-s3.html[Repository S3 plugin]
Proxy settings for both plugins have been renamed:
* from `cloud.aws.proxy_host` to `cloud.aws.proxy.host`
* from `cloud.aws.ec2.proxy_host` to `cloud.aws.ec2.proxy.host`
* from `cloud.aws.s3.proxy_host` to `cloud.aws.s3.proxy.host`
* from `cloud.aws.proxy_port` to `cloud.aws.proxy.port`
* from `cloud.aws.ec2.proxy_port` to `cloud.aws.ec2.proxy.port`
* from `cloud.aws.s3.proxy_port` to `cloud.aws.s3.proxy.port`
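A minimal sketch of what the rename means for existing configurations: the new
`proxy.host`/`proxy.port` keys take precedence, while the old keys are still
read as a deprecated fallback (mirroring the lookup added to
`AwsEc2ServiceImpl` in this change; the host names are placeholders):
[source,java]
----
import org.elasticsearch.common.settings.Settings;
public class ProxySettingsExample {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
                .put("cloud.aws.proxy_host", "old-proxy.example.com") // deprecated key
                .put("cloud.aws.proxy.host", "new-proxy.example.com") // renamed key
                .build();
        // Prefer the new key, fall back to the deprecated one if only it is set.
        String proxyHost = settings.get("cloud.aws.proxy.host",
                settings.get("cloud.aws.proxy_host"));
        System.out.println(proxyHost); // prints "new-proxy.example.com"
    }
}
----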
==== Cloud Azure plugin changes
Cloud Azure plugin has been split in three plugins:

View File

@ -1,94 +1,175 @@
[[modules-network]]
== Network Settings
There are several modules within a Node that use network based
configuration, for example, the
<<modules-transport,transport>> and
<<modules-http,http>> modules. Node level
network settings allows to set common settings that will be shared among
all network based modules (unless explicitly overridden in each module).
Elasticsearch binds to localhost only by default. This is sufficient for you
to run a local development server (or even a development cluster, if you start
multiple nodes on the same machine), but you will need to configure some
<<common-network-settings,basic network settings>> in order to run a real
production cluster across multiple servers.
Be careful with host configuration! Never expose an unprotected instance
to the public internet.
[WARNING]
.Be careful with the network configuration!
=============================
Never expose an unprotected node to the public internet.
=============================
The `network.bind_host` setting allows to control the host different network
components will bind on. By default, the bind host will be `_local_`
(loopback addresses such as `127.0.0.1`, `::1`).
[float]
[[common-network-settings]]
=== Commonly Used Network Settings
The `network.publish_host` setting allows to control the host the node will
publish itself within the cluster so other nodes will be able to connect to it.
Currently an elasticsearch node may be bound to multiple addresses, but only
publishes one. If not specified, this defaults to the "best" address from
`network.bind_host`, sorted by IPv4/IPv6 stack preference, then by reachability.
`network.host`::
The `network.host` setting is a simple setting to automatically set both
`network.bind_host` and `network.publish_host` to the same host value.
The node will bind to this hostname or IP address and _publish_ (advertise)
this host to other nodes in the cluster. Accepts an IP address, hostname, or a
<<network-interface-values,special value>>.
+
Defaults to `_local_`.
Both settings allows to be configured with either explicit host address(es)
or host name(s). The settings also accept logical setting value(s) explained
in the following table:
`discovery.zen.ping.unicast.hosts`::
[cols="<,<",options="header",]
|=======================================================================
|Logical Host Setting Value |Description
|`_local_` |Will be resolved to loopback addresses
In order to join a cluster, a node needs to know the hostname or IP address of
at least some of the other nodes in the cluster. This setting provides the
initial list of other nodes that this node will try to contact. Accepts IP
addresses or hostnames.
+
Defaults to `["127.0.0.1", "[::1]"]`.
|`_local:ipv4_` |Will be resolved to loopback IPv4 addresses (e.g. 127.0.0.1)
`http.port`::
|`_local:ipv6_` |Will be resolved to loopback IPv6 addresses (e.g. ::1)
Port to bind to for incoming HTTP requests. Accepts a single value or a range.
If a range is specified, the node will bind to the first available port in the
range.
+
Defaults to `9200-9300`.
|`_site_` |Will be resolved to site-local addresses ("private network")
`transport.tcp.port`::
|`_site:ipv4_` |Will be resolved to site-local IPv4 addresses (e.g. 192.168.0.1)
Port to bind for communication between nodes. Accepts a single value or a
range. If a range is specified, the node will bind to the first available port
in the range.
+
Defaults to `9300-9400`.
|`_site:ipv6_` |Will be resolved to site-local IPv6 addresses (e.g. fec0::1)
[float]
[[network-interface-values]]
=== Special values for `network.host`
|`_global_` |Will be resolved to globally-scoped addresses ("publicly reachable")
The following special values may be passed to `network.host`:
|`_global:ipv4_` |Will be resolved to globally-scoped IPv4 addresses (e.g. 8.8.8.8)
[horizontal]
`_[networkInterface]_`::
|`_global:ipv6_` |Will be resolved to globally-scoped IPv6 addresses (e.g. 2001:4860:4860::8888)
Addresses of a network interface, for example `_en0_`.
|`_[networkInterface]_` |Resolves to the addresses of the provided
network interface. For example `_en0_`.
`_local_`::
|`_[networkInterface]:ipv4_` |Resolves to the ipv4 addresses of the
provided network interface. For example `_en0:ipv4_`.
Any loopback addresses on the system, for example `127.0.0.1`.
|`_[networkInterface]:ipv6_` |Resolves to the ipv6 addresses of the
provided network interface. For example `_en0:ipv6_`.
|=======================================================================
`_site_`::
When the `discovery-ec2` plugin is installed, you can use
{plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[ec2 specific host settings].
Any site-local addresses on the system, for example `192.168.0.1`.
When the `discovery-gce` plugin is installed, you can use
{plugins}/discovery-gce-network-host.html[gce specific host settings].
`_global_`::
Any globally-scoped addresses on the system, for example `8.8.8.8`.
[float]
==== IPv4 vs IPv6
These special values will work over both IPv4 and IPv6 by default, but you can
also limit this with the use of `:ipv4` or `:ipv6` specifiers. For example,
`_en0:ipv4_` would only bind to the IPv4 addresses of interface `en0`.
[TIP]
.Discovery in the cloud
================================
More special settings are available when running in the cloud with either the
{plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[EC2 discovery plugin] or the
{plugins}/discovery-gce-network-host.html#discovery-gce-network-host[Google Compute Engine discovery plugin]
installed.
================================
[float]
[[advanced-network-settings]]
=== Advanced network settings
The `network.host` setting explained in <<common-network-settings,Commonly used network settings>>
is a shortcut which sets the _bind host_ and the _publish host_ at the same
time. In advanced use cases, such as when running behind a proxy server, you
may need to set these settings to different values:
`network.bind_host`::
This specifies which network interface(s) a node should bind to in order to
listen for incoming requests. A node can bind to multiple interfaces, e.g.
two network cards, or a site-local address and a local address. Defaults to
`network.host`.
`network.publish_host`::
The publish host is the single interface that the node advertises to other
nodes in the cluster, so that those nodes can connect to it. Currently an
Elasticsearch node may be bound to multiple addresses, but only publishes one.
If not specified, this defaults to the ``best'' address from
`network.bind_host`, sorted by IPv4/IPv6 stack preference, then by
reachability.
Both of the above settings can be configured just like `network.host` -- they
accept IP addresses, host names, and
<<network-interface-values,special values>>.
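For illustration, a minimal sketch of how bind and publish addresses resolve
programmatically, using the `NetworkService` API exercised by the tests in this
change (the addresses are placeholders):
[source,java]
----
import java.net.InetAddress;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
public class BindPublishExample {
    public static void main(String[] args) throws Exception {
        NetworkService service = new NetworkService(Settings.EMPTY);
        // A node may bind to several addresses...
        InetAddress[] bound = service.resolveBindHostAddresses(new String[] { "127.0.0.1", "127.0.0.2" });
        // ...but it advertises exactly one publish address to the rest of the cluster.
        InetAddress published = service.resolvePublishHostAddresses(new String[] { "_local_" });
        System.out.println(bound.length + " bound address(es), publishing " + published.getHostAddress());
    }
}
----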
[float]
[[tcp-settings]]
=== TCP Settings
=== Advanced TCP Settings
Any component that uses TCP (like the HTTP, Transport and Memcached)
share the following allowed settings:
Any component that uses TCP (like the <<modules-http,HTTP>> and
<<modules-transport,Transport>> modules) shares the following settings:
[cols="<,<",options="header",]
|=======================================================================
|Setting |Description
|`network.tcp.no_delay` |Enable or disable tcp no delay setting.
[horizontal]
`network.tcp.no_delay`::
Enable or disable the https://en.wikipedia.org/wiki/Nagle%27s_algorithm[TCP no delay]
setting. Defaults to `true`.
`network.tcp.keep_alive`::
Enable or disable https://en.wikipedia.org/wiki/Keepalive[TCP keep alive].
Defaults to `true`.
|`network.tcp.keep_alive` |Enable or disable tcp keep alive. Defaults
to `true`.
`network.tcp.reuse_address`::
|`network.tcp.reuse_address` |Should an address be reused or not.
Defaults to `true` on non-windows machines.
Should an address be reused or not. Defaults to `true` on non-windows
machines.
|`network.tcp.send_buffer_size` |The size of the tcp send buffer size
(in size setting format). By default not explicitly set.
`network.tcp.send_buffer_size`::
|`network.tcp.receive_buffer_size` |The size of the tcp receive buffer
size (in size setting format). By default not explicitly set.
|=======================================================================
The size of the TCP send buffer (specified with <<size-units,size units>>).
By default not explicitly set.
`network.tcp.receive_buffer_size`::
The size of the TCP receive buffer (specified with <<size-units,size units>>).
By default not explicitly set.
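A minimal sketch of these TCP options assembled programmatically; the values
are arbitrary examples:
[source,java]
----
import org.elasticsearch.common.settings.Settings;
public class TcpSettingsExample {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
                .put("network.tcp.no_delay", true)           // TCP_NODELAY: disables Nagle's algorithm
                .put("network.tcp.keep_alive", true)         // enable TCP keep-alive
                .put("network.tcp.send_buffer_size", "32kb") // size units as described above
                .put("network.tcp.receive_buffer_size", "32kb")
                .build();
        System.out.println(settings.getAsMap());
    }
}
----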
[float]
=== Transport and HTTP protocols
An Elasticsearch node exposes two network protocols which inherit the above
settings, but may be further configured independently:
TCP Transport::
Used for communication between nodes in the cluster and by the Java
{javaclient}/node-client.html[Node client],
{javaclient}/transport-client.html[Transport client], and by the
<<modules-tribe,Tribe node>>. See the <<modules-transport,Transport module>>
for more information.
HTTP::
Exposes the JSON-over-HTTP interface used by all clients other than the Java
clients. See the <<modules-http,HTTP module>> for more information.

View File

@ -178,7 +178,7 @@ public class IndicesRequestTests extends ESIntegTestCase {
}
public void testIndex() {
String[] indexShardActions = new String[]{IndexAction.NAME, IndexAction.NAME + "[r]"};
String[] indexShardActions = new String[]{IndexAction.NAME, IndexAction.NAME + "[p]", IndexAction.NAME + "[r]"};
interceptTransportActions(indexShardActions);
IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias(), "type", "id").source("field", "value");
@ -189,7 +189,7 @@ public class IndicesRequestTests extends ESIntegTestCase {
}
public void testDelete() {
String[] deleteShardActions = new String[]{DeleteAction.NAME, DeleteAction.NAME + "[r]"};
String[] deleteShardActions = new String[]{DeleteAction.NAME, DeleteAction.NAME + "[p]", DeleteAction.NAME + "[r]"};
interceptTransportActions(deleteShardActions);
DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias(), "type", "id");
@ -244,7 +244,7 @@ public class IndicesRequestTests extends ESIntegTestCase {
}
public void testBulk() {
String[] bulkShardActions = new String[]{BulkAction.NAME + "[s]", BulkAction.NAME + "[s][r]"};
String[] bulkShardActions = new String[]{BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]"};
interceptTransportActions(bulkShardActions);
List<String> indices = new ArrayList<>();
@ -344,7 +344,7 @@ public class IndicesRequestTests extends ESIntegTestCase {
}
public void testFlush() {
String[] indexShardActions = new String[]{TransportShardFlushAction.NAME + "[r]", TransportShardFlushAction.NAME};
String[] indexShardActions = new String[]{TransportShardFlushAction.NAME, TransportShardFlushAction.NAME + "[r]", TransportShardFlushAction.NAME + "[p]"};
interceptTransportActions(indexShardActions);
FlushRequest flushRequest = new FlushRequest(randomIndicesOrAliases());
@ -367,7 +367,7 @@ public class IndicesRequestTests extends ESIntegTestCase {
}
public void testRefresh() {
String[] indexShardActions = new String[]{TransportShardRefreshAction.NAME + "[r]", TransportShardRefreshAction.NAME};
String[] indexShardActions = new String[]{TransportShardRefreshAction.NAME, TransportShardRefreshAction.NAME + "[r]", TransportShardRefreshAction.NAME + "[p]"};
interceptTransportActions(indexShardActions);
RefreshRequest refreshRequest = new RefreshRequest(randomIndicesOrAliases());

View File

@ -42,7 +42,7 @@ dependencyLicenses {
mapping from: /jackson-.*/, to: 'jackson'
}
compileJava.options.compilerArgs << '-Xlint:-rawtypes'
compileJava.options.compilerArgs << '-Xlint:-rawtypes,-deprecation'
test {
// this is needed for insecure plugins, remove if possible!

View File

@ -27,20 +27,32 @@ public interface AwsEc2Service extends LifecycleComponent<AwsEc2Service> {
public static final String KEY = "cloud.aws.access_key";
public static final String SECRET = "cloud.aws.secret_key";
public static final String PROTOCOL = "cloud.aws.protocol";
public static final String PROXY_HOST = "cloud.aws.proxy_host";
public static final String PROXY_PORT = "cloud.aws.proxy_port";
public static final String PROXY_HOST = "cloud.aws.proxy.host";
public static final String PROXY_PORT = "cloud.aws.proxy.port";
public static final String PROXY_USERNAME = "cloud.aws.proxy.username";
public static final String PROXY_PASSWORD = "cloud.aws.proxy.password";
public static final String SIGNER = "cloud.aws.signer";
public static final String REGION = "cloud.aws.region";
@Deprecated
public static final String DEPRECATED_PROXY_HOST = "cloud.aws.proxy_host";
@Deprecated
public static final String DEPRECATED_PROXY_PORT = "cloud.aws.proxy_port";
}
final class CLOUD_EC2 {
public static final String KEY = "cloud.aws.ec2.access_key";
public static final String SECRET = "cloud.aws.ec2.secret_key";
public static final String PROTOCOL = "cloud.aws.ec2.protocol";
public static final String PROXY_HOST = "cloud.aws.ec2.proxy_host";
public static final String PROXY_PORT = "cloud.aws.ec2.proxy_port";
public static final String PROXY_HOST = "cloud.aws.ec2.proxy.host";
public static final String PROXY_PORT = "cloud.aws.ec2.proxy.port";
public static final String PROXY_USERNAME = "cloud.aws.ec2.proxy.username";
public static final String PROXY_PASSWORD = "cloud.aws.ec2.proxy.password";
public static final String SIGNER = "cloud.aws.ec2.signer";
public static final String ENDPOINT = "cloud.aws.ec2.endpoint";
@Deprecated
public static final String DEPRECATED_PROXY_HOST = "cloud.aws.ec2.proxy_host";
@Deprecated
public static final String DEPRECATED_PROXY_PORT = "cloud.aws.ec2.proxy_port";
}
final class DISCOVERY_EC2 {

View File

@ -56,8 +56,10 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent<AwsEc2Service>
// Filter global settings
settingsFilter.addFilter(CLOUD_AWS.KEY);
settingsFilter.addFilter(CLOUD_AWS.SECRET);
settingsFilter.addFilter(CLOUD_AWS.PROXY_PASSWORD);
settingsFilter.addFilter(CLOUD_EC2.KEY);
settingsFilter.addFilter(CLOUD_EC2.SECRET);
settingsFilter.addFilter(CLOUD_EC2.PROXY_PASSWORD);
// add specific ec2 name resolver
networkService.addCustomNameResolver(new Ec2NameResolver(settings));
discoveryNodeService.addCustomAttributeProvider(new Ec2CustomNodeAttributes(settings));
@ -83,16 +85,25 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent<AwsEc2Service>
String account = settings.get(CLOUD_EC2.KEY, settings.get(CLOUD_AWS.KEY));
String key = settings.get(CLOUD_EC2.SECRET, settings.get(CLOUD_AWS.SECRET));
String proxyHost = settings.get(CLOUD_EC2.PROXY_HOST, settings.get(CLOUD_AWS.PROXY_HOST));
String proxyHost = settings.get(CLOUD_AWS.PROXY_HOST, settings.get(CLOUD_AWS.DEPRECATED_PROXY_HOST));
proxyHost = settings.get(CLOUD_EC2.PROXY_HOST, settings.get(CLOUD_EC2.DEPRECATED_PROXY_HOST, proxyHost));
if (proxyHost != null) {
String portString = settings.get(CLOUD_EC2.PROXY_PORT, settings.get(CLOUD_AWS.PROXY_PORT, "80"));
String portString = settings.get(CLOUD_AWS.PROXY_PORT, settings.get(CLOUD_AWS.DEPRECATED_PROXY_PORT, "80"));
portString = settings.get(CLOUD_EC2.PROXY_PORT, settings.get(CLOUD_EC2.DEPRECATED_PROXY_PORT, portString));
Integer proxyPort;
try {
proxyPort = Integer.parseInt(portString, 10);
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("The configured proxy port value [" + portString + "] is invalid", ex);
}
clientConfiguration.withProxyHost(proxyHost).setProxyPort(proxyPort);
String proxyUsername = settings.get(CLOUD_EC2.PROXY_USERNAME, settings.get(CLOUD_AWS.PROXY_USERNAME));
String proxyPassword = settings.get(CLOUD_EC2.PROXY_PASSWORD, settings.get(CLOUD_AWS.PROXY_PASSWORD));
clientConfiguration
.withProxyHost(proxyHost)
.withProxyPort(proxyPort)
.withProxyUsername(proxyUsername)
.withProxyPassword(proxyPassword);
}
// #155: we might have 3rd party users using older EC2 API version

145
plugins/lang-plan-a/ant.xml Normal file
View File

@ -0,0 +1,145 @@
<?xml version="1.0"?>
<project name="ant-stuff">
<!--
grammar regeneration logic
we do this with ant for several reasons:
* remove generated tabs for forbidden-apis
* remove generated timestamps/filenames for reproducible build
* fix CRLF line endings for windows consistency
* ability to make classes package-private
* keeping in source code control is easier on IDEs
* regeneration should be rare, no reason to be religious about generated files
* all logic already written and battle tested in lucene build
-->
<target name="regenerate" description="Regenerate antlr lexer and parser" depends="run-antlr"/>
<target name="run-antlr">
<regen-delete grammar="PlanA"/>
<regen-lexer grammar="PlanA"/>
<regen-parser grammar="PlanA"/>
<regen-fix grammar="PlanA"/>
</target>
<macrodef name="replace-value">
<attribute name="value" />
<attribute name="property" />
<attribute name="from" />
<attribute name="to" />
<sequential>
<loadresource property="@{property}">
<string value="@{value}"/>
<filterchain>
<tokenfilter>
<filetokenizer/>
<replacestring from="@{from}" to="@{to}"/>
</tokenfilter>
</filterchain>
</loadresource>
</sequential>
</macrodef>
<macrodef name="regen-delete">
<attribute name="grammar" />
<sequential>
<local name="output.path"/>
<patternset id="grammar.@{grammar}.patternset">
<include name="@{grammar}Lexer.java" />
<include name="@{grammar}Parser.java" />
<include name="@{grammar}ParserVisitor.java" />
<include name="@{grammar}ParserBaseVisitor.java" />
</patternset>
<property name="output.path" location="src/main/java/org/elasticsearch/plan/a"/>
<!-- delete parser and lexer so files will be generated -->
<delete dir="${output.path}">
<patternset refid="grammar.@{grammar}.patternset"/>
</delete>
</sequential>
</macrodef>
<macrodef name="regen-lexer">
<attribute name="grammar" />
<sequential>
<local name="grammar.path"/>
<local name="output.path"/>
<property name="grammar.path" location="src/main/antlr"/>
<property name="output.path" location="src/main/java/org/elasticsearch/plan/a"/>
<!-- invoke ANTLR4 -->
<java classname="org.antlr.v4.Tool" fork="true" failonerror="true" classpathref="regenerate.classpath" taskname="antlr">
<sysproperty key="file.encoding" value="UTF-8"/>
<sysproperty key="user.language" value="en"/>
<sysproperty key="user.country" value="US"/>
<sysproperty key="user.variant" value=""/>
<arg value="-package"/>
<arg value="org.elasticsearch.plan.a"/>
<arg value="-o"/>
<arg path="${output.path}"/>
<arg path="${grammar.path}/@{grammar}Lexer.g4"/>
</java>
</sequential>
</macrodef>
<macrodef name="regen-parser">
<attribute name="grammar" />
<sequential>
<local name="grammar.path"/>
<local name="output.path"/>
<property name="grammar.path" location="src/main/antlr"/>
<property name="output.path" location="src/main/java/org/elasticsearch/plan/a"/>
<!-- invoke ANTLR4 -->
<java classname="org.antlr.v4.Tool" fork="true" failonerror="true" classpathref="regenerate.classpath" taskname="antlr">
<sysproperty key="file.encoding" value="UTF-8"/>
<sysproperty key="user.language" value="en"/>
<sysproperty key="user.country" value="US"/>
<sysproperty key="user.variant" value=""/>
<arg value="-package"/>
<arg value="org.elasticsearch.plan.a"/>
<arg value="-no-listener"/>
<arg value="-visitor"/>
<!-- <arg value="-Xlog"/> -->
<arg value="-o"/>
<arg path="${output.path}"/>
<arg path="${grammar.path}/@{grammar}Parser.g4"/>
</java>
</sequential>
</macrodef>
<macrodef name="regen-fix">
<attribute name="grammar" />
<sequential>
<local name="grammar.path"/>
<local name="output.path"/>
<property name="grammar.path" location="src/main/antlr"/>
<property name="output.path" location="src/main/java/org/elasticsearch/plan/a"/>
<patternset id="grammar.@{grammar}.patternset">
<include name="@{grammar}Lexer.java" />
<include name="@{grammar}Parser.java" />
<include name="@{grammar}ParserVisitor.java" />
<include name="@{grammar}ParserBaseVisitor.java" />
</patternset>
<!-- fileset with files to edit -->
<fileset id="grammar.fileset" dir="${output.path}">
<patternset refid="grammar.@{grammar}.patternset"/>
</fileset>
<!-- remove files that are not needed to compile or at runtime -->
<delete dir="${grammar.path}" includes="@{grammar}*.tokens"/>
<delete dir="${output.path}" includes="@{grammar}*.tokens"/>
<!-- make the generated classes package private -->
<replaceregexp match="public ((interface|class) \Q@{grammar}\E\w+)" replace="\1" encoding="UTF-8">
<fileset refid="grammar.fileset"/>
</replaceregexp>
<!-- nuke timestamps/filenames in generated files -->
<replaceregexp match="\Q// Generated from \E.*" replace="\/\/ ANTLR GENERATED CODE: DO NOT EDIT" encoding="UTF-8">
<fileset refid="grammar.fileset"/>
</replaceregexp>
<!-- remove tabs in antlr generated files -->
<replaceregexp match="\t" flags="g" replace=" " encoding="UTF-8">
<fileset refid="grammar.fileset"/>
</replaceregexp>
<!-- fix line endings -->
<fixcrlf srcdir="${output.path}">
<patternset refid="grammar.@{grammar}.patternset"/>
</fixcrlf>
</sequential>
</macrodef>
</project>

View File

@ -0,0 +1,48 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.tools.ant.types.Path
esplugin {
description 'An easy, safe and fast scripting language for Elasticsearch'
classname 'org.elasticsearch.plan.a.PlanAPlugin'
}
dependencies {
compile 'org.antlr:antlr4-runtime:4.5.1-1'
compile 'org.ow2.asm:asm:5.0.4'
compile 'org.ow2.asm:asm-commons:5.0.4'
}
compileJava.options.compilerArgs << '-Xlint:-cast,-fallthrough,-rawtypes'
compileTestJava.options.compilerArgs << '-Xlint:-unchecked'
// regeneration logic, comes in via ant right now
// don't port it to gradle, it works fine.
configurations {
regenerate
}
dependencies {
regenerate 'org.antlr:antlr4:4.5.1-1'
}
ant.references['regenerate.classpath'] = new Path(ant.project, configurations.regenerate.asPath)
ant.importBuild 'ant.xml'

View File

@ -0,0 +1 @@
66144204f9d6d7d3f3f775622c2dd7e9bd511d97

View File

@ -0,0 +1,26 @@
[The "BSD license"]
Copyright (c) 2015 Terence Parr, Sam Harwell
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1 @@
0da08b8cce7bbf903602a25a3a163ae252435795

View File

@ -0,0 +1,26 @@
Copyright (c) 2012 France Télécom
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holders nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1 @@

View File

@ -0,0 +1 @@
5a556786086c23cd689a0328f8519db93821c04c

View File

@ -0,0 +1,26 @@
Copyright (c) 2012 France Télécom
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holders nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,120 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
lexer grammar PlanALexer;
@header {
import java.util.Set;
}
@members {
private Set<String> types = null;
void setTypes(Set<String> types) {
this.types = types;
}
}
WS: [ \t\n\r]+ -> skip;
COMMENT: ( '//' .*? [\n\r] | '/*' .*? '*/' ) -> skip;
LBRACK: '{';
RBRACK: '}';
LBRACE: '[';
RBRACE: ']';
LP: '(';
RP: ')';
DOT: '.' -> mode(EXT);
COMMA: ',';
SEMICOLON: ';';
IF: 'if';
ELSE: 'else';
WHILE: 'while';
DO: 'do';
FOR: 'for';
CONTINUE: 'continue';
BREAK: 'break';
RETURN: 'return';
NEW: 'new';
TRY: 'try';
CATCH: 'catch';
THROW: 'throw';
BOOLNOT: '!';
BWNOT: '~';
MUL: '*';
DIV: '/';
REM: '%';
ADD: '+';
SUB: '-';
LSH: '<<';
RSH: '>>';
USH: '>>>';
LT: '<';
LTE: '<=';
GT: '>';
GTE: '>=';
EQ: '==';
EQR: '===';
NE: '!=';
NER: '!==';
BWAND: '&';
BWXOR: '^';
BWOR: '|';
BOOLAND: '&&';
BOOLOR: '||';
COND: '?';
COLON: ':';
INCR: '++';
DECR: '--';
ASSIGN: '=';
AADD: '+=';
ASUB: '-=';
AMUL: '*=';
ADIV: '/=';
AREM: '%=';
AAND: '&=';
AXOR: '^=';
AOR: '|=';
ALSH: '<<=';
ARSH: '>>=';
AUSH: '>>>=';
ACAT: '..=';
OCTAL: '0' [0-7]+ [lL]?;
HEX: '0' [xX] [0-9a-fA-F]+ [lL]?;
INTEGER: ( '0' | [1-9] [0-9]* ) [lLfFdD]?;
DECIMAL: ( '0' | [1-9] [0-9]* ) DOT [0-9]* ( [eE] [+\-]? [0-9]+ )? [fF]?;
STRING: '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"' {setText(getText().substring(1, getText().length() - 1));};
CHAR: '\'' . '\'' {setText(getText().substring(1, getText().length() - 1));};
TRUE: 'true';
FALSE: 'false';
NULL: 'null';
TYPE: ID GENERIC? {types.contains(getText().replace(" ", ""))}? {setText(getText().replace(" ", ""));};
fragment GENERIC: ' '* '<' ' '* ( ID GENERIC? ) ' '* ( COMMA ' '* ( ID GENERIC? ) ' '* )* '>';
ID: [_a-zA-Z] [_a-zA-Z0-9]*;
mode EXT;
EXTINTEGER: ( '0' | [1-9] [0-9]* ) -> mode(DEFAULT_MODE);
EXTID: [_a-zA-Z] [_a-zA-Z0-9]* -> mode(DEFAULT_MODE);

View File

@ -0,0 +1,127 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
parser grammar PlanAParser;
options { tokenVocab=PlanALexer; }
source
: statement+ EOF
;
statement
: IF LP expression RP block ( ELSE block )? # if
| WHILE LP expression RP ( block | empty ) # while
| DO block WHILE LP expression RP SEMICOLON? # do
| FOR LP initializer? SEMICOLON expression? SEMICOLON afterthought? RP ( block | empty ) # for
| declaration SEMICOLON? # decl
| CONTINUE SEMICOLON? # continue
| BREAK SEMICOLON? # break
| RETURN expression SEMICOLON? # return
| TRY block ( CATCH LP ( TYPE ID ) RP block )+ # try
| THROW expression SEMICOLON? # throw
| expression SEMICOLON? # expr
;
block
: LBRACK statement* RBRACK # multiple
| statement # single
;
empty
: SEMICOLON
;
initializer
: declaration
| expression
;
afterthought
: expression
;
declaration
: decltype declvar ( COMMA declvar )*
;
decltype
: TYPE (LBRACE RBRACE)*
;
declvar
: ID ( ASSIGN expression )?
;
expression
: LP expression RP # precedence
| ( OCTAL | HEX | INTEGER | DECIMAL ) # numeric
| CHAR # char
| TRUE # true
| FALSE # false
| NULL # null
| <assoc=right> extstart increment # postinc
| <assoc=right> increment extstart # preinc
| extstart # external
| <assoc=right> ( BOOLNOT | BWNOT | ADD | SUB ) expression # unary
| <assoc=right> LP decltype RP expression # cast
| expression ( MUL | DIV | REM ) expression # binary
| expression ( ADD | SUB ) expression # binary
| expression ( LSH | RSH | USH ) expression # binary
| expression ( LT | LTE | GT | GTE ) expression # comp
| expression ( EQ | EQR | NE | NER ) expression # comp
| expression BWAND expression # binary
| expression BWXOR expression # binary
| expression BWOR expression # binary
| expression BOOLAND expression # bool
| expression BOOLOR expression # bool
| <assoc=right> expression COND expression COLON expression # conditional
| <assoc=right> extstart ( ASSIGN | AADD | ASUB | AMUL | ADIV
| AREM | AAND | AXOR | AOR
| ALSH | ARSH | AUSH ) expression # assignment
;
extstart
: extprec
| extcast
| exttype
| extvar
| extnew
| extstring
;
extprec: LP ( extprec | extcast | exttype | extvar | extnew | extstring ) RP ( extdot | extbrace )?;
extcast: LP decltype RP ( extprec | extcast | exttype | extvar | extnew | extstring );
extbrace: LBRACE expression RBRACE ( extdot | extbrace )?;
extdot: DOT ( extcall | extfield );
exttype: TYPE extdot;
extcall: EXTID arguments ( extdot | extbrace )?;
extvar: ID ( extdot | extbrace )?;
extfield: ( EXTID | EXTINTEGER ) ( extdot | extbrace )?;
extnew: NEW TYPE ( ( arguments ( extdot | extbrace)? ) | ( ( LBRACE expression RBRACE )+ extdot? ) );
extstring: STRING (extdot | extbrace )?;
arguments
: ( LP ( expression ( COMMA expression )* )? RP )
;
increment
: INCR
| DECR
;
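
A hedged sketch of driving the parser generated from this grammar: tokenize a short script, ask for the top-level source rule, and print the resulting tree. The script text and the use of "int" as the only registered type are illustrative assumptions.

package org.elasticsearch.plan.a;

import java.util.Collections;

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;

// Illustrative only: builds a parse tree for a tiny script with the generated lexer and parser.
final class ParserTreeDump {
    public static void main(String[] args) {
        PlanALexer lexer = new PlanALexer(new ANTLRInputStream("int x = 2; return x * 3;"));
        lexer.setTypes(Collections.singleton("int"));
        PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer));
        ParserRuleContext root = parser.source(); // source : statement+ EOF
        System.out.println(root.toStringTree(parser)); // prints rule names and matched text
    }
}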

View File

@ -0,0 +1,276 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;
import java.util.HashMap;
import java.util.Map;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTree;
import static org.elasticsearch.plan.a.Definition.*;
import static org.elasticsearch.plan.a.PlanAParser.*;
class Adapter {
static class StatementMetadata {
final ParserRuleContext source;
boolean last;
boolean allExit;
boolean allReturn;
boolean anyReturn;
boolean allBreak;
boolean anyBreak;
boolean allContinue;
boolean anyContinue;
private StatementMetadata(final ParserRuleContext source) {
this.source = source;
last = false;
allExit = false;
allReturn = false;
anyReturn = false;
allBreak = false;
anyBreak = false;
allContinue = false;
anyContinue = false;
}
}
static class ExpressionMetadata {
final ParserRuleContext source;
boolean read;
boolean statement;
Object preConst;
Object postConst;
boolean isNull;
Type to;
Type from;
boolean explicit;
boolean typesafe;
Cast cast;
private ExpressionMetadata(final ParserRuleContext source) {
this.source = source;
read = true;
statement = false;
preConst = null;
postConst = null;
isNull = false;
to = null;
from = null;
explicit = false;
typesafe = true;
cast = null;
}
}
static class ExternalMetadata {
final ParserRuleContext source;
boolean read;
ParserRuleContext storeExpr;
int token;
boolean pre;
boolean post;
int scope;
Type current;
boolean statik;
boolean statement;
Object constant;
private ExternalMetadata(final ParserRuleContext source) {
this.source = source;
read = false;
storeExpr = null;
token = 0;
pre = false;
post = false;
scope = 0;
current = null;
statik = false;
statement = false;
constant = null;
}
}
static class ExtNodeMetadata {
final ParserRuleContext parent;
final ParserRuleContext source;
Object target;
boolean last;
Type type;
Type promote;
Cast castFrom;
Cast castTo;
private ExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) {
this.parent = parent;
this.source = source;
target = null;
last = false;
type = null;
promote = null;
castFrom = null;
castTo = null;
}
}
static String error(final ParserRuleContext ctx) {
return "Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: ";
}
final Definition definition;
final String source;
final ParserRuleContext root;
final CompilerSettings settings;
private final Map<ParserRuleContext, StatementMetadata> statementMetadata;
private final Map<ParserRuleContext, ExpressionMetadata> expressionMetadata;
private final Map<ParserRuleContext, ExternalMetadata> externalMetadata;
private final Map<ParserRuleContext, ExtNodeMetadata> extNodeMetadata;
Adapter(final Definition definition, final String source, final ParserRuleContext root, final CompilerSettings settings) {
this.definition = definition;
this.source = source;
this.root = root;
this.settings = settings;
statementMetadata = new HashMap<>();
expressionMetadata = new HashMap<>();
externalMetadata = new HashMap<>();
extNodeMetadata = new HashMap<>();
}
StatementMetadata createStatementMetadata(final ParserRuleContext source) {
final StatementMetadata sourcesmd = new StatementMetadata(source);
statementMetadata.put(source, sourcesmd);
return sourcesmd;
}
StatementMetadata getStatementMetadata(final ParserRuleContext source) {
final StatementMetadata sourcesmd = statementMetadata.get(source);
if (sourcesmd == null) {
throw new IllegalStateException(error(source) + "Statement metadata does not exist at" +
" the parse node with text [" + source.getText() + "].");
}
return sourcesmd;
}
ExpressionContext updateExpressionTree(ExpressionContext source) {
if (source instanceof PrecedenceContext) {
final ParserRuleContext parent = source.getParent();
int index = 0;
for (final ParseTree child : parent.children) {
if (child == source) {
break;
}
++index;
}
while (source instanceof PrecedenceContext) {
source = ((PrecedenceContext)source).expression();
}
parent.children.set(index, source);
}
return source;
}
ExpressionMetadata createExpressionMetadata(ParserRuleContext source) {
final ExpressionMetadata sourceemd = new ExpressionMetadata(source);
expressionMetadata.put(source, sourceemd);
return sourceemd;
}
ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) {
final ExpressionMetadata sourceemd = expressionMetadata.get(source);
if (sourceemd == null) {
throw new IllegalStateException(error(source) + "Expression metadata does not exist at" +
" the parse node with text [" + source.getText() + "].");
}
return sourceemd;
}
ExternalMetadata createExternalMetadata(final ParserRuleContext source) {
final ExternalMetadata sourceemd = new ExternalMetadata(source);
externalMetadata.put(source, sourceemd);
return sourceemd;
}
ExternalMetadata getExternalMetadata(final ParserRuleContext source) {
final ExternalMetadata sourceemd = externalMetadata.get(source);
if (sourceemd == null) {
throw new IllegalStateException(error(source) + "External metadata does not exist at" +
" the parse node with text [" + source.getText() + "].");
}
return sourceemd;
}
ExtNodeMetadata createExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) {
final ExtNodeMetadata sourceemd = new ExtNodeMetadata(parent, source);
extNodeMetadata.put(source, sourceemd);
return sourceemd;
}
ExtNodeMetadata getExtNodeMetadata(final ParserRuleContext source) {
final ExtNodeMetadata sourceemd = extNodeMetadata.get(source);
if (sourceemd == null) {
throw new IllegalStateException(error(source) + "External node metadata does not exist at" +
" the parse node with text [" + source.getText() + "].");
}
return sourceemd;
}
}
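
The Adapter is essentially a set of identity maps from parse-tree nodes to per-pass metadata, with a strict create-before-get contract. A small sketch of that contract, assuming the Definition, source text, parse tree, and settings come from the surrounding compiler plumbing; the helper class name is hypothetical:

package org.elasticsearch.plan.a;

import org.antlr.v4.runtime.ParserRuleContext;

// Illustrative only: shows the create-then-get discipline the Adapter enforces.
final class AdapterContractSketch {
    static void tagRoot(Adapter adapter, ParserRuleContext root) {
        Adapter.StatementMetadata rootsmd = adapter.createStatementMetadata(root);
        rootsmd.last = true; // mark the final top-level statement for later passes
        // A later pass may look the metadata up again; without the create() above this
        // throws IllegalStateException carrying the offending node's text.
        Adapter.StatementMetadata again = adapter.getStatementMetadata(root);
        assert again == rootsmd;
    }
}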

File diff suppressed because it is too large.

View File

@ -0,0 +1,154 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.CodeSource;
import java.security.SecureClassLoader;
import java.security.cert.Certificate;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.elasticsearch.bootstrap.BootstrapInfo;
final class Compiler {
private static Definition DEFAULT_DEFINITION = new Definition(new Definition());
/** We define the class with the lowest privileges. */
private static final CodeSource CODESOURCE;
static {
try {
CODESOURCE = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null);
} catch (MalformedURLException impossible) {
throw new RuntimeException(impossible);
}
}
static class Loader extends SecureClassLoader {
Loader(ClassLoader parent) {
super(parent);
}
Class<? extends Executable> define(String name, byte[] bytes) {
return defineClass(name, bytes, 0, bytes.length, CODESOURCE).asSubclass(Executable.class);
}
}
static Executable compile(Loader loader, final String name, final String source, final Definition custom, CompilerSettings settings) {
long start = System.currentTimeMillis();
final Definition definition = custom == null ? DEFAULT_DEFINITION : new Definition(custom);
//long end = System.currentTimeMillis() - start;
//System.out.println("types: " + end);
//start = System.currentTimeMillis();
//final ParserRuleContext root = createParseTree(source, types);
final ANTLRInputStream stream = new ANTLRInputStream(source);
final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream);
final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer));
final ParserErrorStrategy strategy = new ParserErrorStrategy();
lexer.removeErrorListeners();
lexer.setTypes(definition.structs.keySet());
//List<? extends Token> tokens = lexer.getAllTokens();
//for (final Token token : tokens) {
// System.out.println(token.getType() + " " + token.getText());
//}
parser.removeErrorListeners();
parser.setErrorHandler(strategy);
ParserRuleContext root = parser.source();
//end = System.currentTimeMillis() - start;
//System.out.println("tree: " + end);
final Adapter adapter = new Adapter(definition, source, root, settings);
start = System.currentTimeMillis();
Analyzer.analyze(adapter);
//System.out.println(root.toStringTree(parser));
//end = System.currentTimeMillis() - start;
//System.out.println("analyze: " + end);
//start = System.currentTimeMillis();
final byte[] bytes = Writer.write(adapter);
//end = System.currentTimeMillis() - start;
//System.out.println("write: " + end);
//start = System.currentTimeMillis();
final Executable executable = createExecutable(loader, definition, name, source, bytes);
//end = System.currentTimeMillis() - start;
//System.out.println("create: " + end);
return executable;
}
private static ParserRuleContext createParseTree(String source, Definition definition) {
final ANTLRInputStream stream = new ANTLRInputStream(source);
final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream);
final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer));
final ParserErrorStrategy strategy = new ParserErrorStrategy();
lexer.removeErrorListeners();
lexer.setTypes(definition.structs.keySet());
parser.removeErrorListeners();
parser.setErrorHandler(strategy);
ParserRuleContext root = parser.source();
// System.out.println(root.toStringTree(parser));
return root;
}
private static Executable createExecutable(Loader loader, Definition definition, String name, String source, byte[] bytes) {
try {
// for debugging:
//try {
// FileOutputStream f = new FileOutputStream(new File("/Users/jdconrad/lang/generated/out.class"), false);
// f.write(bytes);
// f.close();
//} catch (Exception e) {
// throw new RuntimeException(e);
//}
final Class<? extends Executable> clazz = loader.define(Writer.CLASS_NAME, bytes);
final java.lang.reflect.Constructor<? extends Executable> constructor =
clazz.getConstructor(Definition.class, String.class, String.class);
return constructor.newInstance(definition, name, source);
} catch (Exception exception) {
throw new IllegalStateException(
"An internal error occurred attempting to define the script [" + name + "].", exception);
}
}
private Compiler() {}
}
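
End to end, the Compiler turns a script string into an Executable defined through the sandboxing Loader. A hedged usage sketch, assuming a trivial arithmetic script is accepted by the analyzer and that the caller sits in the same package; the parent class loader choice and the class name are illustrative:

package org.elasticsearch.plan.a;

import java.util.HashMap;
import java.util.Map;

// Illustrative only: compiles a one-line script and runs the resulting Executable.
final class CompileAndRunSketch {
    static Object run() {
        Compiler.Loader loader = new Compiler.Loader(Compiler.class.getClassLoader());
        Executable executable =
            Compiler.compile(loader, "demo", "return 2 + 3;", null, new CompilerSettings());
        Map<String, Object> input = new HashMap<>();
        return executable.execute(input); // evaluates the compiled script body
    }
}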

View File

@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;
/**
* Settings to use when compiling a script.
*/
final class CompilerSettings {
private boolean numericOverflow = true;
/**
* Returns {@code true} if numeric operations should overflow, {@code false}
* if they should signal an exception.
* <p>
* If this value is {@code true} (the default), then things behave like Java:
* overflow for integer types can result in unexpected values / unexpected
* signs, and overflow for floating point types can result in infinite or
* {@code NaN} values.
*/
public boolean getNumericOverflow() {
return numericOverflow;
}
/**
* Set {@code true} for numerics to overflow, {@code false} to deliver exceptions.
* @see #getNumericOverflow
*/
public void setNumericOverflow(boolean allow) {
this.numericOverflow = allow;
}
}
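
A short sketch of the single knob this class currently exposes; turning overflow off asks the compiler for exception-raising arithmetic instead of Java-style wraparound (the wrapper class name is illustrative):

package org.elasticsearch.plan.a;

// Illustrative only: builds settings that request overflow checks.
final class StrictSettingsSketch {
    static CompilerSettings strictSettings() {
        CompilerSettings settings = new CompilerSettings();
        settings.setNumericOverflow(false); // overflow now signals an exception instead of wrapping
        return settings;
    }
}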

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@ -0,0 +1,45 @@
package org.elasticsearch.plan.a;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.text.ParseException;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.LexerNoViableAltException;
import org.antlr.v4.runtime.misc.Interval;
class ErrorHandlingLexer extends PlanALexer {
public ErrorHandlingLexer(CharStream charStream) {
super(charStream);
}
@Override
public void recover(LexerNoViableAltException lnvae) {
CharStream charStream = lnvae.getInputStream();
int startIndex = lnvae.getStartIndex();
String text = charStream.getText(Interval.of(startIndex, charStream.index()));
ParseException parseException = new ParseException("Error [" + _tokenStartLine + ":" +
_tokenStartCharPositionInLine + "]: unexpected character [" +
getErrorDisplay(text) + "].", _tokenStartCharIndex);
parseException.initCause(lnvae);
throw new RuntimeException(parseException);
}
}

View File

@ -0,0 +1,50 @@
package org.elasticsearch.plan.a;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Map;
public abstract class Executable {
protected final Definition definition;
private final String name;
private final String source;
public Executable(final Definition definition, final String name, final String source) {
this.definition = definition;
this.name = name;
this.source = source;
}
public String getName() {
return name;
}
public String getSource() {
return source;
}
public Definition getDefinition() {
return definition;
}
public abstract Object execute(Map<String, Object> input);
}
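
Normally the Writer emits a bytecode subclass of Executable, but the contract is small enough to sketch by hand: implement execute(Map) and delegate everything else to the constructor. The class below is purely illustrative:

package org.elasticsearch.plan.a;

import java.util.Map;

// Illustrative only: a hand-written stand-in for a generated script class.
final class ConstantExecutable extends Executable {
    ConstantExecutable(Definition definition, String name, String source) {
        super(definition, name, source);
    }

    @Override
    public Object execute(Map<String, Object> input) {
        return 42; // a generated class would evaluate the compiled script here
    }
}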

View File

@ -0,0 +1,74 @@
package org.elasticsearch.plan.a;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.text.ParseException;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.InputMismatchException;
import org.antlr.v4.runtime.NoViableAltException;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Token;
class ParserErrorStrategy extends DefaultErrorStrategy {
@Override
public void recover(Parser recognizer, RecognitionException re) {
Token token = re.getOffendingToken();
String message;
if (token == null) {
message = "Error: no parse token found.";
} else if (re instanceof InputMismatchException) {
message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" +
" unexpected token [" + getTokenErrorDisplay(token) + "]" +
" was expecting one of [" + re.getExpectedTokens().toString(recognizer.getVocabulary()) + "].";
} else if (re instanceof NoViableAltException) {
if (token.getType() == PlanAParser.EOF) {
message = "Error: unexpected end of script.";
} else {
message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" +
" invalid sequence of tokens near [" + getTokenErrorDisplay(token) + "].";
}
} else {
message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" +
" unexpected token near [" + getTokenErrorDisplay(token) + "].";
}
ParseException parseException = new ParseException(message, token == null ? -1 : token.getStartIndex());
parseException.initCause(re);
throw new RuntimeException(parseException);
}
@Override
public Token recoverInline(Parser recognizer) throws RecognitionException {
Token token = recognizer.getCurrentToken();
String message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" +
" unexpected token [" + getTokenErrorDisplay(token) + "]" +
" was expecting one of [" + recognizer.getExpectedTokens().toString(recognizer.getVocabulary()) + "].";
ParseException parseException = new ParseException(message, token.getStartIndex());
throw new RuntimeException(parseException);
}
@Override
public void sync(Parser recognizer) {
}
}
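
Both the lexer and this strategy surface failures as a RuntimeException whose cause is a ParseException carrying the message and character offset. A hedged sketch of how a caller can unwrap that; the class and method names are illustrative:

package org.elasticsearch.plan.a;

import java.text.ParseException;
import java.util.Set;

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;

// Illustrative only: parses a script and reports the wrapped ParseException on failure.
final class ErrorReportingSketch {
    static String describe(String script, Set<String> types) {
        ErrorHandlingLexer lexer = new ErrorHandlingLexer(new ANTLRInputStream(script));
        lexer.setTypes(types);
        PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer));
        parser.setErrorHandler(new ParserErrorStrategy());
        try {
            parser.source();
            return "parsed";
        } catch (RuntimeException e) {
            ParseException cause = (ParseException) e.getCause();
            return cause.getMessage() + " (offset " + cause.getErrorOffset() + ")";
        }
    }
}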

View File

@ -0,0 +1,390 @@
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.plan.a;
import java.util.Set;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
class PlanALexer extends Lexer {
static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9,
COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17,
BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25,
MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34,
LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43,
BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51,
AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59,
ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67,
STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75,
EXTID=76;
public static final int EXT = 1;
public static String[] modeNames = {
"DEFAULT_MODE", "EXT"
};
public static final String[] ruleNames = {
"WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT",
"COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE",
"BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT",
"MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT",
"GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND",
"BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL",
"ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT",
"OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE",
"NULL", "TYPE", "GENERIC", "ID", "EXTINTEGER", "EXTID"
};
private static final String[] _LITERAL_NAMES = {
null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','",
"';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'",
"'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'",
"'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'",
"'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'",
"'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='",
"'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null,
null, null, null, null, null, "'true'", "'false'", "'null'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP",
"DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE",
"BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT",
"MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT",
"GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND",
"BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL",
"ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT",
"OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE",
"NULL", "TYPE", "ID", "EXTINTEGER", "EXTID"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
private Set<String> types = null;
void setTypes(Set<String> types) {
this.types = types;
}
public PlanALexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "PlanALexer.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
@Override
public void action(RuleContext _localctx, int ruleIndex, int actionIndex) {
switch (ruleIndex) {
case 67:
STRING_action((RuleContext)_localctx, actionIndex);
break;
case 68:
CHAR_action((RuleContext)_localctx, actionIndex);
break;
case 72:
TYPE_action((RuleContext)_localctx, actionIndex);
break;
}
}
private void STRING_action(RuleContext _localctx, int actionIndex) {
switch (actionIndex) {
case 0:
setText(getText().substring(1, getText().length() - 1));
break;
}
}
private void CHAR_action(RuleContext _localctx, int actionIndex) {
switch (actionIndex) {
case 1:
setText(getText().substring(1, getText().length() - 1));
break;
}
}
private void TYPE_action(RuleContext _localctx, int actionIndex) {
switch (actionIndex) {
case 2:
setText(getText().replace(" ", ""));
break;
}
}
@Override
public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
switch (ruleIndex) {
case 72:
return TYPE_sempred((RuleContext)_localctx, predIndex);
}
return true;
}
private boolean TYPE_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 0:
return types.contains(getText().replace(" ", ""));
}
return true;
}
public static final String _serializedATN =
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2N\u0236\b\1\b\1\4"+
"\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+
"\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
"\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t"+
" \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t"+
"+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+
"\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+
"=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+
"I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\3\2\6\2\u00a0\n\2\r\2\16\2\u00a1\3"+
"\2\3\2\3\3\3\3\3\3\3\3\7\3\u00aa\n\3\f\3\16\3\u00ad\13\3\3\3\3\3\3\3\3"+
"\3\3\3\7\3\u00b4\n\3\f\3\16\3\u00b7\13\3\3\3\3\3\5\3\u00bb\n\3\3\3\3\3"+
"\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13"+
"\3\13\3\f\3\f\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17"+
"\3\17\3\17\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22"+
"\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24"+
"\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27"+
"\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\32\3\32\3\33"+
"\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3!\3\"\3"+
"\"\3\"\3\"\3#\3#\3$\3$\3$\3%\3%\3&\3&\3&\3\'\3\'\3\'\3(\3(\3(\3(\3)\3"+
")\3)\3*\3*\3*\3*\3+\3+\3,\3,\3-\3-\3.\3.\3.\3/\3/\3/\3\60\3\60\3\61\3"+
"\61\3\62\3\62\3\62\3\63\3\63\3\63\3\64\3\64\3\65\3\65\3\65\3\66\3\66\3"+
"\66\3\67\3\67\3\67\38\38\38\39\39\39\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3="+
"\3=\3=\3>\3>\3>\3>\3?\3?\3?\3?\3?\3@\3@\3@\3@\3A\3A\6A\u0185\nA\rA\16"+
"A\u0186\3A\5A\u018a\nA\3B\3B\3B\6B\u018f\nB\rB\16B\u0190\3B\5B\u0194\n"+
"B\3C\3C\3C\7C\u0199\nC\fC\16C\u019c\13C\5C\u019e\nC\3C\5C\u01a1\nC\3D"+
"\3D\3D\7D\u01a6\nD\fD\16D\u01a9\13D\5D\u01ab\nD\3D\3D\7D\u01af\nD\fD\16"+
"D\u01b2\13D\3D\3D\5D\u01b6\nD\3D\6D\u01b9\nD\rD\16D\u01ba\5D\u01bd\nD"+
"\3D\5D\u01c0\nD\3E\3E\3E\3E\3E\3E\7E\u01c8\nE\fE\16E\u01cb\13E\3E\3E\3"+
"E\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3J\3"+
"J\5J\u01e7\nJ\3J\3J\3J\3K\7K\u01ed\nK\fK\16K\u01f0\13K\3K\3K\7K\u01f4"+
"\nK\fK\16K\u01f7\13K\3K\3K\5K\u01fb\nK\3K\7K\u01fe\nK\fK\16K\u0201\13"+
"K\3K\3K\7K\u0205\nK\fK\16K\u0208\13K\3K\3K\5K\u020c\nK\3K\7K\u020f\nK"+
"\fK\16K\u0212\13K\7K\u0214\nK\fK\16K\u0217\13K\3K\3K\3L\3L\7L\u021d\n"+
"L\fL\16L\u0220\13L\3M\3M\3M\7M\u0225\nM\fM\16M\u0228\13M\5M\u022a\nM\3"+
"M\3M\3N\3N\7N\u0230\nN\fN\16N\u0233\13N\3N\3N\5\u00ab\u00b5\u01c9\2O\4"+
"\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34\17\36\20 \21"+
"\"\22$\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35:\36<\37> @!"+
"B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66l\67n8p9r:"+
"t;v<x=z>|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090"+
"I\u0092J\u0094K\u0096\2\u0098L\u009aM\u009cN\4\2\3\21\5\2\13\f\17\17\""+
"\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b"+
"\2FFHHNNffhhnn\4\2GGgg\4\2--//\4\2HHhh\4\2$$^^\5\2C\\aac|\6\2\62;C\\a"+
"ac|\u0255\2\4\3\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2"+
"\2\16\3\2\2\2\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30"+
"\3\2\2\2\2\32\3\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2"+
"\2$\3\2\2\2\2&\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60"+
"\3\2\2\2\2\62\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2"+
"\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H"+
"\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2"+
"\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2"+
"\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2"+
"n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3"+
"\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3"+
"\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2"+
"\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0098"+
"\3\2\2\2\3\u009a\3\2\2\2\3\u009c\3\2\2\2\4\u009f\3\2\2\2\6\u00ba\3\2\2"+
"\2\b\u00be\3\2\2\2\n\u00c0\3\2\2\2\f\u00c2\3\2\2\2\16\u00c4\3\2\2\2\20"+
"\u00c6\3\2\2\2\22\u00c8\3\2\2\2\24\u00ca\3\2\2\2\26\u00ce\3\2\2\2\30\u00d0"+
"\3\2\2\2\32\u00d2\3\2\2\2\34\u00d5\3\2\2\2\36\u00da\3\2\2\2 \u00e0\3\2"+
"\2\2\"\u00e3\3\2\2\2$\u00e7\3\2\2\2&\u00f0\3\2\2\2(\u00f6\3\2\2\2*\u00fd"+
"\3\2\2\2,\u0101\3\2\2\2.\u0105\3\2\2\2\60\u010b\3\2\2\2\62\u0111\3\2\2"+
"\2\64\u0113\3\2\2\2\66\u0115\3\2\2\28\u0117\3\2\2\2:\u0119\3\2\2\2<\u011b"+
"\3\2\2\2>\u011d\3\2\2\2@\u011f\3\2\2\2B\u0122\3\2\2\2D\u0125\3\2\2\2F"+
"\u0129\3\2\2\2H\u012b\3\2\2\2J\u012e\3\2\2\2L\u0130\3\2\2\2N\u0133\3\2"+
"\2\2P\u0136\3\2\2\2R\u013a\3\2\2\2T\u013d\3\2\2\2V\u0141\3\2\2\2X\u0143"+
"\3\2\2\2Z\u0145\3\2\2\2\\\u0147\3\2\2\2^\u014a\3\2\2\2`\u014d\3\2\2\2"+
"b\u014f\3\2\2\2d\u0151\3\2\2\2f\u0154\3\2\2\2h\u0157\3\2\2\2j\u0159\3"+
"\2\2\2l\u015c\3\2\2\2n\u015f\3\2\2\2p\u0162\3\2\2\2r\u0165\3\2\2\2t\u0168"+
"\3\2\2\2v\u016b\3\2\2\2x\u016e\3\2\2\2z\u0171\3\2\2\2|\u0175\3\2\2\2~"+
"\u0179\3\2\2\2\u0080\u017e\3\2\2\2\u0082\u0182\3\2\2\2\u0084\u018b\3\2"+
"\2\2\u0086\u019d\3\2\2\2\u0088\u01aa\3\2\2\2\u008a\u01c1\3\2\2\2\u008c"+
"\u01cf\3\2\2\2\u008e\u01d4\3\2\2\2\u0090\u01d9\3\2\2\2\u0092\u01df\3\2"+
"\2\2\u0094\u01e4\3\2\2\2\u0096\u01ee\3\2\2\2\u0098\u021a\3\2\2\2\u009a"+
"\u0229\3\2\2\2\u009c\u022d\3\2\2\2\u009e\u00a0\t\2\2\2\u009f\u009e\3\2"+
"\2\2\u00a0\u00a1\3\2\2\2\u00a1\u009f\3\2\2\2\u00a1\u00a2\3\2\2\2\u00a2"+
"\u00a3\3\2\2\2\u00a3\u00a4\b\2\2\2\u00a4\5\3\2\2\2\u00a5\u00a6\7\61\2"+
"\2\u00a6\u00a7\7\61\2\2\u00a7\u00ab\3\2\2\2\u00a8\u00aa\13\2\2\2\u00a9"+
"\u00a8\3\2\2\2\u00aa\u00ad\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ab\u00a9\3\2"+
"\2\2\u00ac\u00ae\3\2\2\2\u00ad\u00ab\3\2\2\2\u00ae\u00bb\t\3\2\2\u00af"+
"\u00b0\7\61\2\2\u00b0\u00b1\7,\2\2\u00b1\u00b5\3\2\2\2\u00b2\u00b4\13"+
"\2\2\2\u00b3\u00b2\3\2\2\2\u00b4\u00b7\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b5"+
"\u00b3\3\2\2\2\u00b6\u00b8\3\2\2\2\u00b7\u00b5\3\2\2\2\u00b8\u00b9\7,"+
"\2\2\u00b9\u00bb\7\61\2\2\u00ba\u00a5\3\2\2\2\u00ba\u00af\3\2\2\2\u00bb"+
"\u00bc\3\2\2\2\u00bc\u00bd\b\3\2\2\u00bd\7\3\2\2\2\u00be\u00bf\7}\2\2"+
"\u00bf\t\3\2\2\2\u00c0\u00c1\7\177\2\2\u00c1\13\3\2\2\2\u00c2\u00c3\7"+
"]\2\2\u00c3\r\3\2\2\2\u00c4\u00c5\7_\2\2\u00c5\17\3\2\2\2\u00c6\u00c7"+
"\7*\2\2\u00c7\21\3\2\2\2\u00c8\u00c9\7+\2\2\u00c9\23\3\2\2\2\u00ca\u00cb"+
"\7\60\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00cd\b\n\3\2\u00cd\25\3\2\2\2\u00ce"+
"\u00cf\7.\2\2\u00cf\27\3\2\2\2\u00d0\u00d1\7=\2\2\u00d1\31\3\2\2\2\u00d2"+
"\u00d3\7k\2\2\u00d3\u00d4\7h\2\2\u00d4\33\3\2\2\2\u00d5\u00d6\7g\2\2\u00d6"+
"\u00d7\7n\2\2\u00d7\u00d8\7u\2\2\u00d8\u00d9\7g\2\2\u00d9\35\3\2\2\2\u00da"+
"\u00db\7y\2\2\u00db\u00dc\7j\2\2\u00dc\u00dd\7k\2\2\u00dd\u00de\7n\2\2"+
"\u00de\u00df\7g\2\2\u00df\37\3\2\2\2\u00e0\u00e1\7f\2\2\u00e1\u00e2\7"+
"q\2\2\u00e2!\3\2\2\2\u00e3\u00e4\7h\2\2\u00e4\u00e5\7q\2\2\u00e5\u00e6"+
"\7t\2\2\u00e6#\3\2\2\2\u00e7\u00e8\7e\2\2\u00e8\u00e9\7q\2\2\u00e9\u00ea"+
"\7p\2\2\u00ea\u00eb\7v\2\2\u00eb\u00ec\7k\2\2\u00ec\u00ed\7p\2\2\u00ed"+
"\u00ee\7w\2\2\u00ee\u00ef\7g\2\2\u00ef%\3\2\2\2\u00f0\u00f1\7d\2\2\u00f1"+
"\u00f2\7t\2\2\u00f2\u00f3\7g\2\2\u00f3\u00f4\7c\2\2\u00f4\u00f5\7m\2\2"+
"\u00f5\'\3\2\2\2\u00f6\u00f7\7t\2\2\u00f7\u00f8\7g\2\2\u00f8\u00f9\7v"+
"\2\2\u00f9\u00fa\7w\2\2\u00fa\u00fb\7t\2\2\u00fb\u00fc\7p\2\2\u00fc)\3"+
"\2\2\2\u00fd\u00fe\7p\2\2\u00fe\u00ff\7g\2\2\u00ff\u0100\7y\2\2\u0100"+
"+\3\2\2\2\u0101\u0102\7v\2\2\u0102\u0103\7t\2\2\u0103\u0104\7{\2\2\u0104"+
"-\3\2\2\2\u0105\u0106\7e\2\2\u0106\u0107\7c\2\2\u0107\u0108\7v\2\2\u0108"+
"\u0109\7e\2\2\u0109\u010a\7j\2\2\u010a/\3\2\2\2\u010b\u010c\7v\2\2\u010c"+
"\u010d\7j\2\2\u010d\u010e\7t\2\2\u010e\u010f\7q\2\2\u010f\u0110\7y\2\2"+
"\u0110\61\3\2\2\2\u0111\u0112\7#\2\2\u0112\63\3\2\2\2\u0113\u0114\7\u0080"+
"\2\2\u0114\65\3\2\2\2\u0115\u0116\7,\2\2\u0116\67\3\2\2\2\u0117\u0118"+
"\7\61\2\2\u01189\3\2\2\2\u0119\u011a\7\'\2\2\u011a;\3\2\2\2\u011b\u011c"+
"\7-\2\2\u011c=\3\2\2\2\u011d\u011e\7/\2\2\u011e?\3\2\2\2\u011f\u0120\7"+
">\2\2\u0120\u0121\7>\2\2\u0121A\3\2\2\2\u0122\u0123\7@\2\2\u0123\u0124"+
"\7@\2\2\u0124C\3\2\2\2\u0125\u0126\7@\2\2\u0126\u0127\7@\2\2\u0127\u0128"+
"\7@\2\2\u0128E\3\2\2\2\u0129\u012a\7>\2\2\u012aG\3\2\2\2\u012b\u012c\7"+
">\2\2\u012c\u012d\7?\2\2\u012dI\3\2\2\2\u012e\u012f\7@\2\2\u012fK\3\2"+
"\2\2\u0130\u0131\7@\2\2\u0131\u0132\7?\2\2\u0132M\3\2\2\2\u0133\u0134"+
"\7?\2\2\u0134\u0135\7?\2\2\u0135O\3\2\2\2\u0136\u0137\7?\2\2\u0137\u0138"+
"\7?\2\2\u0138\u0139\7?\2\2\u0139Q\3\2\2\2\u013a\u013b\7#\2\2\u013b\u013c"+
"\7?\2\2\u013cS\3\2\2\2\u013d\u013e\7#\2\2\u013e\u013f\7?\2\2\u013f\u0140"+
"\7?\2\2\u0140U\3\2\2\2\u0141\u0142\7(\2\2\u0142W\3\2\2\2\u0143\u0144\7"+
"`\2\2\u0144Y\3\2\2\2\u0145\u0146\7~\2\2\u0146[\3\2\2\2\u0147\u0148\7("+
"\2\2\u0148\u0149\7(\2\2\u0149]\3\2\2\2\u014a\u014b\7~\2\2\u014b\u014c"+
"\7~\2\2\u014c_\3\2\2\2\u014d\u014e\7A\2\2\u014ea\3\2\2\2\u014f\u0150\7"+
"<\2\2\u0150c\3\2\2\2\u0151\u0152\7-\2\2\u0152\u0153\7-\2\2\u0153e\3\2"+
"\2\2\u0154\u0155\7/\2\2\u0155\u0156\7/\2\2\u0156g\3\2\2\2\u0157\u0158"+
"\7?\2\2\u0158i\3\2\2\2\u0159\u015a\7-\2\2\u015a\u015b\7?\2\2\u015bk\3"+
"\2\2\2\u015c\u015d\7/\2\2\u015d\u015e\7?\2\2\u015em\3\2\2\2\u015f\u0160"+
"\7,\2\2\u0160\u0161\7?\2\2\u0161o\3\2\2\2\u0162\u0163\7\61\2\2\u0163\u0164"+
"\7?\2\2\u0164q\3\2\2\2\u0165\u0166\7\'\2\2\u0166\u0167\7?\2\2\u0167s\3"+
"\2\2\2\u0168\u0169\7(\2\2\u0169\u016a\7?\2\2\u016au\3\2\2\2\u016b\u016c"+
"\7`\2\2\u016c\u016d\7?\2\2\u016dw\3\2\2\2\u016e\u016f\7~\2\2\u016f\u0170"+
"\7?\2\2\u0170y\3\2\2\2\u0171\u0172\7>\2\2\u0172\u0173\7>\2\2\u0173\u0174"+
"\7?\2\2\u0174{\3\2\2\2\u0175\u0176\7@\2\2\u0176\u0177\7@\2\2\u0177\u0178"+
"\7?\2\2\u0178}\3\2\2\2\u0179\u017a\7@\2\2\u017a\u017b\7@\2\2\u017b\u017c"+
"\7@\2\2\u017c\u017d\7?\2\2\u017d\177\3\2\2\2\u017e\u017f\7\60\2\2\u017f"+
"\u0180\7\60\2\2\u0180\u0181\7?\2\2\u0181\u0081\3\2\2\2\u0182\u0184\7\62"+
"\2\2\u0183\u0185\t\4\2\2\u0184\u0183\3\2\2\2\u0185\u0186\3\2\2\2\u0186"+
"\u0184\3\2\2\2\u0186\u0187\3\2\2\2\u0187\u0189\3\2\2\2\u0188\u018a\t\5"+
"\2\2\u0189\u0188\3\2\2\2\u0189\u018a\3\2\2\2\u018a\u0083\3\2\2\2\u018b"+
"\u018c\7\62\2\2\u018c\u018e\t\6\2\2\u018d\u018f\t\7\2\2\u018e\u018d\3"+
"\2\2\2\u018f\u0190\3\2\2\2\u0190\u018e\3\2\2\2\u0190\u0191\3\2\2\2\u0191"+
"\u0193\3\2\2\2\u0192\u0194\t\5\2\2\u0193\u0192\3\2\2\2\u0193\u0194\3\2"+
"\2\2\u0194\u0085\3\2\2\2\u0195\u019e\7\62\2\2\u0196\u019a\t\b\2\2\u0197"+
"\u0199\t\t\2\2\u0198\u0197\3\2\2\2\u0199\u019c\3\2\2\2\u019a\u0198\3\2"+
"\2\2\u019a\u019b\3\2\2\2\u019b\u019e\3\2\2\2\u019c\u019a\3\2\2\2\u019d"+
"\u0195\3\2\2\2\u019d\u0196\3\2\2\2\u019e\u01a0\3\2\2\2\u019f\u01a1\t\n"+
"\2\2\u01a0\u019f\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u0087\3\2\2\2\u01a2"+
"\u01ab\7\62\2\2\u01a3\u01a7\t\b\2\2\u01a4\u01a6\t\t\2\2\u01a5\u01a4\3"+
"\2\2\2\u01a6\u01a9\3\2\2\2\u01a7\u01a5\3\2\2\2\u01a7\u01a8\3\2\2\2\u01a8"+
"\u01ab\3\2\2\2\u01a9\u01a7\3\2\2\2\u01aa\u01a2\3\2\2\2\u01aa\u01a3\3\2"+
"\2\2\u01ab\u01ac\3\2\2\2\u01ac\u01b0\5\24\n\2\u01ad\u01af\t\t\2\2\u01ae"+
"\u01ad\3\2\2\2\u01af\u01b2\3\2\2\2\u01b0\u01ae\3\2\2\2\u01b0\u01b1\3\2"+
"\2\2\u01b1\u01bc\3\2\2\2\u01b2\u01b0\3\2\2\2\u01b3\u01b5\t\13\2\2\u01b4"+
"\u01b6\t\f\2\2\u01b5\u01b4\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b8\3\2"+
"\2\2\u01b7\u01b9\t\t\2\2\u01b8\u01b7\3\2\2\2\u01b9\u01ba\3\2\2\2\u01ba"+
"\u01b8\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bb\u01bd\3\2\2\2\u01bc\u01b3\3\2"+
"\2\2\u01bc\u01bd\3\2\2\2\u01bd\u01bf\3\2\2\2\u01be\u01c0\t\r\2\2\u01bf"+
"\u01be\3\2\2\2\u01bf\u01c0\3\2\2\2\u01c0\u0089\3\2\2\2\u01c1\u01c9\7$"+
"\2\2\u01c2\u01c3\7^\2\2\u01c3\u01c8\7$\2\2\u01c4\u01c5\7^\2\2\u01c5\u01c8"+
"\7^\2\2\u01c6\u01c8\n\16\2\2\u01c7\u01c2\3\2\2\2\u01c7\u01c4\3\2\2\2\u01c7"+
"\u01c6\3\2\2\2\u01c8\u01cb\3\2\2\2\u01c9\u01ca\3\2\2\2\u01c9\u01c7\3\2"+
"\2\2\u01ca\u01cc\3\2\2\2\u01cb\u01c9\3\2\2\2\u01cc\u01cd\7$\2\2\u01cd"+
"\u01ce\bE\4\2\u01ce\u008b\3\2\2\2\u01cf\u01d0\7)\2\2\u01d0\u01d1\13\2"+
"\2\2\u01d1\u01d2\7)\2\2\u01d2\u01d3\bF\5\2\u01d3\u008d\3\2\2\2\u01d4\u01d5"+
"\7v\2\2\u01d5\u01d6\7t\2\2\u01d6\u01d7\7w\2\2\u01d7\u01d8\7g\2\2\u01d8"+
"\u008f\3\2\2\2\u01d9\u01da\7h\2\2\u01da\u01db\7c\2\2\u01db\u01dc\7n\2"+
"\2\u01dc\u01dd\7u\2\2\u01dd\u01de\7g\2\2\u01de\u0091\3\2\2\2\u01df\u01e0"+
"\7p\2\2\u01e0\u01e1\7w\2\2\u01e1\u01e2\7n\2\2\u01e2\u01e3\7n\2\2\u01e3"+
"\u0093\3\2\2\2\u01e4\u01e6\5\u0098L\2\u01e5\u01e7\5\u0096K\2\u01e6\u01e5"+
"\3\2\2\2\u01e6\u01e7\3\2\2\2\u01e7\u01e8\3\2\2\2\u01e8\u01e9\6J\2\2\u01e9"+
"\u01ea\bJ\6\2\u01ea\u0095\3\2\2\2\u01eb\u01ed\7\"\2\2\u01ec\u01eb\3\2"+
"\2\2\u01ed\u01f0\3\2\2\2\u01ee\u01ec\3\2\2\2\u01ee\u01ef\3\2\2\2\u01ef"+
"\u01f1\3\2\2\2\u01f0\u01ee\3\2\2\2\u01f1\u01f5\7>\2\2\u01f2\u01f4\7\""+
"\2\2\u01f3\u01f2\3\2\2\2\u01f4\u01f7\3\2\2\2\u01f5\u01f3\3\2\2\2\u01f5"+
"\u01f6\3\2\2\2\u01f6\u01f8\3\2\2\2\u01f7\u01f5\3\2\2\2\u01f8\u01fa\5\u0098"+
"L\2\u01f9\u01fb\5\u0096K\2\u01fa\u01f9\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb"+
"\u01ff\3\2\2\2\u01fc\u01fe\7\"\2\2\u01fd\u01fc\3\2\2\2\u01fe\u0201\3\2"+
"\2\2\u01ff\u01fd\3\2\2\2\u01ff\u0200\3\2\2\2\u0200\u0215\3\2\2\2\u0201"+
"\u01ff\3\2\2\2\u0202\u0206\5\26\13\2\u0203\u0205\7\"\2\2\u0204\u0203\3"+
"\2\2\2\u0205\u0208\3\2\2\2\u0206\u0204\3\2\2\2\u0206\u0207\3\2\2\2\u0207"+
"\u0209\3\2\2\2\u0208\u0206\3\2\2\2\u0209\u020b\5\u0098L\2\u020a\u020c"+
"\5\u0096K\2\u020b\u020a\3\2\2\2\u020b\u020c\3\2\2\2\u020c\u0210\3\2\2"+
"\2\u020d\u020f\7\"\2\2\u020e\u020d\3\2\2\2\u020f\u0212\3\2\2\2\u0210\u020e"+
"\3\2\2\2\u0210\u0211\3\2\2\2\u0211\u0214\3\2\2\2\u0212\u0210\3\2\2\2\u0213"+
"\u0202\3\2\2\2\u0214\u0217\3\2\2\2\u0215\u0213\3\2\2\2\u0215\u0216\3\2"+
"\2\2\u0216\u0218\3\2\2\2\u0217\u0215\3\2\2\2\u0218\u0219\7@\2\2\u0219"+
"\u0097\3\2\2\2\u021a\u021e\t\17\2\2\u021b\u021d\t\20\2\2\u021c\u021b\3"+
"\2\2\2\u021d\u0220\3\2\2\2\u021e\u021c\3\2\2\2\u021e\u021f\3\2\2\2\u021f"+
"\u0099\3\2\2\2\u0220\u021e\3\2\2\2\u0221\u022a\7\62\2\2\u0222\u0226\t"+
"\b\2\2\u0223\u0225\t\t\2\2\u0224\u0223\3\2\2\2\u0225\u0228\3\2\2\2\u0226"+
"\u0224\3\2\2\2\u0226\u0227\3\2\2\2\u0227\u022a\3\2\2\2\u0228\u0226\3\2"+
"\2\2\u0229\u0221\3\2\2\2\u0229\u0222\3\2\2\2\u022a\u022b\3\2\2\2\u022b"+
"\u022c\bM\7\2\u022c\u009b\3\2\2\2\u022d\u0231\t\17\2\2\u022e\u0230\t\20"+
"\2\2\u022f\u022e\3\2\2\2\u0230\u0233\3\2\2\2\u0231\u022f\3\2\2\2\u0231"+
"\u0232\3\2\2\2\u0232\u0234\3\2\2\2\u0233\u0231\3\2\2\2\u0234\u0235\bN"+
"\7\2\u0235\u009d\3\2\2\2%\2\3\u00a1\u00ab\u00b5\u00ba\u0186\u0189\u0190"+
"\u0193\u019a\u019d\u01a0\u01a7\u01aa\u01b0\u01b5\u01ba\u01bc\u01bf\u01c7"+
"\u01c9\u01e6\u01ee\u01f5\u01fa\u01ff\u0206\u020b\u0210\u0215\u021e\u0226"+
"\u0229\u0231\b\b\2\2\4\3\2\3E\2\3F\3\3J\4\4\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}

File diff suppressed because it is too large.

View File

@ -0,0 +1,357 @@
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.plan.a;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
/**
* This class provides an empty implementation of {@link PlanAParserVisitor},
* which can be extended to create a visitor which only needs to handle a subset
* of the available methods.
*
* @param <T> The return type of the visit operation. Use {@link Void} for
* operations with no return type.
*/
class PlanAParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements PlanAParserVisitor<T> {
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSource(PlanAParser.SourceContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitIf(PlanAParser.IfContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitWhile(PlanAParser.WhileContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitDo(PlanAParser.DoContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFor(PlanAParser.ForContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitDecl(PlanAParser.DeclContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitContinue(PlanAParser.ContinueContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBreak(PlanAParser.BreakContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitReturn(PlanAParser.ReturnContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTry(PlanAParser.TryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitThrow(PlanAParser.ThrowContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExpr(PlanAParser.ExprContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMultiple(PlanAParser.MultipleContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitSingle(PlanAParser.SingleContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitEmpty(PlanAParser.EmptyContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitInitializer(PlanAParser.InitializerContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitAfterthought(PlanAParser.AfterthoughtContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitDeclaration(PlanAParser.DeclarationContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitDecltype(PlanAParser.DecltypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitDeclvar(PlanAParser.DeclvarContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitComp(PlanAParser.CompContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBool(PlanAParser.BoolContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitConditional(PlanAParser.ConditionalContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitAssignment(PlanAParser.AssignmentContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFalse(PlanAParser.FalseContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNumeric(PlanAParser.NumericContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitUnary(PlanAParser.UnaryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPrecedence(PlanAParser.PrecedenceContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPreinc(PlanAParser.PreincContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPostinc(PlanAParser.PostincContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitCast(PlanAParser.CastContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExternal(PlanAParser.ExternalContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNull(PlanAParser.NullContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitBinary(PlanAParser.BinaryContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitChar(PlanAParser.CharContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTrue(PlanAParser.TrueContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtstart(PlanAParser.ExtstartContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtprec(PlanAParser.ExtprecContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtcast(PlanAParser.ExtcastContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtbrace(PlanAParser.ExtbraceContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtdot(PlanAParser.ExtdotContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExttype(PlanAParser.ExttypeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtcall(PlanAParser.ExtcallContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtvar(PlanAParser.ExtvarContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtfield(PlanAParser.ExtfieldContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtnew(PlanAParser.ExtnewContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtstring(PlanAParser.ExtstringContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitArguments(PlanAParser.ArgumentsContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitIncrement(PlanAParser.IncrementContext ctx) { return visitChildren(ctx); }
}
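
Because every method defaults to visitChildren, a subclass only has to override the alternatives it cares about. A minimal sketch that counts if statements while still walking the rest of the tree; the class is illustrative and must live in this package since the generated types are package-private:

package org.elasticsearch.plan.a;

// Illustrative only: counts `if` statements in a parse tree; call new IfCounter().visit(root).
final class IfCounter extends PlanAParserBaseVisitor<Void> {
    int count = 0;

    @Override
    public Void visitIf(PlanAParser.IfContext ctx) {
        ++count;                    // record this `if`, then keep visiting its children
        return visitChildren(ctx);
    }
}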

View File

@ -0,0 +1,336 @@
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.plan.a;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
/**
* This interface defines a complete generic visitor for a parse tree produced
* by {@link PlanAParser}.
*
* @param <T> The return type of the visit operation. Use {@link Void} for
* operations with no return type.
*/
interface PlanAParserVisitor<T> extends ParseTreeVisitor<T> {
/**
* Visit a parse tree produced by {@link PlanAParser#source}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitSource(PlanAParser.SourceContext ctx);
/**
* Visit a parse tree produced by the {@code if}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitIf(PlanAParser.IfContext ctx);
/**
* Visit a parse tree produced by the {@code while}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitWhile(PlanAParser.WhileContext ctx);
/**
* Visit a parse tree produced by the {@code do}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitDo(PlanAParser.DoContext ctx);
/**
* Visit a parse tree produced by the {@code for}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFor(PlanAParser.ForContext ctx);
/**
* Visit a parse tree produced by the {@code decl}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitDecl(PlanAParser.DeclContext ctx);
/**
* Visit a parse tree produced by the {@code continue}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitContinue(PlanAParser.ContinueContext ctx);
/**
* Visit a parse tree produced by the {@code break}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitBreak(PlanAParser.BreakContext ctx);
/**
* Visit a parse tree produced by the {@code return}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitReturn(PlanAParser.ReturnContext ctx);
/**
* Visit a parse tree produced by the {@code try}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitTry(PlanAParser.TryContext ctx);
/**
* Visit a parse tree produced by the {@code throw}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitThrow(PlanAParser.ThrowContext ctx);
/**
* Visit a parse tree produced by the {@code expr}
* labeled alternative in {@link PlanAParser#statement}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExpr(PlanAParser.ExprContext ctx);
/**
* Visit a parse tree produced by the {@code multiple}
* labeled alternative in {@link PlanAParser#block}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitMultiple(PlanAParser.MultipleContext ctx);
/**
* Visit a parse tree produced by the {@code single}
* labeled alternative in {@link PlanAParser#block}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitSingle(PlanAParser.SingleContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#empty}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitEmpty(PlanAParser.EmptyContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#initializer}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitInitializer(PlanAParser.InitializerContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#afterthought}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitAfterthought(PlanAParser.AfterthoughtContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#declaration}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitDeclaration(PlanAParser.DeclarationContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#decltype}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitDecltype(PlanAParser.DecltypeContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#declvar}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitDeclvar(PlanAParser.DeclvarContext ctx);
/**
* Visit a parse tree produced by the {@code comp}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitComp(PlanAParser.CompContext ctx);
/**
* Visit a parse tree produced by the {@code bool}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitBool(PlanAParser.BoolContext ctx);
/**
* Visit a parse tree produced by the {@code conditional}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitConditional(PlanAParser.ConditionalContext ctx);
/**
* Visit a parse tree produced by the {@code assignment}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitAssignment(PlanAParser.AssignmentContext ctx);
/**
* Visit a parse tree produced by the {@code false}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFalse(PlanAParser.FalseContext ctx);
/**
* Visit a parse tree produced by the {@code numeric}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitNumeric(PlanAParser.NumericContext ctx);
/**
* Visit a parse tree produced by the {@code unary}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitUnary(PlanAParser.UnaryContext ctx);
/**
* Visit a parse tree produced by the {@code precedence}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitPrecedence(PlanAParser.PrecedenceContext ctx);
/**
* Visit a parse tree produced by the {@code preinc}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitPreinc(PlanAParser.PreincContext ctx);
/**
* Visit a parse tree produced by the {@code postinc}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitPostinc(PlanAParser.PostincContext ctx);
/**
* Visit a parse tree produced by the {@code cast}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitCast(PlanAParser.CastContext ctx);
/**
* Visit a parse tree produced by the {@code external}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExternal(PlanAParser.ExternalContext ctx);
/**
* Visit a parse tree produced by the {@code null}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitNull(PlanAParser.NullContext ctx);
/**
* Visit a parse tree produced by the {@code binary}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitBinary(PlanAParser.BinaryContext ctx);
/**
* Visit a parse tree produced by the {@code char}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitChar(PlanAParser.CharContext ctx);
/**
* Visit a parse tree produced by the {@code true}
* labeled alternative in {@link PlanAParser#expression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitTrue(PlanAParser.TrueContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extstart}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtstart(PlanAParser.ExtstartContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extprec}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtprec(PlanAParser.ExtprecContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extcast}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtcast(PlanAParser.ExtcastContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extbrace}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtbrace(PlanAParser.ExtbraceContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extdot}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtdot(PlanAParser.ExtdotContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#exttype}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExttype(PlanAParser.ExttypeContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extcall}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtcall(PlanAParser.ExtcallContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extvar}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtvar(PlanAParser.ExtvarContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extfield}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtfield(PlanAParser.ExtfieldContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extnew}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtnew(PlanAParser.ExtnewContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#extstring}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtstring(PlanAParser.ExtstringContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#arguments}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitArguments(PlanAParser.ArgumentsContext ctx);
/**
* Visit a parse tree produced by {@link PlanAParser#increment}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitIncrement(PlanAParser.IncrementContext ctx);
}
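
A rough, hypothetical sketch of how a generated visitor like this is typically consumed (it is not part of this commit): the class below counts "if" statements in a Plan A parse tree. It assumes the generated base class follows the usual ANTLR naming convention, PlanAParserBaseVisitor, and that it extends ANTLR's AbstractParseTreeVisitor, which supplies the defaultResult() and aggregateResult() hooks used here.

package org.elasticsearch.plan.a;

// Hypothetical example, not part of the plugin sources; assumes the ANTLR-generated
// PlanAParserBaseVisitor base class whose default methods simply call visitChildren.
class IfCounterVisitor extends PlanAParserBaseVisitor<Integer> {

    @Override
    protected Integer defaultResult() {
        return 0; // nodes with nothing of interest contribute zero
    }

    @Override
    protected Integer aggregateResult(Integer aggregate, Integer nextResult) {
        return aggregate + nextResult; // sum the counts gathered from child nodes
    }

    @Override
    public Integer visitIf(PlanAParser.IfContext ctx) {
        return 1 + visitChildren(ctx); // count this "if", then keep walking nested statements
    }
}

A tree produced by PlanAParser could then be handed to new IfCounterVisitor().visit(tree).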

View File

@ -0,0 +1,40 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule;
public final class PlanAPlugin extends Plugin {
@Override
public String name() {
return "lang-plan-a";
}
@Override
public String description() {
return "Plan A scripting language for Elasticsearch";
}
public void onModule(ScriptModule module) {
module.addScriptEngine(PlanAScriptEngineService.class);
}
}

View File

@ -0,0 +1,140 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.Permissions;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;
import java.util.Map;
public class PlanAScriptEngineService extends AbstractComponent implements ScriptEngineService {
public static final String NAME = "plan-a";
// TODO: this should really be per-script since scripts do so many different things?
private static final CompilerSettings compilerSettings = new CompilerSettings();
public static final String NUMERIC_OVERFLOW = "plan-a.numeric_overflow";
// TODO: how should custom definitions be specified?
private Definition definition = null;
@Inject
public PlanAScriptEngineService(Settings settings) {
super(settings);
compilerSettings.setNumericOverflow(settings.getAsBoolean(NUMERIC_OVERFLOW, compilerSettings.getNumericOverflow()));
}
public void setDefinition(final Definition definition) {
this.definition = new Definition(definition);
}
@Override
public String[] types() {
return new String[] { NAME };
}
@Override
public String[] extensions() {
return new String[] { NAME };
}
@Override
public boolean sandboxed() {
return true;
}
// context used during compilation
private static final AccessControlContext COMPILATION_CONTEXT;
static {
Permissions none = new Permissions();
none.setReadOnly();
COMPILATION_CONTEXT = new AccessControlContext(new ProtectionDomain[] {
new ProtectionDomain(null, none)
});
}
@Override
public Object compile(String script) {
// check we ourselves are not being called by unprivileged code
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPermission(new SpecialPermission());
}
// create our loader (which loads compiled code with no permissions)
Compiler.Loader loader = AccessController.doPrivileged(new PrivilegedAction<Compiler.Loader>() {
@Override
public Compiler.Loader run() {
return new Compiler.Loader(getClass().getClassLoader());
}
});
// drop all permissions to actually compile the code itself
return AccessController.doPrivileged(new PrivilegedAction<Executable>() {
@Override
public Executable run() {
return Compiler.compile(loader, "something", script, definition, compilerSettings);
}
}, COMPILATION_CONTEXT);
}
@Override
public ExecutableScript executable(CompiledScript compiledScript, Map<String,Object> vars) {
return new ScriptImpl((Executable) compiledScript.compiled(), vars, null);
}
@Override
public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String,Object> vars) {
return new SearchScript() {
@Override
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
return new ScriptImpl((Executable) compiledScript.compiled(), vars, lookup.getLeafSearchLookup(context));
}
@Override
public boolean needsScores() {
return true; // TODO: maybe even do these differently, more like expressions.
}
};
}
@Override
public void scriptRemoved(CompiledScript script) {
// nothing to do
}
@Override
public void close() throws IOException {
// nothing to do
}
}
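
The compile() method above relies on a general java.security idiom: run the compiler inside AccessController.doPrivileged with an explicitly empty AccessControlContext, so permission checks made while compiling must also be satisfied by a domain that grants nothing, no matter how privileged the caller is. Below is a minimal, self-contained sketch of that idiom; the class and method names are illustrative, not from the commit.

import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.Permissions;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;

// Hypothetical example, not part of the plugin sources.
public class RestrictedCompileSketch {

    // A context whose single protection domain carries an empty, read-only permission set,
    // mirroring COMPILATION_CONTEXT above.
    private static final AccessControlContext NO_PERMISSIONS;
    static {
        Permissions none = new Permissions();
        none.setReadOnly();
        NO_PERMISSIONS = new AccessControlContext(
                new ProtectionDomain[] { new ProtectionDomain(null, none) });
    }

    static String compileUntrusted(final String source) {
        // Checks triggered inside run() must pass NO_PERMISSIONS in addition to the caller's
        // own domain, so the "compilation" step effectively runs with no permissions at all.
        return AccessController.doPrivileged(new PrivilegedAction<String>() {
            @Override
            public String run() {
                return "compiled: " + source; // stand-in for the real Compiler.compile(...) call
            }
        }, NO_PERMISSIONS);
    }
}

The SpecialPermission check at the top of compile() serves the complementary purpose: it stops unprivileged callers from reaching the privileged class-loader creation in the first place.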

View File

@ -0,0 +1,96 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.ScoreAccessor;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import java.util.HashMap;
import java.util.Map;
final class ScriptImpl implements ExecutableScript, LeafSearchScript {
final Executable executable;
final Map<String,Object> variables;
final LeafSearchLookup lookup;
ScriptImpl(Executable executable, Map<String,Object> vars, LeafSearchLookup lookup) {
this.executable = executable;
this.lookup = lookup;
this.variables = new HashMap<>();
if (vars != null) {
variables.putAll(vars);
}
if (lookup != null) {
variables.putAll(lookup.asMap());
}
}
@Override
public void setNextVar(String name, Object value) {
variables.put(name, value);
}
@Override
public Object run() {
return executable.execute(variables);
}
@Override
public float runAsFloat() {
return ((Number) run()).floatValue();
}
@Override
public long runAsLong() {
return ((Number) run()).longValue();
}
@Override
public double runAsDouble() {
return ((Number) run()).doubleValue();
}
@Override
public Object unwrap(Object value) {
return value;
}
@Override
public void setScorer(Scorer scorer) {
variables.put("_score", new ScoreAccessor(scorer));
}
@Override
public void setDocument(int doc) {
if (lookup != null) {
lookup.setDocument(doc);
}
}
@Override
public void setSource(Map<String,Object> source) {
if (lookup != null) {
lookup.source().setSource(source);
}
}
}

View File

@ -0,0 +1,801 @@
package org.elasticsearch.plan.a;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
public class Utility {
public static boolean NumberToboolean(final Number value) {
return value.longValue() != 0;
}
public static char NumberTochar(final Number value) {
return (char)value.intValue();
}
public static Boolean NumberToBoolean(final Number value) {
return value.longValue() != 0;
}
public static Byte NumberToByte(final Number value) {
return value == null ? null : value.byteValue();
}
public static Short NumberToShort(final Number value) {
return value == null ? null : value.shortValue();
}
public static Character NumberToCharacter(final Number value) {
return value == null ? null : (char)value.intValue();
}
public static Integer NumberToInteger(final Number value) {
return value == null ? null : value.intValue();
}
public static Long NumberToLong(final Number value) {
return value == null ? null : value.longValue();
}
public static Float NumberToFloat(final Number value) {
return value == null ? null : value.floatValue();
}
public static Double NumberToDouble(final Number value) {
return value == null ? null : value.doubleValue();
}
public static byte booleanTobyte(final boolean value) {
return (byte)(value ? 1 : 0);
}
public static short booleanToshort(final boolean value) {
return (short)(value ? 1 : 0);
}
public static char booleanTochar(final boolean value) {
return (char)(value ? 1 : 0);
}
public static int booleanToint(final boolean value) {
return value ? 1 : 0;
}
public static long booleanTolong(final boolean value) {
return value ? 1 : 0;
}
public static float booleanTofloat(final boolean value) {
return value ? 1 : 0;
}
public static double booleanTodouble(final boolean value) {
return value ? 1 : 0;
}
public static Integer booleanToInteger(final boolean value) {
return value ? 1 : 0;
}
public static byte BooleanTobyte(final Boolean value) {
return (byte)(value ? 1 : 0);
}
public static short BooleanToshort(final Boolean value) {
return (short)(value ? 1 : 0);
}
public static char BooleanTochar(final Boolean value) {
return (char)(value ? 1 : 0);
}
public static int BooleanToint(final Boolean value) {
return value ? 1 : 0;
}
public static long BooleanTolong(final Boolean value) {
return value ? 1 : 0;
}
public static float BooleanTofloat(final Boolean value) {
return value ? 1 : 0;
}
public static double BooleanTodouble(final Boolean value) {
return value ? 1 : 0;
}
public static Byte BooleanToByte(final Boolean value) {
return value == null ? null : (byte)(value ? 1 : 0);
}
public static Short BooleanToShort(final Boolean value) {
return value == null ? null : (short)(value ? 1 : 0);
}
public static Character BooleanToCharacter(final Boolean value) {
return value == null ? null : (char)(value ? 1 : 0);
}
public static Integer BooleanToInteger(final Boolean value) {
return value == null ? null : value ? 1 : 0;
}
public static Long BooleanToLong(final Boolean value) {
return value == null ? null : value ? 1L : 0L;
}
public static Float BooleanToFloat(final Boolean value) {
return value == null ? null : value ? 1F : 0F;
}
public static Double BooleanToDouble(final Boolean value) {
return value == null ? null : value ? 1D : 0D;
}
public static boolean byteToboolean(final byte value) {
return value != 0;
}
public static Short byteToShort(final byte value) {
return (short)value;
}
public static Character byteToCharacter(final byte value) {
return (char)(byte)value;
}
public static Integer byteToInteger(final byte value) {
return (int)value;
}
public static Long byteToLong(final byte value) {
return (long)value;
}
public static Float byteToFloat(final byte value) {
return (float)value;
}
public static Double byteToDouble(final byte value) {
return (double)value;
}
public static boolean ByteToboolean(final Byte value) {
return value != 0;
}
public static char ByteTochar(final Byte value) {
return (char)value.byteValue();
}
public static boolean shortToboolean(final short value) {
return value != 0;
}
public static Byte shortToByte(final short value) {
return (byte)value;
}
public static Character shortToCharacter(final short value) {
return (char)(short)value;
}
public static Integer shortToInteger(final short value) {
return (int)value;
}
public static Long shortToLong(final short value) {
return (long)value;
}
public static Float shortToFloat(final short value) {
return (float)value;
}
public static Double shortToDouble(final short value) {
return (double)value;
}
public static boolean ShortToboolean(final Short value) {
return value != 0;
}
public static char ShortTochar(final Short value) {
return (char)value.shortValue();
}
public static boolean charToboolean(final char value) {
return value != 0;
}
public static Byte charToByte(final char value) {
return (byte)value;
}
public static Short charToShort(final char value) {
return (short)value;
}
public static Integer charToInteger(final char value) {
return (int)value;
}
public static Long charToLong(final char value) {
return (long)value;
}
public static Float charToFloat(final char value) {
return (float)value;
}
public static Double charToDouble(final char value) {
return (double)value;
}
public static boolean CharacterToboolean(final Character value) {
return value != 0;
}
public static byte CharacterTobyte(final Character value) {
return (byte)value.charValue();
}
public static short CharacterToshort(final Character value) {
return (short)value.charValue();
}
public static int CharacterToint(final Character value) {
return (int)value;
}
public static long CharacterTolong(final Character value) {
return (long)value;
}
public static float CharacterTofloat(final Character value) {
return (float)value;
}
public static double CharacterTodouble(final Character value) {
return (double)value;
}
public static Boolean CharacterToBoolean(final Character value) {
return value == null ? null : value != 0;
}
public static Byte CharacterToByte(final Character value) {
return value == null ? null : (byte)value.charValue();
}
public static Short CharacterToShort(final Character value) {
return value == null ? null : (short)value.charValue();
}
public static Integer CharacterToInteger(final Character value) {
return value == null ? null : (int)value;
}
public static Long CharacterToLong(final Character value) {
return value == null ? null : (long)value;
}
public static Float CharacterToFloat(final Character value) {
return value == null ? null : (float)value;
}
public static Double CharacterToDouble(final Character value) {
return value == null ? null : (double)value;
}
public static boolean intToboolean(final int value) {
return value != 0;
}
public static Byte intToByte(final int value) {
return (byte)value;
}
public static Short intToShort(final int value) {
return (short)value;
}
public static Character intToCharacter(final int value) {
return (char)(int)value;
}
public static Long intToLong(final int value) {
return (long)value;
}
public static Float intToFloat(final int value) {
return (float)value;
}
public static Double intToDouble(final int value) {
return (double)value;
}
public static boolean IntegerToboolean(final Integer value) {
return value != 0;
}
public static char IntegerTochar(final Integer value) {
return (char)value.intValue();
}
public static boolean longToboolean(final long value) {
return value != 0;
}
public static Byte longToByte(final long value) {
return (byte)value;
}
public static Short longToShort(final long value) {
return (short)value;
}
public static Character longToCharacter(final long value) {
return (char)(long)value;
}
public static Integer longToInteger(final long value) {
return (int)value;
}
public static Float longToFloat(final long value) {
return (float)value;
}
public static Double longToDouble(final long value) {
return (double)value;
}
public static boolean LongToboolean(final Long value) {
return value != 0;
}
public static char LongTochar(final Long value) {
return (char)value.longValue();
}
public static boolean floatToboolean(final float value) {
return value != 0;
}
public static Byte floatToByte(final float value) {
return (byte)value;
}
public static Short floatToShort(final float value) {
return (short)value;
}
public static Character floatToCharacter(final float value) {
return (char)(float)value;
}
public static Integer floatToInteger(final float value) {
return (int)value;
}
public static Long floatToLong(final float value) {
return (long)value;
}
public static Double floatToDouble(final float value) {
return (double)value;
}
public static boolean FloatToboolean(final Float value) {
return value != 0;
}
public static char FloatTochar(final Float value) {
return (char)value.floatValue();
}
public static boolean doubleToboolean(final double value) {
return value != 0;
}
public static Byte doubleToByte(final double value) {
return (byte)value;
}
public static Short doubleToShort(final double value) {
return (short)value;
}
public static Character doubleToCharacter(final double value) {
return (char)(double)value;
}
public static Integer doubleToInteger(final double value) {
return (int)value;
}
public static Long doubleToLong(final double value) {
return (long)value;
}
public static Float doubleToFloat(final double value) {
return (float)value;
}
public static boolean DoubleToboolean(final Double value) {
return value != 0;
}
public static char DoubleTochar(final Double value) {
return (char)value.doubleValue();
}
// although the JVM guarantees an ArithmeticException on integer divide-by-zero,
// the special overflow case is not caught.
// it's not needed for remainder because overflow is not possible there.
// see https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.17.2
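// e.g. Integer.MIN_VALUE / -1 cannot be represented as an int and silently yields
// Integer.MIN_VALUE again, so the int and long divisions below reject that operand pair.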
/**
* Integer divide without overflow
* @throws ArithmeticException on overflow or divide-by-zero
*/
public static int divideWithoutOverflow(int x, int y) {
if (x == Integer.MIN_VALUE && y == -1) {
throw new ArithmeticException("integer overflow");
}
return x / y;
}
/**
* Long divide without overflow
* @throws ArithmeticException on overflow or divide-by-zero
*/
public static long divideWithoutOverflow(long x, long y) {
if (x == Long.MIN_VALUE && y == -1L) {
throw new ArithmeticException("long overflow");
}
return x / y;
}
// byte, short, and char are promoted to int for normal operations,
// so the JDK exact methods are typically used, and the result has a wider range.
// but compound assignments and increment/decrement operators (e.g. byte b = Byte.MAX_VALUE; b++;)
// implicitly cast back to the original type: so these need to be checked against the original range.
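// e.g. without a check, byte b = Byte.MAX_VALUE; b++; wraps b around to Byte.MIN_VALUE (-128).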
/**
* Like {@link Math#toIntExact(long)} but for byte range.
*/
public static byte toByteExact(int value) {
byte s = (byte) value;
if (s != value) {
throw new ArithmeticException("byte overflow");
}
return s;
}
/**
* Like {@link Math#toIntExact(long)} but for byte range.
*/
public static byte toByteExact(long value) {
byte s = (byte) value;
if (s != value) {
throw new ArithmeticException("byte overflow");
}
return s;
}
/**
* Like {@link Math#toIntExact(long)} but for byte range.
*/
public static byte toByteWithoutOverflow(float value) {
if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
throw new ArithmeticException("byte overflow");
}
return (byte)value;
}
/**
* Like {@link Math#toIntExact(long)} but for byte range.
*/
public static byte toByteWithoutOverflow(double value) {
if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
throw new ArithmeticException("byte overflow");
}
return (byte)value;
}
/**
* Like {@link Math#toIntExact(long)} but for short range.
*/
public static short toShortExact(int value) {
short s = (short) value;
if (s != value) {
throw new ArithmeticException("short overflow");
}
return s;
}
/**
* Like {@link Math#toIntExact(long)} but for short range.
*/
public static short toShortExact(long value) {
short s = (short) value;
if (s != value) {
throw new ArithmeticException("short overflow");
}
return s;
}
/**
* Like {@link Math#toIntExact(long)} but for short range.
*/
public static short toShortWithoutOverflow(float value) {
if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
throw new ArithmeticException("short overflow");
}
return (short)value;
}
/**
* Like {@link Math#toIntExact(long)} but for short range.
*/
public static short toShortExact(double value) {
if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
throw new ArithmeticException("short overflow");
}
return (short)value;
}
/**
* Like {@link Math#toIntExact(long)} but for char range.
*/
public static char toCharExact(int value) {
char s = (char) value;
if (s != value) {
throw new ArithmeticException("char overflow");
}
return s;
}
/**
* Like {@link Math#toIntExact(long)} but for char range.
*/
public static char toCharExact(long value) {
char s = (char) value;
if (s != value) {
throw new ArithmeticException("char overflow");
}
return s;
}
/**
* Like {@link Math#toIntExact(long)} but for char range.
*/
public static char toCharWithoutOverflow(float value) {
if (value < Character.MIN_VALUE || value > Character.MAX_VALUE) {
throw new ArithmeticException("char overflow");
}
return (char)value;
}
/**
* Like {@link Math#toIntExact(long)} but for char range.
*/
public static char toCharWithoutOverflow(double value) {
if (value < Character.MIN_VALUE || value > Character.MAX_VALUE) {
throw new ArithmeticException("char overflow");
}
return (char)value;
}
/**
* Like {@link Math#toIntExact(long)} but for int range.
*/
public static int toIntWithoutOverflow(float value) {
if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) {
throw new ArithmeticException("int overflow");
}
return (int)value;
}
/**
* Like {@link Math#toIntExact(long)} but for int range.
*/
public static int toIntWithoutOverflow(double value) {
if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) {
throw new ArithmeticException("int overflow");
}
return (int)value;
}
/**
* Like {@link Math#toIntExact(long)} but for long range.
*/
public static long toLongExactWithoutOverflow(float value) {
if (value < Long.MIN_VALUE || value > Long.MAX_VALUE) {
throw new ArithmeticException("long overflow");
}
return (long)value;
}
/**
* Like {@link Math#toIntExact(long)} but for long range.
*/
public static long toLongExactWithoutOverflow(double value) {
if (value < Long.MIN_VALUE || value > Long.MAX_VALUE) {
throw new ArithmeticException("long overflow");
}
return (long)value;
}
/**
* Like {@link Math#toIntExact(long)} but for float range.
*/
public static float toFloatWithoutOverflow(double value) {
if (value < -Float.MAX_VALUE || value > Float.MAX_VALUE) {
throw new ArithmeticException("float overflow");
}
return (float)value;
}
/**
* Checks for overflow, result is infinite but operands are finite
* @throws ArithmeticException if overflow occurred
*/
private static float checkInfFloat(float x, float y, float z) {
if (Float.isInfinite(z)) {
if (Float.isFinite(x) && Float.isFinite(y)) {
throw new ArithmeticException("float overflow");
}
}
return z;
}
/**
* Checks for NaN, result is NaN but operands are finite
* @throws ArithmeticException if the result is NaN while both operands are finite
*/
private static float checkNaNFloat(float x, float y, float z) {
if (Float.isNaN(z)) {
if (Float.isFinite(x) && Float.isFinite(y)) {
throw new ArithmeticException("NaN");
}
}
return z;
}
/**
* Checks for overflow, result is infinite but operands are finite
* @throws ArithmeticException if overflow occurred
*/
private static double checkInfDouble(double x, double y, double z) {
if (Double.isInfinite(z)) {
if (Double.isFinite(x) && Double.isFinite(y)) {
throw new ArithmeticException("double overflow");
}
}
return z;
}
/**
* Checks for NaN, result is NaN but operands are finite
* @throws ArithmeticException if the result is NaN while both operands are finite
*/
private static double checkNaNDouble(double x, double y, double z) {
if (Double.isNaN(z)) {
if (Double.isFinite(x) && Double.isFinite(y)) {
throw new ArithmeticException("NaN");
}
}
return z;
}
/**
* Adds two floats but throws {@code ArithmeticException}
* if the result overflows.
*/
public static float addWithoutOverflow(float x, float y) {
return checkInfFloat(x, y, x + y);
}
/**
* Adds two doubles but throws {@code ArithmeticException}
* if the result overflows.
*/
public static double addWithoutOverflow(double x, double y) {
return checkInfDouble(x, y, x + y);
}
/**
* Subtracts two floats but throws {@code ArithmeticException}
* if the result overflows.
*/
public static float subtractWithoutOverflow(float x, float y) {
return checkInfFloat(x, y, x - y);
}
/**
* Subtracts two doubles but throws {@code ArithmeticException}
* if the result overflows.
*/
public static double subtractWithoutOverflow(double x, double y) {
return checkInfDouble(x, y , x - y);
}
/**
* Multiplies two floats but throws {@code ArithmeticException}
* if the result overflows.
*/
public static float multiplyWithoutOverflow(float x, float y) {
return checkInfFloat(x, y, x * y);
}
/**
* Multiplies two doubles but throws {@code ArithmeticException}
* if the result overflows.
*/
public static double multiplyWithoutOverflow(double x, double y) {
return checkInfDouble(x, y, x * y);
}
/**
* Divides two floats but throws {@code ArithmeticException}
* if the result overflows, or would create NaN from finite
* inputs ({@code x == 0, y == 0})
*/
public static float divideWithoutOverflow(float x, float y) {
return checkNaNFloat(x, y, checkInfFloat(x, y, x / y));
}
/**
* Divides two doubles but throws {@code ArithmeticException}
* if the result overflows, or would create NaN from finite
* inputs ({@code x == 0, y == 0})
*/
public static double divideWithoutOverflow(double x, double y) {
return checkNaNDouble(x, y, checkInfDouble(x, y, x / y));
}
/**
* Takes the remainder of two floats but throws {@code ArithmeticException}
* if the result would create NaN from finite inputs ({@code y == 0})
*/
public static float remainderWithoutOverflow(float x, float y) {
return checkNaNFloat(x, y, x % y);
}
/**
* Takes the remainder of two doubles but throws {@code ArithmeticException}
* if the result would create NaN from finite inputs ({@code y == 0})
*/
public static double remainderWithoutOverflow(double x, double y) {
return checkNaNDouble(x, y, x % y);
}
public static boolean checkEquals(final Object left, final Object right) {
if (left != null && right != null) {
return left.equals(right);
}
return left == null && right == null;
}
private Utility() {}
}
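
A short, hypothetical snippet (not part of the commit) showing how the checked helpers above behave where plain Java arithmetic would silently wrap or return infinity; it only assumes Utility is on the classpath.

import org.elasticsearch.plan.a.Utility;

// Hypothetical example, not part of the plugin sources.
public class UtilityOverflowExamples {
    public static void main(String[] args) {
        System.out.println(Utility.toByteExact(127));   // fits in a byte: prints 127
        try {
            Utility.toByteExact(128);                   // (byte)128 would wrap to -128
        } catch (ArithmeticException expected) {
            System.out.println("byte overflow caught");
        }
        try {
            // finite operands, infinite result: plain float addition would return Infinity
            Utility.addWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE);
        } catch (ArithmeticException expected) {
            System.out.println("float overflow caught");
        }
        try {
            // the single int division that overflows (see the JLS note in Utility)
            Utility.divideWithoutOverflow(Integer.MIN_VALUE, -1);
        } catch (ArithmeticException expected) {
            System.out.println("integer overflow caught");
        }
    }
}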

File diff suppressed because it is too large

View File

@ -0,0 +1,23 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
grant {
// needed to generate runtime classes
permission java.lang.RuntimePermission "createClassLoader";
};
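
The createClassLoader grant above exists because the script engine's Compiler.Loader is, per the comments in PlanAScriptEngineService, a new class loader for generated bytecode; under a SecurityManager that construction triggers exactly this runtime permission. A tiny, hypothetical illustration of the check involved:

// Hypothetical example, not part of the plugin sources.
public class ClassLoaderPermissionCheck {
    public static void main(String[] args) {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // This is the same check the JDK performs when a new ClassLoader is constructed.
            sm.checkPermission(new RuntimePermission("createClassLoader"));
        }
        System.out.println("createClassLoader permitted (or no security manager installed)");
    }
}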

View File

@ -0,0 +1,199 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.HashMap;
import java.util.Map;
/** Tests for addition operator across all types */
//TODO: NaN/Inf/overflow/...
public class AdditionTests extends ScriptTestCase {
public void testInt() throws Exception {
assertEquals(1+1, exec("int x = 1; int y = 1; return x+y;"));
assertEquals(1+2, exec("int x = 1; int y = 2; return x+y;"));
assertEquals(5+10, exec("int x = 5; int y = 10; return x+y;"));
assertEquals(1+1+2, exec("int x = 1; int y = 1; int z = 2; return x+y+z;"));
assertEquals((1+1)+2, exec("int x = 1; int y = 1; int z = 2; return (x+y)+z;"));
assertEquals(1+(1+2), exec("int x = 1; int y = 1; int z = 2; return x+(y+z);"));
assertEquals(0+1, exec("int x = 0; int y = 1; return x+y;"));
assertEquals(1+0, exec("int x = 1; int y = 0; return x+y;"));
assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;"));
}
public void testIntConst() throws Exception {
assertEquals(1+1, exec("return 1+1;"));
assertEquals(1+2, exec("return 1+2;"));
assertEquals(5+10, exec("return 5+10;"));
assertEquals(1+1+2, exec("return 1+1+2;"));
assertEquals((1+1)+2, exec("return (1+1)+2;"));
assertEquals(1+(1+2), exec("return 1+(1+2);"));
assertEquals(0+1, exec("return 0+1;"));
assertEquals(1+0, exec("return 1+0;"));
assertEquals(0+0, exec("return 0+0;"));
}
public void testByte() throws Exception {
assertEquals((byte)1+(byte)1, exec("byte x = 1; byte y = 1; return x+y;"));
assertEquals((byte)1+(byte)2, exec("byte x = 1; byte y = 2; return x+y;"));
assertEquals((byte)5+(byte)10, exec("byte x = 5; byte y = 10; return x+y;"));
assertEquals((byte)1+(byte)1+(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return x+y+z;"));
assertEquals(((byte)1+(byte)1)+(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return (x+y)+z;"));
assertEquals((byte)1+((byte)1+(byte)2), exec("byte x = 1; byte y = 1; byte z = 2; return x+(y+z);"));
assertEquals((byte)0+(byte)1, exec("byte x = 0; byte y = 1; return x+y;"));
assertEquals((byte)1+(byte)0, exec("byte x = 1; byte y = 0; return x+y;"));
assertEquals((byte)0+(byte)0, exec("byte x = 0; byte y = 0; return x+y;"));
}
public void testByteConst() throws Exception {
assertEquals((byte)1+(byte)1, exec("return (byte)1+(byte)1;"));
assertEquals((byte)1+(byte)2, exec("return (byte)1+(byte)2;"));
assertEquals((byte)5+(byte)10, exec("return (byte)5+(byte)10;"));
assertEquals((byte)1+(byte)1+(byte)2, exec("return (byte)1+(byte)1+(byte)2;"));
assertEquals(((byte)1+(byte)1)+(byte)2, exec("return ((byte)1+(byte)1)+(byte)2;"));
assertEquals((byte)1+((byte)1+(byte)2), exec("return (byte)1+((byte)1+(byte)2);"));
assertEquals((byte)0+(byte)1, exec("return (byte)0+(byte)1;"));
assertEquals((byte)1+(byte)0, exec("return (byte)1+(byte)0;"));
assertEquals((byte)0+(byte)0, exec("return (byte)0+(byte)0;"));
}
public void testChar() throws Exception {
assertEquals((char)1+(char)1, exec("char x = 1; char y = 1; return x+y;"));
assertEquals((char)1+(char)2, exec("char x = 1; char y = 2; return x+y;"));
assertEquals((char)5+(char)10, exec("char x = 5; char y = 10; return x+y;"));
assertEquals((char)1+(char)1+(char)2, exec("char x = 1; char y = 1; char z = 2; return x+y+z;"));
assertEquals(((char)1+(char)1)+(char)2, exec("char x = 1; char y = 1; char z = 2; return (x+y)+z;"));
assertEquals((char)1+((char)1+(char)2), exec("char x = 1; char y = 1; char z = 2; return x+(y+z);"));
assertEquals((char)0+(char)1, exec("char x = 0; char y = 1; return x+y;"));
assertEquals((char)1+(char)0, exec("char x = 1; char y = 0; return x+y;"));
assertEquals((char)0+(char)0, exec("char x = 0; char y = 0; return x+y;"));
}
public void testCharConst() throws Exception {
assertEquals((char)1+(char)1, exec("return (char)1+(char)1;"));
assertEquals((char)1+(char)2, exec("return (char)1+(char)2;"));
assertEquals((char)5+(char)10, exec("return (char)5+(char)10;"));
assertEquals((char)1+(char)1+(char)2, exec("return (char)1+(char)1+(char)2;"));
assertEquals(((char)1+(char)1)+(char)2, exec("return ((char)1+(char)1)+(char)2;"));
assertEquals((char)1+((char)1+(char)2), exec("return (char)1+((char)1+(char)2);"));
assertEquals((char)0+(char)1, exec("return (char)0+(char)1;"));
assertEquals((char)1+(char)0, exec("return (char)1+(char)0;"));
assertEquals((char)0+(char)0, exec("return (char)0+(char)0;"));
}
public void testShort() throws Exception {
assertEquals((short)1+(short)1, exec("short x = 1; short y = 1; return x+y;"));
assertEquals((short)1+(short)2, exec("short x = 1; short y = 2; return x+y;"));
assertEquals((short)5+(short)10, exec("short x = 5; short y = 10; return x+y;"));
assertEquals((short)1+(short)1+(short)2, exec("short x = 1; short y = 1; short z = 2; return x+y+z;"));
assertEquals(((short)1+(short)1)+(short)2, exec("short x = 1; short y = 1; short z = 2; return (x+y)+z;"));
assertEquals((short)1+((short)1+(short)2), exec("short x = 1; short y = 1; short z = 2; return x+(y+z);"));
assertEquals((short)0+(short)1, exec("short x = 0; short y = 1; return x+y;"));
assertEquals((short)1+(short)0, exec("short x = 1; short y = 0; return x+y;"));
assertEquals((short)0+(short)0, exec("short x = 0; short y = 0; return x+y;"));
}
public void testShortConst() throws Exception {
assertEquals((short)1+(short)1, exec("return (short)1+(short)1;"));
assertEquals((short)1+(short)2, exec("return (short)1+(short)2;"));
assertEquals((short)5+(short)10, exec("return (short)5+(short)10;"));
assertEquals((short)1+(short)1+(short)2, exec("return (short)1+(short)1+(short)2;"));
assertEquals(((short)1+(short)1)+(short)2, exec("return ((short)1+(short)1)+(short)2;"));
assertEquals((short)1+((short)1+(short)2), exec("return (short)1+((short)1+(short)2);"));
assertEquals((short)0+(short)1, exec("return (short)0+(short)1;"));
assertEquals((short)1+(short)0, exec("return (short)1+(short)0;"));
assertEquals((short)0+(short)0, exec("return (short)0+(short)0;"));
}
public void testLong() throws Exception {
assertEquals(1L+1L, exec("long x = 1; long y = 1; return x+y;"));
assertEquals(1L+2L, exec("long x = 1; long y = 2; return x+y;"));
assertEquals(5L+10L, exec("long x = 5; long y = 10; return x+y;"));
assertEquals(1L+1L+2L, exec("long x = 1; long y = 1; long z = 2; return x+y+z;"));
assertEquals((1L+1L)+2L, exec("long x = 1; long y = 1; long z = 2; return (x+y)+z;"));
assertEquals(1L+(1L+2L), exec("long x = 1; long y = 1; long z = 2; return x+(y+z);"));
assertEquals(0L+1L, exec("long x = 0; long y = 1; return x+y;"));
assertEquals(1L+0L, exec("long x = 1; long y = 0; return x+y;"));
assertEquals(0L+0L, exec("long x = 0; long y = 0; return x+y;"));
}
public void testLongConst() throws Exception {
assertEquals(1L+1L, exec("return 1L+1L;"));
assertEquals(1L+2L, exec("return 1L+2L;"));
assertEquals(5L+10L, exec("return 5L+10L;"));
assertEquals(1L+1L+2L, exec("return 1L+1L+2L;"));
assertEquals((1L+1L)+2L, exec("return (1L+1L)+2L;"));
assertEquals(1L+(1L+2L), exec("return 1L+(1L+2L);"));
assertEquals(0L+1L, exec("return 0L+1L;"));
assertEquals(1L+0L, exec("return 1L+0L;"));
assertEquals(0L+0L, exec("return 0L+0L;"));
}
public void testFloat() throws Exception {
assertEquals(1F+1F, exec("float x = 1F; float y = 1F; return x+y;"));
assertEquals(1F+2F, exec("float x = 1F; float y = 2F; return x+y;"));
assertEquals(5F+10F, exec("float x = 5F; float y = 10F; return x+y;"));
assertEquals(1F+1F+2F, exec("float x = 1F; float y = 1F; float z = 2F; return x+y+z;"));
assertEquals((1F+1F)+2F, exec("float x = 1F; float y = 1F; float z = 2F; return (x+y)+z;"));
assertEquals((1F+1F)+2F, exec("float x = 1F; float y = 1F; float z = 2F; return x+(y+z);"));
assertEquals(0F+1F, exec("float x = 0F; float y = 1F; return x+y;"));
assertEquals(1F+0F, exec("float x = 1F; float y = 0F; return x+y;"));
assertEquals(0F+0F, exec("float x = 0F; float y = 0F; return x+y;"));
}
public void testFloatConst() throws Exception {
assertEquals(1F+1F, exec("return 1F+1F;"));
assertEquals(1F+2F, exec("return 1F+2F;"));
assertEquals(5F+10F, exec("return 5F+10F;"));
assertEquals(1F+1F+2F, exec("return 1F+1F+2F;"));
assertEquals((1F+1F)+2F, exec("return (1F+1F)+2F;"));
assertEquals(1F+(1F+2F), exec("return 1F+(1F+2F);"));
assertEquals(0F+1F, exec("return 0F+1F;"));
assertEquals(1F+0F, exec("return 1F+0F;"));
assertEquals(0F+0F, exec("return 0F+0F;"));
}
public void testDouble() throws Exception {
assertEquals(1.0+1.0, exec("double x = 1.0; double y = 1.0; return x+y;"));
assertEquals(1.0+2.0, exec("double x = 1.0; double y = 2.0; return x+y;"));
assertEquals(5.0+10.0, exec("double x = 5.0; double y = 10.0; return x+y;"));
assertEquals(1.0+1.0+2.0, exec("double x = 1.0; double y = 1.0; double z = 2.0; return x+y+z;"));
assertEquals((1.0+1.0)+2.0, exec("double x = 1.0; double y = 1.0; double z = 2.0; return (x+y)+z;"));
assertEquals(1.0+(1.0+2.0), exec("double x = 1.0; double y = 1.0; double z = 2.0; return x+(y+z);"));
assertEquals(0.0+1.0, exec("double x = 0.0; double y = 1.0; return x+y;"));
assertEquals(1.0+0.0, exec("double x = 1.0; double y = 0.0; return x+y;"));
assertEquals(0.0+0.0, exec("double x = 0.0; double y = 0.0; return x+y;"));
}
public void testDoubleConst() throws Exception {
assertEquals(1.0+1.0, exec("return 1.0+1.0;"));
assertEquals(1.0+2.0, exec("return 1.0+2.0;"));
assertEquals(5.0+10.0, exec("return 5.0+10.0;"));
assertEquals(1.0+1.0+2.0, exec("return 1.0+1.0+2.0;"));
assertEquals((1.0+1.0)+2.0, exec("return (1.0+1.0)+2.0;"));
assertEquals(1.0+(1.0+2.0), exec("return 1.0+(1.0+2.0);"));
assertEquals(0.0+1.0, exec("return 0.0+1.0;"));
assertEquals(1.0+0.0, exec("return 1.0+0.0;"));
assertEquals(0.0+0.0, exec("return 0.0+0.0;"));
}
}

View File

@ -0,0 +1,48 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;
/** Tests for and operator across all types */
public class AndTests extends ScriptTestCase {
public void testInt() throws Exception {
assertEquals(5 & 12, exec("int x = 5; int y = 12; return x & y;"));
assertEquals(5 & -12, exec("int x = 5; int y = -12; return x & y;"));
assertEquals(7 & 15 & 3, exec("int x = 7; int y = 15; int z = 3; return x & y & z;"));
}
public void testIntConst() throws Exception {
assertEquals(5 & 12, exec("return 5 & 12;"));
assertEquals(5 & -12, exec("return 5 & -12;"));
assertEquals(7 & 15 & 3, exec("return 7 & 15 & 3;"));
}
public void testLong() throws Exception {
assertEquals(5L & 12L, exec("long x = 5; long y = 12; return x & y;"));
assertEquals(5L & -12L, exec("long x = 5; long y = -12; return x & y;"));
assertEquals(7L & 15L & 3L, exec("long x = 7; long y = 15; long z = 3; return x & y & z;"));
}
public void testLongConst() throws Exception {
assertEquals(5L & 12L, exec("return 5L & 12L;"));
assertEquals(5L & -12L, exec("return 5L & -12L;"));
assertEquals(7L & 15L & 3L, exec("return 7L & 15L & 3L;"));
}
}

View File

@ -0,0 +1,126 @@
package org.elasticsearch.plan.a;
import java.util.Collections;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
public class BasicExpressionTests extends ScriptTestCase {
/** simple tests returning a constant value */
public void testReturnConstant() {
assertEquals(5, exec("return 5;"));
assertEquals(7L, exec("return 7L;"));
assertEquals(7.0, exec("return 7.0;"));
assertEquals(32.0F, exec("return 32.0F;"));
assertEquals((byte)255, exec("return (byte)255;"));
assertEquals((short)5, exec("return (short)5;"));
assertEquals("string", exec("return \"string\";"));
assertEquals(true, exec("return true;"));
assertEquals(false, exec("return false;"));
assertNull(exec("return null;"));
}
public void testReturnConstantChar() {
assertEquals('x', exec("return 'x';"));
}
public void testConstantCharTruncation() {
assertEquals('蚠', exec("return (char)100000;"));
}
/** declaring variables for primitive types */
public void testDeclareVariable() {
assertEquals(5, exec("int i = 5; return i;"));
assertEquals(7L, exec("long l = 7; return l;"));
assertEquals(7.0, exec("double d = 7; return d;"));
assertEquals(32.0F, exec("float f = 32F; return f;"));
assertEquals((byte)255, exec("byte b = (byte)255; return b;"));
assertEquals((short)5, exec("short s = (short)5; return s;"));
assertEquals("string", exec("String s = \"string\"; return s;"));
assertEquals(true, exec("boolean v = true; return v;"));
assertEquals(false, exec("boolean v = false; return v;"));
}
public void testCast() {
assertEquals(1, exec("return (int)1.0;"));
assertEquals((byte)100, exec("double x = 100; return (byte)x;"));
assertEquals(3, exec(
"Map x = new HashMap();\n" +
"Object y = x;\n" +
"((Map)y).put(2, 3);\n" +
"return x.get(2);\n"));
}
public void testCat() {
assertEquals("aaabbb", exec("return \"aaa\" + \"bbb\";"));
assertEquals("aaabbb", exec("String aaa = \"aaa\", bbb = \"bbb\"; return aaa + bbb;"));
assertEquals("aaabbbbbbbbb", exec(
"String aaa = \"aaa\", bbb = \"bbb\"; int x;\n" +
"for (; x < 3; ++x) \n" +
" aaa += bbb;\n" +
"return aaa;"));
}
public void testComp() {
assertEquals(true, exec("return 2 < 3;"));
assertEquals(false, exec("int x = 4; char y = 2; return x < y;"));
assertEquals(true, exec("return 3 <= 3;"));
assertEquals(true, exec("int x = 3; char y = 3; return x <= y;"));
assertEquals(false, exec("return 2 > 3;"));
assertEquals(true, exec("int x = 4; long y = 2; return x > y;"));
assertEquals(false, exec("return 3 >= 4;"));
assertEquals(true, exec("double x = 3; float y = 3; return x >= y;"));
assertEquals(false, exec("return 3 == 4;"));
assertEquals(true, exec("double x = 3; float y = 3; return x == y;"));
assertEquals(true, exec("return 3 != 4;"));
assertEquals(false, exec("double x = 3; float y = 3; return x != y;"));
}
/**
* Test boxed objects in various places
*/
public void testBoxing() {
// return
assertEquals(4, exec("return input.get(\"x\");", Collections.singletonMap("x", 4)));
// assignment
assertEquals(4, exec("int y = (Integer)input.get(\"x\"); return y;", Collections.singletonMap("x", 4)));
// comparison
assertEquals(true, exec("return 5 > (Integer)input.get(\"x\");", Collections.singletonMap("x", 4)));
}
public void testBool() {
assertEquals(true, exec("return true && true;"));
assertEquals(false, exec("boolean a = true, b = false; return a && b;"));
assertEquals(true, exec("return true || true;"));
assertEquals(true, exec("boolean a = true, b = false; return a || b;"));
}
public void testConditional() {
assertEquals(1, exec("int x = 5; return x > 3 ? 1 : 0;"));
assertEquals(0, exec("String a = null; return a != null ? 1 : 0;"));
}
public void testPrecedence() {
assertEquals(2, exec("int x = 5; return (x+x)/x;"));
assertEquals(true, exec("boolean t = true, f = false; return t && (f || t);"));
}
}

View File

@ -0,0 +1,178 @@
package org.elasticsearch.plan.a;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.HashMap;
import java.util.Map;
public class BasicStatementTests extends ScriptTestCase {
public void testIfStatement() {
assertEquals(1, exec("int x = 5; if (x == 5) return 1; return 0;"));
assertEquals(0, exec("int x = 4; if (x == 5) return 1; else return 0;"));
assertEquals(2, exec("int x = 4; if (x == 5) return 1; else if (x == 4) return 2; else return 0;"));
assertEquals(1, exec("int x = 4; if (x == 5) return 1; else if (x == 4) return 1; else return 0;"));
assertEquals(3, exec(
"int x = 5;\n" +
"if (x == 5) {\n" +
" int y = 2;\n" +
" \n" +
" if (y == 2) {\n" +
" x = 3;\n" +
" }\n" +
" \n" +
"}\n" +
"\n" +
"return x;\n"));
}
public void testWhileStatement() {
assertEquals("aaaaaa", exec("String c = \"a\"; int x; while (x < 5) { c += \"a\"; ++x; } return c;"));
Object value = exec(
" byte[][] b = new byte[5][5]; \n" +
" byte x = 0, y; \n" +
" \n" +
" while (x < 5) { \n" +
" y = 0; \n" +
" \n" +
" while (y < 5) { \n" +
" b[x][y] = (byte)(x*y); \n" +
" ++y; \n" +
" } \n" +
" \n" +
" ++x; \n" +
" } \n" +
" \n" +
" return b; \n");
byte[][] b = (byte[][])value;
for (byte x = 0; x < 5; ++x) {
for (byte y = 0; y < 5; ++y) {
assertEquals(x*y, b[x][y]);
}
}
}
public void testDoWhileStatement() {
assertEquals("aaaaaa", exec("String c = \"a\"; int x; do { c += \"a\"; ++x; } while (x < 5); return c;"));
Object value = exec(
" int[][] b = new int[5][5]; \n" +
" int x = 0, y; \n" +
" \n" +
" do { \n" +
" y = 0; \n" +
" \n" +
" do { \n" +
" b[x][y] = x*y; \n" +
" ++y; \n" +
" } while (y < 5); \n" +
" \n" +
" ++x; \n" +
" } while (x < 5); \n" +
" \n" +
" return b; \n");
int[][] b = (int[][])value;
for (byte x = 0; x < 5; ++x) {
for (byte y = 0; y < 5; ++y) {
assertEquals(x*y, b[x][y]);
}
}
}
public void testForStatement() {
assertEquals("aaaaaa", exec("String c = \"a\"; for (int x = 0; x < 5; ++x) c += \"a\"; return c;"));
Object value = exec(
" int[][] b = new int[5][5]; \n" +
" for (int x = 0; x < 5; ++x) { \n" +
" for (int y = 0; y < 5; ++y) { \n" +
" b[x][y] = x*y; \n" +
" } \n" +
" } \n" +
" \n" +
" return b; \n");
int[][] b = (int[][])value;
for (byte x = 0; x < 5; ++x) {
for (byte y = 0; y < 5; ++y) {
assertEquals(x*y, b[x][y]);
}
}
}
public void testDeclarationStatement() {
assertEquals((byte)2, exec("byte a = 2; return a;"));
assertEquals((short)2, exec("short a = 2; return a;"));
assertEquals((char)2, exec("char a = 2; return a;"));
assertEquals(2, exec("int a = 2; return a;"));
assertEquals(2L, exec("long a = 2; return a;"));
assertEquals(2F, exec("float a = 2; return a;"));
assertEquals(2.0, exec("double a = 2; return a;"));
assertEquals(false, exec("boolean a = false; return a;"));
assertEquals("string", exec("String a = \"string\"; return a;"));
assertEquals(HashMap.class, exec("Map<String,Object> a = new HashMap<String,Object>(); return a;").getClass());
assertEquals(byte[].class, exec("byte[] a = new byte[1]; return a;").getClass());
assertEquals(short[].class, exec("short[] a = new short[1]; return a;").getClass());
assertEquals(char[].class, exec("char[] a = new char[1]; return a;").getClass());
assertEquals(int[].class, exec("int[] a = new int[1]; return a;").getClass());
assertEquals(long[].class, exec("long[] a = new long[1]; return a;").getClass());
assertEquals(float[].class, exec("float[] a = new float[1]; return a;").getClass());
assertEquals(double[].class, exec("double[] a = new double[1]; return a;").getClass());
assertEquals(boolean[].class, exec("boolean[] a = new boolean[1]; return a;").getClass());
assertEquals(String[].class, exec("String[] a = new String[1]; return a;").getClass());
assertEquals(Map[].class, exec("Map<String,Object>[] a = new Map<String,Object>[1]; return a;").getClass());
assertEquals(byte[][].class, exec("byte[][] a = new byte[1][2]; return a;").getClass());
assertEquals(short[][][].class, exec("short[][][] a = new short[1][2][3]; return a;").getClass());
assertEquals(char[][][][].class, exec("char[][][][] a = new char[1][2][3][4]; return a;").getClass());
assertEquals(int[][][][][].class, exec("int[][][][][] a = new int[1][2][3][4][5]; return a;").getClass());
assertEquals(long[][].class, exec("long[][] a = new long[1][2]; return a;").getClass());
assertEquals(float[][][].class, exec("float[][][] a = new float[1][2][3]; return a;").getClass());
assertEquals(double[][][][].class, exec("double[][][][] a = new double[1][2][3][4]; return a;").getClass());
assertEquals(boolean[][][][][].class, exec("boolean[][][][][] a = new boolean[1][2][3][4][5]; return a;").getClass());
assertEquals(String[][].class, exec("String[][] a = new String[1][2]; return a;").getClass());
assertEquals(Map[][][].class, exec("Map<String,Object>[][][] a = new Map<String,Object>[1][2][3]; return a;").getClass());
}
public void testContinueStatement() {
assertEquals(9, exec("int x = 0, y = 0; while (x < 10) { ++x; if (x == 1) continue; ++y; } return y;"));
}
public void testBreakStatement() {
assertEquals(4, exec("int x = 0, y = 0; while (x < 10) { ++x; if (x == 5) break; ++y; } return y;"));
}
public void testReturnStatement() {
assertEquals(10, exec("return 10;"));
assertEquals(5, exec("int x = 5; return x;"));
assertEquals(4, exec("int[] x = new int[2]; x[1] = 4; return x[1];"));
assertEquals(5, ((short[])exec("short[] s = new short[3]; s[1] = 5; return s;"))[1]);
assertEquals(10, ((Map)exec("Map<String, Object> s = new HashMap< String , Object >(); s.put(\"x\", 10); return s;")).get("x"));
}
}

View File

@ -0,0 +1,294 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;

/**
* Tests binary operators across different types
*/
// TODO: NaN/Inf/overflow/...
public class BinaryOperatorTests extends ScriptTestCase {
// TODO: move to per-type tests and test for each type
public void testBasics() {
assertEquals(2.25F / 1.5F, exec("return 2.25F / 1.5F;"));
assertEquals(2.25F % 1.5F, exec("return 2.25F % 1.5F;"));
assertEquals(2 - 1, exec("return 2 - 1;"));
assertEquals(1 << 2, exec("return 1 << 2;"));
assertEquals(4 >> 2, exec("return 4 >> 2;"));
assertEquals(-1 >>> 29, exec("return -1 >>> 29;"));
assertEquals(5 & 3, exec("return 5 & 3;"));
assertEquals(5 & 3L, exec("return 5 & 3L;"));
assertEquals(5L & 3, exec("return 5L & 3;"));
assertEquals(5 | 3, exec("return 5 | 3;"));
assertEquals(5L | 3, exec("return 5L | 3;"));
assertEquals(5 | 3L, exec("return 5 | 3L;"));
assertEquals(9 ^ 3, exec("return 9 ^ 3;"));
assertEquals(9L ^ 3, exec("return 9L ^ 3;"));
assertEquals(9 ^ 3L, exec("return 9 ^ 3L;"));
}
public void testLongShifts() {
// note: we always promote the results of shifts too (unlike java)
assertEquals(1L << 2, exec("long x = 1L; int y = 2; return x << y;"));
assertEquals(1L << 2L, exec("long x = 1L; long y = 2L; return x << y;"));
assertEquals(4L >> 2L, exec("long x = 4L; long y = 2L; return x >> y;"));
assertEquals(4L >> 2, exec("long x = 4L; int y = 2; return x >> y;"));
assertEquals(-1L >>> 29, exec("long x = -1L; int y = 29; return x >>> y;"));
assertEquals(-1L >>> 29L, exec("long x = -1L; long y = 29L; return x >>> y;"));
}
public void testLongShiftsConst() {
// note: we always promote the results of shifts too (unlike java)
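        // (in Java the result type of a shift comes from the left operand alone, so 1 << 2L stays an int;
        //  here the long right operand widens the result to long, as the 1L << 2L expectation below shows)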
assertEquals(1L << 2, exec("return 1L << 2;"));
assertEquals(1L << 2L, exec("return 1 << 2L;"));
assertEquals(4L >> 2L, exec("return 4 >> 2L;"));
assertEquals(4L >> 2, exec("return 4L >> 2;"));
assertEquals(-1L >>> 29, exec("return -1L >>> 29;"));
assertEquals(-1L >>> 29L, exec("return -1 >>> 29L;"));
}
public void testMixedTypes() {
assertEquals(8, exec("int x = 4; char y = 2; return x*y;"));
assertEquals(0.5, exec("double x = 1; float y = 2; return x / y;"));
assertEquals(1, exec("int x = 3; int y = 2; return x % y;"));
assertEquals(3.0, exec("double x = 1; byte y = 2; return x + y;"));
assertEquals(-1, exec("int x = 1; char y = 2; return x - y;"));
assertEquals(4, exec("int x = 1; char y = 2; return x << y;"));
assertEquals(-1, exec("int x = -1; char y = 29; return x >> y;"));
assertEquals(3, exec("int x = -1; char y = 30; return x >>> y;"));
assertEquals(1L, exec("int x = 5; long y = 3; return x & y;"));
assertEquals(7, exec("short x = 5; byte y = 3; return x | y;"));
assertEquals(10, exec("short x = 9; char y = 3; return x ^ y;"));
}
public void testBinaryPromotion() throws Exception {
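        // expected values follow standard binary numeric promotion: byte/short/char promote to int,
        // otherwise the wider of the two operand types wins (int < long < float < double)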
// byte/byte
assertEquals((byte)1 + (byte)1, exec("byte x = 1; byte y = 1; return x+y;"));
// byte/char
assertEquals((byte)1 + (char)1, exec("byte x = 1; char y = 1; return x+y;"));
// byte/short
assertEquals((byte)1 + (short)1, exec("byte x = 1; short y = 1; return x+y;"));
// byte/int
assertEquals((byte)1 + 1, exec("byte x = 1; int y = 1; return x+y;"));
// byte/long
assertEquals((byte)1 + 1L, exec("byte x = 1; long y = 1; return x+y;"));
// byte/float
assertEquals((byte)1 + 1F, exec("byte x = 1; float y = 1; return x+y;"));
// byte/double
assertEquals((byte)1 + 1.0, exec("byte x = 1; double y = 1; return x+y;"));
// char/byte
assertEquals((char)1 + (byte)1, exec("char x = 1; byte y = 1; return x+y;"));
// char/char
assertEquals((char)1 + (char)1, exec("char x = 1; char y = 1; return x+y;"));
// char/short
assertEquals((char)1 + (short)1, exec("char x = 1; short y = 1; return x+y;"));
// char/int
assertEquals((char)1 + 1, exec("char x = 1; int y = 1; return x+y;"));
// char/long
assertEquals((char)1 + 1L, exec("char x = 1; long y = 1; return x+y;"));
// char/float
assertEquals((char)1 + 1F, exec("char x = 1; float y = 1; return x+y;"));
// char/double
assertEquals((char)1 + 1.0, exec("char x = 1; double y = 1; return x+y;"));
// short/byte
assertEquals((short)1 + (byte)1, exec("short x = 1; byte y = 1; return x+y;"));
// short/char
assertEquals((short)1 + (char)1, exec("short x = 1; char y = 1; return x+y;"));
// short/short
assertEquals((short)1 + (short)1, exec("short x = 1; short y = 1; return x+y;"));
// short/int
assertEquals((short)1 + 1, exec("short x = 1; int y = 1; return x+y;"));
// short/long
assertEquals((short)1 + 1L, exec("short x = 1; long y = 1; return x+y;"));
// short/float
assertEquals((short)1 + 1F, exec("short x = 1; float y = 1; return x+y;"));
// short/double
assertEquals((short)1 + 1.0, exec("short x = 1; double y = 1; return x+y;"));
// int/byte
assertEquals(1 + (byte)1, exec("int x = 1; byte y = 1; return x+y;"));
// int/char
assertEquals(1 + (char)1, exec("int x = 1; char y = 1; return x+y;"));
// int/short
assertEquals(1 + (short)1, exec("int x = 1; short y = 1; return x+y;"));
// int/int
assertEquals(1 + 1, exec("int x = 1; int y = 1; return x+y;"));
// int/long
assertEquals(1 + 1L, exec("int x = 1; long y = 1; return x+y;"));
// int/float
assertEquals(1 + 1F, exec("int x = 1; float y = 1; return x+y;"));
// int/double
assertEquals(1 + 1.0, exec("int x = 1; double y = 1; return x+y;"));
// long/byte
assertEquals(1L + (byte)1, exec("long x = 1; byte y = 1; return x+y;"));
// long/char
assertEquals(1L + (char)1, exec("long x = 1; char y = 1; return x+y;"));
// long/short
assertEquals(1L + (short)1, exec("long x = 1; short y = 1; return x+y;"));
// long/int
assertEquals(1L + 1, exec("long x = 1; int y = 1; return x+y;"));
// long/long
assertEquals(1L + 1L, exec("long x = 1; long y = 1; return x+y;"));
// long/float
assertEquals(1L + 1F, exec("long x = 1; float y = 1; return x+y;"));
// long/double
assertEquals(1L + 1.0, exec("long x = 1; double y = 1; return x+y;"));
// float/byte
assertEquals(1F + (byte)1, exec("float x = 1; byte y = 1; return x+y;"));
// float/char
assertEquals(1F + (char)1, exec("float x = 1; char y = 1; return x+y;"));
// float/short
assertEquals(1F + (short)1, exec("float x = 1; short y = 1; return x+y;"));
// float/int
assertEquals(1F + 1, exec("float x = 1; int y = 1; return x+y;"));
// float/long
assertEquals(1F + 1L, exec("float x = 1; long y = 1; return x+y;"));
// float/float
assertEquals(1F + 1F, exec("float x = 1; float y = 1; return x+y;"));
// float/double
assertEquals(1F + 1.0, exec("float x = 1; double y = 1; return x+y;"));
// double/byte
assertEquals(1.0 + (byte)1, exec("double x = 1; byte y = 1; return x+y;"));
// double/char
assertEquals(1.0 + (char)1, exec("double x = 1; char y = 1; return x+y;"));
// double/short
assertEquals(1.0 + (short)1, exec("double x = 1; short y = 1; return x+y;"));
// double/int
assertEquals(1.0 + 1, exec("double x = 1; int y = 1; return x+y;"));
// double/long
assertEquals(1.0 + 1L, exec("double x = 1; long y = 1; return x+y;"));
// double/float
assertEquals(1.0 + 1F, exec("double x = 1; float y = 1; return x+y;"));
// double/double
assertEquals(1.0 + 1.0, exec("double x = 1; double y = 1; return x+y;"));
}
public void testBinaryPromotionConst() throws Exception {
// byte/byte
assertEquals((byte)1 + (byte)1, exec("return (byte)1 + (byte)1;"));
// byte/char
assertEquals((byte)1 + (char)1, exec("return (byte)1 + (char)1;"));
// byte/short
assertEquals((byte)1 + (short)1, exec("return (byte)1 + (short)1;"));
// byte/int
assertEquals((byte)1 + 1, exec("return (byte)1 + 1;"));
// byte/long
assertEquals((byte)1 + 1L, exec("return (byte)1 + 1L;"));
// byte/float
assertEquals((byte)1 + 1F, exec("return (byte)1 + 1F;"));
// byte/double
assertEquals((byte)1 + 1.0, exec("return (byte)1 + 1.0;"));
// char/byte
assertEquals((char)1 + (byte)1, exec("return (char)1 + (byte)1;"));
// char/char
assertEquals((char)1 + (char)1, exec("return (char)1 + (char)1;"));
// char/short
assertEquals((char)1 + (short)1, exec("return (char)1 + (short)1;"));
// char/int
assertEquals((char)1 + 1, exec("return (char)1 + 1;"));
// char/long
assertEquals((char)1 + 1L, exec("return (char)1 + 1L;"));
// char/float
assertEquals((char)1 + 1F, exec("return (char)1 + 1F;"));
// char/double
assertEquals((char)1 + 1.0, exec("return (char)1 + 1.0;"));
// short/byte
assertEquals((short)1 + (byte)1, exec("return (short)1 + (byte)1;"));
// short/char
assertEquals((short)1 + (char)1, exec("return (short)1 + (char)1;"));
// short/short
assertEquals((short)1 + (short)1, exec("return (short)1 + (short)1;"));
// short/int
assertEquals((short)1 + 1, exec("return (short)1 + 1;"));
// short/long
assertEquals((short)1 + 1L, exec("return (short)1 + 1L;"));
// short/float
assertEquals((short)1 + 1F, exec("return (short)1 + 1F;"));
// short/double
assertEquals((short)1 + 1.0, exec("return (short)1 + 1.0;"));
// int/byte
assertEquals(1 + (byte)1, exec("return 1 + (byte)1;"));
// int/char
assertEquals(1 + (char)1, exec("return 1 + (char)1;"));
// int/short
assertEquals(1 + (short)1, exec("return 1 + (short)1;"));
// int/int
assertEquals(1 + 1, exec("return 1 + 1;"));
// int/long
assertEquals(1 + 1L, exec("return 1 + 1L;"));
// int/float
assertEquals(1 + 1F, exec("return 1 + 1F;"));
// int/double
assertEquals(1 + 1.0, exec("return 1 + 1.0;"));
// long/byte
assertEquals(1L + (byte)1, exec("return 1L + (byte)1;"));
// long/char
assertEquals(1L + (char)1, exec("return 1L + (char)1;"));
// long/short
assertEquals(1L + (short)1, exec("return 1L + (short)1;"));
// long/int
assertEquals(1L + 1, exec("return 1L + 1;"));
// long/long
assertEquals(1L + 1L, exec("return 1L + 1L;"));
// long/float
assertEquals(1L + 1F, exec("return 1L + 1F;"));
// long/double
assertEquals(1L + 1.0, exec("return 1L + 1.0;"));
// float/byte
assertEquals(1F + (byte)1, exec("return 1F + (byte)1;"));
// float/char
assertEquals(1F + (char)1, exec("return 1F + (char)1;"));
// float/short
assertEquals(1F + (short)1, exec("return 1F + (short)1;"));
// float/int
assertEquals(1F + 1, exec("return 1F + 1;"));
// float/long
assertEquals(1F + 1L, exec("return 1F + 1L;"));
// float/float
assertEquals(1F + 1F, exec("return 1F + 1F;"));
// float/double
assertEquals(1F + 1.0, exec("return 1F + 1.0;"));
// double/byte
assertEquals(1.0 + (byte)1, exec("return 1.0 + (byte)1;"));
// double/char
assertEquals(1.0 + (char)1, exec("return 1.0 + (char)1;"));
// double/short
assertEquals(1.0 + (short)1, exec("return 1.0 + (short)1;"));
// double/int
assertEquals(1.0 + 1, exec("return 1.0 + 1;"));
// double/long
assertEquals(1.0 + 1L, exec("return 1.0 + 1L;"));
// double/float
assertEquals(1.0 + 1F, exec("return 1.0 + 1F;"));
// double/double
assertEquals(1.0 + 1.0, exec("return 1.0 + 1.0;"));
}
}

View File

@ -0,0 +1,319 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;

/**
* Tests compound assignments (+=, etc) across all data types
*/
public class CompoundAssignmentTests extends ScriptTestCase {
public void testAddition() {
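        // compound assignment implicitly casts the result back to the left-hand type
        // (mirroring Java's E1 = (T)(E1 op E2) expansion), which is why += compiles for byte/short/char here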
// byte
assertEquals((byte) 15, exec("byte x = 5; x += 10; return x;"));
assertEquals((byte) -5, exec("byte x = 5; x += -10; return x;"));
// short
assertEquals((short) 15, exec("short x = 5; x += 10; return x;"));
assertEquals((short) -5, exec("short x = 5; x += -10; return x;"));
// char
assertEquals((char) 15, exec("char x = 5; x += 10; return x;"));
assertEquals((char) 5, exec("char x = 10; x += -5; return x;"));
// int
assertEquals(15, exec("int x = 5; x += 10; return x;"));
assertEquals(-5, exec("int x = 5; x += -10; return x;"));
// long
assertEquals(15L, exec("long x = 5; x += 10; return x;"));
assertEquals(-5L, exec("long x = 5; x += -10; return x;"));
// float
assertEquals(15F, exec("float x = 5f; x += 10; return x;"));
assertEquals(-5F, exec("float x = 5f; x += -10; return x;"));
// double
assertEquals(15D, exec("double x = 5.0; x += 10; return x;"));
assertEquals(-5D, exec("double x = 5.0; x += -10; return x;"));
}
public void testSubtraction() {
// byte
assertEquals((byte) 15, exec("byte x = 5; x -= -10; return x;"));
assertEquals((byte) -5, exec("byte x = 5; x -= 10; return x;"));
// short
assertEquals((short) 15, exec("short x = 5; x -= -10; return x;"));
assertEquals((short) -5, exec("short x = 5; x -= 10; return x;"));
// char
assertEquals((char) 15, exec("char x = 5; x -= -10; return x;"));
assertEquals((char) 5, exec("char x = 10; x -= 5; return x;"));
// int
assertEquals(15, exec("int x = 5; x -= -10; return x;"));
assertEquals(-5, exec("int x = 5; x -= 10; return x;"));
// long
assertEquals(15L, exec("long x = 5; x -= -10; return x;"));
assertEquals(-5L, exec("long x = 5; x -= 10; return x;"));
// float
assertEquals(15F, exec("float x = 5f; x -= -10; return x;"));
assertEquals(-5F, exec("float x = 5f; x -= 10; return x;"));
// double
assertEquals(15D, exec("double x = 5.0; x -= -10; return x;"));
assertEquals(-5D, exec("double x = 5.0; x -= 10; return x;"));
}
public void testMultiplication() {
// byte
assertEquals((byte) 15, exec("byte x = 5; x *= 3; return x;"));
assertEquals((byte) -5, exec("byte x = 5; x *= -1; return x;"));
// short
assertEquals((short) 15, exec("short x = 5; x *= 3; return x;"));
assertEquals((short) -5, exec("short x = 5; x *= -1; return x;"));
// char
assertEquals((char) 15, exec("char x = 5; x *= 3; return x;"));
// int
assertEquals(15, exec("int x = 5; x *= 3; return x;"));
assertEquals(-5, exec("int x = 5; x *= -1; return x;"));
// long
assertEquals(15L, exec("long x = 5; x *= 3; return x;"));
assertEquals(-5L, exec("long x = 5; x *= -1; return x;"));
// float
assertEquals(15F, exec("float x = 5f; x *= 3; return x;"));
assertEquals(-5F, exec("float x = 5f; x *= -1; return x;"));
// double
assertEquals(15D, exec("double x = 5.0; x *= 3; return x;"));
assertEquals(-5D, exec("double x = 5.0; x *= -1; return x;"));
}
public void testDivision() {
// byte
assertEquals((byte) 15, exec("byte x = 45; x /= 3; return x;"));
assertEquals((byte) -5, exec("byte x = 5; x /= -1; return x;"));
// short
assertEquals((short) 15, exec("short x = 45; x /= 3; return x;"));
assertEquals((short) -5, exec("short x = 5; x /= -1; return x;"));
// char
assertEquals((char) 15, exec("char x = 45; x /= 3; return x;"));
// int
assertEquals(15, exec("int x = 45; x /= 3; return x;"));
assertEquals(-5, exec("int x = 5; x /= -1; return x;"));
// long
assertEquals(15L, exec("long x = 45; x /= 3; return x;"));
assertEquals(-5L, exec("long x = 5; x /= -1; return x;"));
// float
assertEquals(15F, exec("float x = 45f; x /= 3; return x;"));
assertEquals(-5F, exec("float x = 5f; x /= -1; return x;"));
// double
assertEquals(15D, exec("double x = 45.0; x /= 3; return x;"));
assertEquals(-5D, exec("double x = 5.0; x /= -1; return x;"));
}
public void testDivisionByZero() {
// byte
try {
exec("byte x = 1; x /= 0; return x;");
fail("should have hit exception");
} catch (ArithmeticException expected) {}
// short
try {
exec("short x = 1; x /= 0; return x;");
fail("should have hit exception");
} catch (ArithmeticException expected) {}
// char
try {
exec("char x = 1; x /= 0; return x;");
fail("should have hit exception");
} catch (ArithmeticException expected) {}
// int
try {
exec("int x = 1; x /= 0; return x;");
fail("should have hit exception");
} catch (ArithmeticException expected) {}
// long
try {
exec("long x = 1; x /= 0; return x;");
fail("should have hit exception");
} catch (ArithmeticException expected) {}
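        // (float/double division by zero yields Infinity/NaN rather than throwing, which is presumably
        //  why those types are not covered here)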
}
public void testRemainder() {
// byte
assertEquals((byte) 3, exec("byte x = 15; x %= 4; return x;"));
assertEquals((byte) -3, exec("byte x = (byte) -15; x %= 4; return x;"));
// short
assertEquals((short) 3, exec("short x = 15; x %= 4; return x;"));
assertEquals((short) -3, exec("short x = (short) -15; x %= 4; return x;"));
// char
assertEquals((char) 3, exec("char x = (char) 15; x %= 4; return x;"));
// int
assertEquals(3, exec("int x = 15; x %= 4; return x;"));
assertEquals(-3, exec("int x = -15; x %= 4; return x;"));
// long
assertEquals(3L, exec("long x = 15L; x %= 4; return x;"));
assertEquals(-3L, exec("long x = -15L; x %= 4; return x;"));
// float
assertEquals(3F, exec("float x = 15F; x %= 4; return x;"));
assertEquals(-3F, exec("float x = -15F; x %= 4; return x;"));
// double
assertEquals(3D, exec("double x = 15.0; x %= 4; return x;"));
assertEquals(-3D, exec("double x = -15.0; x %= 4; return x;"));
}
public void testLeftShift() {
// byte
assertEquals((byte) 60, exec("byte x = 15; x <<= 2; return x;"));
assertEquals((byte) -60, exec("byte x = (byte) -15; x <<= 2; return x;"));
// short
assertEquals((short) 60, exec("short x = 15; x <<= 2; return x;"));
assertEquals((short) -60, exec("short x = (short) -15; x <<= 2; return x;"));
// char
assertEquals((char) 60, exec("char x = (char) 15; x <<= 2; return x;"));
// int
assertEquals(60, exec("int x = 15; x <<= 2; return x;"));
assertEquals(-60, exec("int x = -15; x <<= 2; return x;"));
// long
assertEquals(60L, exec("long x = 15L; x <<= 2; return x;"));
assertEquals(-60L, exec("long x = -15L; x <<= 2; return x;"));
}
public void testRightShift() {
// byte
assertEquals((byte) 15, exec("byte x = 60; x >>= 2; return x;"));
assertEquals((byte) -15, exec("byte x = (byte) -60; x >>= 2; return x;"));
// short
assertEquals((short) 15, exec("short x = 60; x >>= 2; return x;"));
assertEquals((short) -15, exec("short x = (short) -60; x >>= 2; return x;"));
// char
assertEquals((char) 15, exec("char x = (char) 60; x >>= 2; return x;"));
// int
assertEquals(15, exec("int x = 60; x >>= 2; return x;"));
assertEquals(-15, exec("int x = -60; x >>= 2; return x;"));
// long
assertEquals(15L, exec("long x = 60L; x >>= 2; return x;"));
assertEquals(-15L, exec("long x = -60L; x >>= 2; return x;"));
}
public void testUnsignedRightShift() {
// byte
assertEquals((byte) 15, exec("byte x = 60; x >>>= 2; return x;"));
assertEquals((byte) -15, exec("byte x = (byte) -60; x >>>= 2; return x;"));
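        // -60 widens to int (0xFFFFFFC4), >>> 2 gives 0x3FFFFFF1, and narrowing back to byte keeps only
        // the low bits (0xF1 == -15); the short case below lands on -15 the same way (0xFFF1)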
// short
assertEquals((short) 15, exec("short x = 60; x >>>= 2; return x;"));
assertEquals((short) -15, exec("short x = (short) -60; x >>>= 2; return x;"));
// char
assertEquals((char) 15, exec("char x = (char) 60; x >>>= 2; return x;"));
// int
assertEquals(15, exec("int x = 60; x >>>= 2; return x;"));
assertEquals(-60 >>> 2, exec("int x = -60; x >>>= 2; return x;"));
// long
assertEquals(15L, exec("long x = 60L; x >>>= 2; return x;"));
assertEquals(-60L >>> 2, exec("long x = -60L; x >>>= 2; return x;"));
}
public void testAnd() {
// boolean
assertEquals(true, exec("boolean x = true; x &= true; return x;"));
assertEquals(false, exec("boolean x = true; x &= false; return x;"));
assertEquals(false, exec("boolean x = false; x &= true; return x;"));
assertEquals(false, exec("boolean x = false; x &= false; return x;"));
assertEquals(true, exec("Boolean x = true; x &= true; return x;"));
assertEquals(false, exec("Boolean x = true; x &= false; return x;"));
assertEquals(false, exec("Boolean x = false; x &= true; return x;"));
assertEquals(false, exec("Boolean x = false; x &= false; return x;"));
assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] &= true; return x[0];"));
assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] &= false; return x[0];"));
assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] &= true; return x[0];"));
assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] &= false; return x[0];"));
assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= true; return x[0];"));
assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= false; return x[0];"));
assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= true; return x[0];"));
assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= false; return x[0];"));
// byte
assertEquals((byte) (13 & 14), exec("byte x = 13; x &= 14; return x;"));
// short
assertEquals((short) (13 & 14), exec("short x = 13; x &= 14; return x;"));
// char
assertEquals((char) (13 & 14), exec("char x = 13; x &= 14; return x;"));
// int
assertEquals(13 & 14, exec("int x = 13; x &= 14; return x;"));
// long
assertEquals((long) (13 & 14), exec("long x = 13L; x &= 14; return x;"));
}
public void testOr() {
// boolean
assertEquals(true, exec("boolean x = true; x |= true; return x;"));
assertEquals(true, exec("boolean x = true; x |= false; return x;"));
assertEquals(true, exec("boolean x = false; x |= true; return x;"));
assertEquals(false, exec("boolean x = false; x |= false; return x;"));
assertEquals(true, exec("Boolean x = true; x |= true; return x;"));
assertEquals(true, exec("Boolean x = true; x |= false; return x;"));
assertEquals(true, exec("Boolean x = false; x |= true; return x;"));
assertEquals(false, exec("Boolean x = false; x |= false; return x;"));
assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] |= true; return x[0];"));
assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] |= false; return x[0];"));
assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] |= true; return x[0];"));
assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] |= false; return x[0];"));
assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= true; return x[0];"));
assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= false; return x[0];"));
assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= true; return x[0];"));
assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= false; return x[0];"));
// byte
assertEquals((byte) (13 | 14), exec("byte x = 13; x |= 14; return x;"));
// short
assertEquals((short) (13 | 14), exec("short x = 13; x |= 14; return x;"));
// char
assertEquals((char) (13 | 14), exec("char x = 13; x |= 14; return x;"));
// int
assertEquals(13 | 14, exec("int x = 13; x |= 14; return x;"));
// long
assertEquals((long) (13 | 14), exec("long x = 13L; x |= 14; return x;"));
}
public void testXor() {
// boolean
assertEquals(false, exec("boolean x = true; x ^= true; return x;"));
assertEquals(true, exec("boolean x = true; x ^= false; return x;"));
assertEquals(true, exec("boolean x = false; x ^= true; return x;"));
assertEquals(false, exec("boolean x = false; x ^= false; return x;"));
assertEquals(false, exec("Boolean x = true; x ^= true; return x;"));
assertEquals(true, exec("Boolean x = true; x ^= false; return x;"));
assertEquals(true, exec("Boolean x = false; x ^= true; return x;"));
assertEquals(false, exec("Boolean x = false; x ^= false; return x;"));
assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= true; return x[0];"));
assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= false; return x[0];"));
assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= true; return x[0];"));
assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= false; return x[0];"));
assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= true; return x[0];"));
assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= false; return x[0];"));
assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= true; return x[0];"));
assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= false; return x[0];"));
// byte
assertEquals((byte) (13 ^ 14), exec("byte x = 13; x ^= 14; return x;"));
// short
assertEquals((short) (13 ^ 14), exec("short x = 13; x ^= 14; return x;"));
// char
assertEquals((char) (13 ^ 14), exec("char x = 13; x ^= 14; return x;"));
// int
assertEquals(13 ^ 14, exec("int x = 13; x ^= 14; return x;"));
// long
assertEquals((long) (13 ^ 14), exec("long x = 13L; x ^= 14; return x;"));
}
}

View File

@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;

import java.util.ArrayList;
import java.util.HashMap;

public class ConditionalTests extends ScriptTestCase {
public void testBasic() {
assertEquals(2, exec("boolean x = true; return x ? 2 : 3;"));
assertEquals(3, exec("boolean x = false; return x ? 2 : 3;"));
assertEquals(3, exec("boolean x = false, y = true; return x && y ? 2 : 3;"));
assertEquals(2, exec("boolean x = true, y = true; return x && y ? 2 : 3;"));
assertEquals(2, exec("boolean x = true, y = false; return x || y ? 2 : 3;"));
assertEquals(3, exec("boolean x = false, y = false; return x || y ? 2 : 3;"));
}
public void testPrecedence() {
assertEquals(4, exec("boolean x = false, y = true; return x ? (y ? 2 : 3) : 4;"));
assertEquals(2, exec("boolean x = true, y = true; return x ? (y ? 2 : 3) : 4;"));
assertEquals(3, exec("boolean x = true, y = false; return x ? (y ? 2 : 3) : 4;"));
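        // the unparenthesized forms below parse the same way: x ? y ? 2 : 3 : 4 is x ? (y ? 2 : 3) : 4,
        // and x ? 2 : y ? 3 : 4 is x ? 2 : (y ? 3 : 4), since the middle operand runs to its matching ':'
        // and the operator is right-associative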
assertEquals(2, exec("boolean x = true, y = true; return x ? y ? 2 : 3 : 4;"));
assertEquals(4, exec("boolean x = false, y = true; return x ? y ? 2 : 3 : 4;"));
assertEquals(3, exec("boolean x = true, y = false; return x ? y ? 2 : 3 : 4;"));
assertEquals(3, exec("boolean x = false, y = true; return x ? 2 : y ? 3 : 4;"));
assertEquals(2, exec("boolean x = true, y = false; return x ? 2 : y ? 3 : 4;"));
assertEquals(4, exec("boolean x = false, y = false; return x ? 2 : y ? 3 : 4;"));
assertEquals(4, exec("boolean x = false, y = false; return (x ? true : y) ? 3 : 4;"));
assertEquals(4, exec("boolean x = true, y = false; return (x ? false : y) ? 3 : 4;"));
assertEquals(3, exec("boolean x = false, y = true; return (x ? false : y) ? 3 : 4;"));
assertEquals(2, exec("boolean x = true, y = false; return (x ? false : y) ? (x ? 3 : 4) : x ? 2 : 1;"));
assertEquals(2, exec("boolean x = true, y = false; return (x ? false : y) ? x ? 3 : 4 : x ? 2 : 1;"));
assertEquals(4, exec("boolean x = false, y = true; return x ? false : y ? x ? 3 : 4 : x ? 2 : 1;"));
}
public void testAssignment() {
assertEquals(4D, exec("boolean x = false; double z = x ? 2 : 4.0F; return z;"));
assertEquals((byte)7, exec("boolean x = false; int y = 2; byte z = x ? (byte)y : 7; return z;"));
assertEquals((byte)7, exec("boolean x = false; int y = 2; byte z = (byte)(x ? y : 7); return z;"));
assertEquals(ArrayList.class, exec("boolean x = false; Object z = x ? new HashMap() : new ArrayList(); return z;").getClass());
}
public void testNullArguments() {
assertEquals(null, exec("boolean b = false, c = true; Object x; Map y; return b && c ? x : y;"));
assertEquals(HashMap.class, exec("boolean b = false, c = true; Object x; Map y = new HashMap(); return b && c ? x : y;").getClass());
}
public void testPromotion() {
assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? 2 : 4.0F);"));
assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new Long(2) : new Float(4.0F));"));
assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? new HashMap() : new ArrayList()) == (y ? new Long(2) : new Float(4.0F));"));
assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new HashMap() : new ArrayList());"));
}
public void testIncompatibleAssignment() {
try {
exec("boolean x = false; byte z = x ? 2 : 4.0F; return z;");
fail("expected class cast exception");
} catch (ClassCastException expected) {}
try {
exec("boolean x = false; Map z = x ? 4 : (byte)7; return z;");
fail("expected class cast exception");
} catch (ClassCastException expected) {}
try {
exec("boolean x = false; Map z = x ? new HashMap() : new ArrayList(); return z;");
fail("expected class cast exception");
} catch (ClassCastException expected) {}
try {
exec("boolean x = false; int y = 2; byte z = x ? y : 7; return z;");
fail("expected class cast exception");
} catch (ClassCastException expected) {}
}
}

View File

@ -0,0 +1,914 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plan.a;

public class DefTests extends ScriptTestCase {
public void testNot() {
assertEquals(~1, exec("def x = (byte)1 return ~x"));
assertEquals(~1, exec("def x = (short)1 return ~x"));
assertEquals(~1, exec("def x = (char)1 return ~x"));
assertEquals(~1, exec("def x = 1 return ~x"));
assertEquals(~1L, exec("def x = 1L return ~x"));
}
public void testNeg() {
assertEquals(-1, exec("def x = (byte)1 return -x"));
assertEquals(-1, exec("def x = (short)1 return -x"));
assertEquals(-1, exec("def x = (char)1 return -x"));
assertEquals(-1, exec("def x = 1 return -x"));
assertEquals(-1L, exec("def x = 1L return -x"));
assertEquals(-1.0F, exec("def x = 1F return -x"));
assertEquals(-1.0, exec("def x = 1.0 return -x"));
}
public void testMul() {
assertEquals(4, exec("def x = (byte)2 def y = (byte)2 return x * y"));
assertEquals(4, exec("def x = (short)2 def y = (byte)2 return x * y"));
assertEquals(4, exec("def x = (char)2 def y = (byte)2 return x * y"));
assertEquals(4, exec("def x = (int)2 def y = (byte)2 return x * y"));
assertEquals(4L, exec("def x = (long)2 def y = (byte)2 return x * y"));
assertEquals(4F, exec("def x = (float)2 def y = (byte)2 return x * y"));
assertEquals(4D, exec("def x = (double)2 def y = (byte)2 return x * y"));
assertEquals(4, exec("def x = (byte)2 def y = (short)2 return x * y"));
assertEquals(4, exec("def x = (short)2 def y = (short)2 return x * y"));
assertEquals(4, exec("def x = (char)2 def y = (short)2 return x * y"));
assertEquals(4, exec("def x = (int)2 def y = (short)2 return x * y"));
assertEquals(4L, exec("def x = (long)2 def y = (short)2 return x * y"));
assertEquals(4F, exec("def x = (float)2 def y = (short)2 return x * y"));
assertEquals(4D, exec("def x = (double)2 def y = (short)2 return x * y"));
assertEquals(4, exec("def x = (byte)2 def y = (char)2 return x * y"));
assertEquals(4, exec("def x = (short)2 def y = (char)2 return x * y"));
assertEquals(4, exec("def x = (char)2 def y = (char)2 return x * y"));
assertEquals(4, exec("def x = (int)2 def y = (char)2 return x * y"));
assertEquals(4L, exec("def x = (long)2 def y = (char)2 return x * y"));
assertEquals(4F, exec("def x = (float)2 def y = (char)2 return x * y"));
assertEquals(4D, exec("def x = (double)2 def y = (char)2 return x * y"));
assertEquals(4, exec("def x = (byte)2 def y = (int)2 return x * y"));
assertEquals(4, exec("def x = (short)2 def y = (int)2 return x * y"));
assertEquals(4, exec("def x = (char)2 def y = (int)2 return x * y"));
assertEquals(4, exec("def x = (int)2 def y = (int)2 return x * y"));
assertEquals(4L, exec("def x = (long)2 def y = (int)2 return x * y"));
assertEquals(4F, exec("def x = (float)2 def y = (int)2 return x * y"));
assertEquals(4D, exec("def x = (double)2 def y = (int)2 return x * y"));
assertEquals(4L, exec("def x = (byte)2 def y = (long)2 return x * y"));
assertEquals(4L, exec("def x = (short)2 def y = (long)2 return x * y"));
assertEquals(4L, exec("def x = (char)2 def y = (long)2 return x * y"));
assertEquals(4L, exec("def x = (int)2 def y = (long)2 return x * y"));
assertEquals(4L, exec("def x = (long)2 def y = (long)2 return x * y"));
assertEquals(4F, exec("def x = (float)2 def y = (long)2 return x * y"));
assertEquals(4D, exec("def x = (double)2 def y = (long)2 return x * y"));
assertEquals(4F, exec("def x = (byte)2 def y = (float)2 return x * y"));
assertEquals(4F, exec("def x = (short)2 def y = (float)2 return x * y"));
assertEquals(4F, exec("def x = (char)2 def y = (float)2 return x * y"));
assertEquals(4F, exec("def x = (int)2 def y = (float)2 return x * y"));
assertEquals(4F, exec("def x = (long)2 def y = (float)2 return x * y"));
assertEquals(4F, exec("def x = (float)2 def y = (float)2 return x * y"));
assertEquals(4D, exec("def x = (double)2 def y = (float)2 return x * y"));
assertEquals(4D, exec("def x = (byte)2 def y = (double)2 return x * y"));
assertEquals(4D, exec("def x = (short)2 def y = (double)2 return x * y"));
assertEquals(4D, exec("def x = (char)2 def y = (double)2 return x * y"));
assertEquals(4D, exec("def x = (int)2 def y = (double)2 return x * y"));
assertEquals(4D, exec("def x = (long)2 def y = (double)2 return x * y"));
assertEquals(4D, exec("def x = (float)2 def y = (double)2 return x * y"));
assertEquals(4D, exec("def x = (double)2 def y = (double)2 return x * y"));
assertEquals(4, exec("def x = (Byte)2 def y = (byte)2 return x * y"));
assertEquals(4, exec("def x = (Short)2 def y = (short)2 return x * y"));
assertEquals(4, exec("def x = (Character)2 def y = (char)2 return x * y"));
assertEquals(4, exec("def x = (Integer)2 def y = (int)2 return x * y"));
assertEquals(4L, exec("def x = (Long)2 def y = (long)2 return x * y"));
assertEquals(4F, exec("def x = (Float)2 def y = (float)2 return x * y"));
assertEquals(4D, exec("def x = (Double)2 def y = (double)2 return x * y"));
}
public void testDiv() {
assertEquals(1, exec("def x = (byte)2 def y = (byte)2 return x / y"));
assertEquals(1, exec("def x = (short)2 def y = (byte)2 return x / y"));
assertEquals(1, exec("def x = (char)2 def y = (byte)2 return x / y"));
assertEquals(1, exec("def x = (int)2 def y = (byte)2 return x / y"));
assertEquals(1L, exec("def x = (long)2 def y = (byte)2 return x / y"));
assertEquals(1F, exec("def x = (float)2 def y = (byte)2 return x / y"));
assertEquals(1D, exec("def x = (double)2 def y = (byte)2 return x / y"));
assertEquals(1, exec("def x = (byte)2 def y = (short)2 return x / y"));
assertEquals(1, exec("def x = (short)2 def y = (short)2 return x / y"));
assertEquals(1, exec("def x = (char)2 def y = (short)2 return x / y"));
assertEquals(1, exec("def x = (int)2 def y = (short)2 return x / y"));
assertEquals(1L, exec("def x = (long)2 def y = (short)2 return x / y"));
assertEquals(1F, exec("def x = (float)2 def y = (short)2 return x / y"));
assertEquals(1D, exec("def x = (double)2 def y = (short)2 return x / y"));
assertEquals(1, exec("def x = (byte)2 def y = (char)2 return x / y"));
assertEquals(1, exec("def x = (short)2 def y = (char)2 return x / y"));
assertEquals(1, exec("def x = (char)2 def y = (char)2 return x / y"));
assertEquals(1, exec("def x = (int)2 def y = (char)2 return x / y"));
assertEquals(1L, exec("def x = (long)2 def y = (char)2 return x / y"));
assertEquals(1F, exec("def x = (float)2 def y = (char)2 return x / y"));
assertEquals(1D, exec("def x = (double)2 def y = (char)2 return x / y"));
assertEquals(1, exec("def x = (byte)2 def y = (int)2 return x / y"));
assertEquals(1, exec("def x = (short)2 def y = (int)2 return x / y"));
assertEquals(1, exec("def x = (char)2 def y = (int)2 return x / y"));
assertEquals(1, exec("def x = (int)2 def y = (int)2 return x / y"));
assertEquals(1L, exec("def x = (long)2 def y = (int)2 return x / y"));
assertEquals(1F, exec("def x = (float)2 def y = (int)2 return x / y"));
assertEquals(1D, exec("def x = (double)2 def y = (int)2 return x / y"));
assertEquals(1L, exec("def x = (byte)2 def y = (long)2 return x / y"));
assertEquals(1L, exec("def x = (short)2 def y = (long)2 return x / y"));
assertEquals(1L, exec("def x = (char)2 def y = (long)2 return x / y"));
assertEquals(1L, exec("def x = (int)2 def y = (long)2 return x / y"));
assertEquals(1L, exec("def x = (long)2 def y = (long)2 return x / y"));
assertEquals(1F, exec("def x = (float)2 def y = (long)2 return x / y"));
assertEquals(1D, exec("def x = (double)2 def y = (long)2 return x / y"));
assertEquals(1F, exec("def x = (byte)2 def y = (float)2 return x / y"));
assertEquals(1F, exec("def x = (short)2 def y = (float)2 return x / y"));
assertEquals(1F, exec("def x = (char)2 def y = (float)2 return x / y"));
assertEquals(1F, exec("def x = (int)2 def y = (float)2 return x / y"));
assertEquals(1F, exec("def x = (long)2 def y = (float)2 return x / y"));
assertEquals(1F, exec("def x = (float)2 def y = (float)2 return x / y"));
assertEquals(1D, exec("def x = (double)2 def y = (float)2 return x / y"));
assertEquals(1D, exec("def x = (byte)2 def y = (double)2 return x / y"));
assertEquals(1D, exec("def x = (short)2 def y = (double)2 return x / y"));
assertEquals(1D, exec("def x = (char)2 def y = (double)2 return x / y"));
assertEquals(1D, exec("def x = (int)2 def y = (double)2 return x / y"));
assertEquals(1D, exec("def x = (long)2 def y = (double)2 return x / y"));
assertEquals(1D, exec("def x = (float)2 def y = (double)2 return x / y"));
assertEquals(1D, exec("def x = (double)2 def y = (double)2 return x / y"));
assertEquals(1, exec("def x = (Byte)2 def y = (byte)2 return x / y"));
assertEquals(1, exec("def x = (Short)2 def y = (short)2 return x / y"));
assertEquals(1, exec("def x = (Character)2 def y = (char)2 return x / y"));
assertEquals(1, exec("def x = (Integer)2 def y = (int)2 return x / y"));
assertEquals(1L, exec("def x = (Long)2 def y = (long)2 return x / y"));
assertEquals(1F, exec("def x = (Float)2 def y = (float)2 return x / y"));
assertEquals(1D, exec("def x = (Double)2 def y = (double)2 return x / y"));
}
public void testRem() {
assertEquals(0, exec("def x = (byte)2 def y = (byte)2 return x % y"));
assertEquals(0, exec("def x = (short)2 def y = (byte)2 return x % y"));
assertEquals(0, exec("def x = (char)2 def y = (byte)2 return x % y"));
assertEquals(0, exec("def x = (int)2 def y = (byte)2 return x % y"));
assertEquals(0L, exec("def x = (long)2 def y = (byte)2 return x % y"));
assertEquals(0F, exec("def x = (float)2 def y = (byte)2 return x % y"));
assertEquals(0D, exec("def x = (double)2 def y = (byte)2 return x % y"));
assertEquals(0, exec("def x = (byte)2 def y = (short)2 return x % y"));
assertEquals(0, exec("def x = (short)2 def y = (short)2 return x % y"));
assertEquals(0, exec("def x = (char)2 def y = (short)2 return x % y"));
assertEquals(0, exec("def x = (int)2 def y = (short)2 return x % y"));
assertEquals(0L, exec("def x = (long)2 def y = (short)2 return x % y"));
assertEquals(0F, exec("def x = (float)2 def y = (short)2 return x % y"));
assertEquals(0D, exec("def x = (double)2 def y = (short)2 return x % y"));
assertEquals(0, exec("def x = (byte)2 def y = (char)2 return x % y"));
assertEquals(0, exec("def x = (short)2 def y = (char)2 return x % y"));
assertEquals(0, exec("def x = (char)2 def y = (char)2 return x % y"));
assertEquals(0, exec("def x = (int)2 def y = (char)2 return x % y"));
assertEquals(0L, exec("def x = (long)2 def y = (char)2 return x % y"));
assertEquals(0F, exec("def x = (float)2 def y = (char)2 return x % y"));
assertEquals(0D, exec("def x = (double)2 def y = (char)2 return x % y"));
assertEquals(0, exec("def x = (byte)2 def y = (int)2 return x % y"));
assertEquals(0, exec("def x = (short)2 def y = (int)2 return x % y"));
assertEquals(0, exec("def x = (char)2 def y = (int)2 return x % y"));
assertEquals(0, exec("def x = (int)2 def y = (int)2 return x % y"));
assertEquals(0L, exec("def x = (long)2 def y = (int)2 return x % y"));
assertEquals(0F, exec("def x = (float)2 def y = (int)2 return x % y"));
assertEquals(0D, exec("def x = (double)2 def y = (int)2 return x % y"));
assertEquals(0L, exec("def x = (byte)2 def y = (long)2 return x % y"));
assertEquals(0L, exec("def x = (short)2 def y = (long)2 return x % y"));
assertEquals(0L, exec("def x = (char)2 def y = (long)2 return x % y"));
assertEquals(0L, exec("def x = (int)2 def y = (long)2 return x % y"));
assertEquals(0L, exec("def x = (long)2 def y = (long)2 return x % y"));
assertEquals(0F, exec("def x = (float)2 def y = (long)2 return x % y"));
assertEquals(0D, exec("def x = (double)2 def y = (long)2 return x % y"));
assertEquals(0F, exec("def x = (byte)2 def y = (float)2 return x % y"));
assertEquals(0F, exec("def x = (short)2 def y = (float)2 return x % y"));
assertEquals(0F, exec("def x = (char)2 def y = (float)2 return x % y"));
assertEquals(0F, exec("def x = (int)2 def y = (float)2 return x % y"));
assertEquals(0F, exec("def x = (long)2 def y = (float)2 return x % y"));
assertEquals(0F, exec("def x = (float)2 def y = (float)2 return x % y"));
assertEquals(0D, exec("def x = (double)2 def y = (float)2 return x % y"));
assertEquals(0D, exec("def x = (byte)2 def y = (double)2 return x % y"));
assertEquals(0D, exec("def x = (short)2 def y = (double)2 return x % y"));
assertEquals(0D, exec("def x = (char)2 def y = (double)2 return x % y"));
assertEquals(0D, exec("def x = (int)2 def y = (double)2 return x % y"));
assertEquals(0D, exec("def x = (long)2 def y = (double)2 return x % y"));
assertEquals(0D, exec("def x = (float)2 def y = (double)2 return x % y"));
assertEquals(0D, exec("def x = (double)2 def y = (double)2 return x % y"));
assertEquals(0, exec("def x = (Byte)2 def y = (byte)2 return x % y"));
assertEquals(0, exec("def x = (Short)2 def y = (short)2 return x % y"));
assertEquals(0, exec("def x = (Character)2 def y = (char)2 return x % y"));
assertEquals(0, exec("def x = (Integer)2 def y = (int)2 return x % y"));
assertEquals(0L, exec("def x = (Long)2 def y = (long)2 return x % y"));
assertEquals(0F, exec("def x = (Float)2 def y = (float)2 return x % y"));
assertEquals(0D, exec("def x = (Double)2 def y = (double)2 return x % y"));
}
public void testAdd() {
assertEquals(2, exec("def x = (byte)1 def y = (byte)1 return x + y"));
assertEquals(2, exec("def x = (short)1 def y = (byte)1 return x + y"));
assertEquals(2, exec("def x = (char)1 def y = (byte)1 return x + y"));
assertEquals(2, exec("def x = (int)1 def y = (byte)1 return x + y"));
assertEquals(2L, exec("def x = (long)1 def y = (byte)1 return x + y"));
assertEquals(2F, exec("def x = (float)1 def y = (byte)1 return x + y"));
assertEquals(2D, exec("def x = (double)1 def y = (byte)1 return x + y"));
assertEquals(2, exec("def x = (byte)1 def y = (short)1 return x + y"));
assertEquals(2, exec("def x = (short)1 def y = (short)1 return x + y"));
assertEquals(2, exec("def x = (char)1 def y = (short)1 return x + y"));
assertEquals(2, exec("def x = (int)1 def y = (short)1 return x + y"));
assertEquals(2L, exec("def x = (long)1 def y = (short)1 return x + y"));
assertEquals(2F, exec("def x = (float)1 def y = (short)1 return x + y"));
assertEquals(2D, exec("def x = (double)1 def y = (short)1 return x + y"));
assertEquals(2, exec("def x = (byte)1 def y = (char)1 return x + y"));
assertEquals(2, exec("def x = (short)1 def y = (char)1 return x + y"));
assertEquals(2, exec("def x = (char)1 def y = (char)1 return x + y"));
assertEquals(2, exec("def x = (int)1 def y = (char)1 return x + y"));
assertEquals(2L, exec("def x = (long)1 def y = (char)1 return x + y"));
assertEquals(2F, exec("def x = (float)1 def y = (char)1 return x + y"));
assertEquals(2D, exec("def x = (double)1 def y = (char)1 return x + y"));
assertEquals(2, exec("def x = (byte)1 def y = (int)1 return x + y"));
assertEquals(2, exec("def x = (short)1 def y = (int)1 return x + y"));
assertEquals(2, exec("def x = (char)1 def y = (int)1 return x + y"));
assertEquals(2, exec("def x = (int)1 def y = (int)1 return x + y"));
assertEquals(2L, exec("def x = (long)1 def y = (int)1 return x + y"));
assertEquals(2F, exec("def x = (float)1 def y = (int)1 return x + y"));
assertEquals(2D, exec("def x = (double)1 def y = (int)1 return x + y"));
assertEquals(2L, exec("def x = (byte)1 def y = (long)1 return x + y"));
assertEquals(2L, exec("def x = (short)1 def y = (long)1 return x + y"));
assertEquals(2L, exec("def x = (char)1 def y = (long)1 return x + y"));
assertEquals(2L, exec("def x = (int)1 def y = (long)1 return x + y"));
assertEquals(2L, exec("def x = (long)1 def y = (long)1 return x + y"));
assertEquals(2F, exec("def x = (float)1 def y = (long)1 return x + y"));
assertEquals(2D, exec("def x = (double)1 def y = (long)1 return x + y"));
assertEquals(2F, exec("def x = (byte)1 def y = (float)1 return x + y"));
assertEquals(2F, exec("def x = (short)1 def y = (float)1 return x + y"));
assertEquals(2F, exec("def x = (char)1 def y = (float)1 return x + y"));
assertEquals(2F, exec("def x = (int)1 def y = (float)1 return x + y"));
assertEquals(2F, exec("def x = (long)1 def y = (float)1 return x + y"));
assertEquals(2F, exec("def x = (float)1 def y = (float)1 return x + y"));
assertEquals(2D, exec("def x = (double)1 def y = (float)1 return x + y"));
assertEquals(2D, exec("def x = (byte)1 def y = (double)1 return x + y"));
assertEquals(2D, exec("def x = (short)1 def y = (double)1 return x + y"));
assertEquals(2D, exec("def x = (char)1 def y = (double)1 return x + y"));
assertEquals(2D, exec("def x = (int)1 def y = (double)1 return x + y"));
assertEquals(2D, exec("def x = (long)1 def y = (double)1 return x + y"));
assertEquals(2D, exec("def x = (float)1 def y = (double)1 return x + y"));
assertEquals(2D, exec("def x = (double)1 def y = (double)1 return x + y"));
assertEquals(2, exec("def x = (Byte)1 def y = (byte)1 return x + y"));
assertEquals(2, exec("def x = (Short)1 def y = (short)1 return x + y"));
assertEquals(2, exec("def x = (Character)1 def y = (char)1 return x + y"));
assertEquals(2, exec("def x = (Integer)1 def y = (int)1 return x + y"));
assertEquals(2L, exec("def x = (Long)1 def y = (long)1 return x + y"));
assertEquals(2F, exec("def x = (Float)1 def y = (float)1 return x + y"));
assertEquals(2D, exec("def x = (Double)1 def y = (double)1 return x + y"));
}
public void testSub() {
assertEquals(0, exec("def x = (byte)1 def y = (byte)1 return x - y"));
assertEquals(0, exec("def x = (short)1 def y = (byte)1 return x - y"));
assertEquals(0, exec("def x = (char)1 def y = (byte)1 return x - y"));
assertEquals(0, exec("def x = (int)1 def y = (byte)1 return x - y"));
assertEquals(0L, exec("def x = (long)1 def y = (byte)1 return x - y"));
assertEquals(0F, exec("def x = (float)1 def y = (byte)1 return x - y"));
assertEquals(0D, exec("def x = (double)1 def y = (byte)1 return x - y"));
assertEquals(0, exec("def x = (byte)1 def y = (short)1 return x - y"));
assertEquals(0, exec("def x = (short)1 def y = (short)1 return x - y"));
assertEquals(0, exec("def x = (char)1 def y = (short)1 return x - y"));
assertEquals(0, exec("def x = (int)1 def y = (short)1 return x - y"));
assertEquals(0L, exec("def x = (long)1 def y = (short)1 return x - y"));
assertEquals(0F, exec("def x = (float)1 def y = (short)1 return x - y"));
assertEquals(0D, exec("def x = (double)1 def y = (short)1 return x - y"));
assertEquals(0, exec("def x = (byte)1 def y = (char)1 return x - y"));
assertEquals(0, exec("def x = (short)1 def y = (char)1 return x - y"));
assertEquals(0, exec("def x = (char)1 def y = (char)1 return x - y"));
assertEquals(0, exec("def x = (int)1 def y = (char)1 return x - y"));
assertEquals(0L, exec("def x = (long)1 def y = (char)1 return x - y"));
assertEquals(0F, exec("def x = (float)1 def y = (char)1 return x - y"));
assertEquals(0D, exec("def x = (double)1 def y = (char)1 return x - y"));
assertEquals(0, exec("def x = (byte)1 def y = (int)1 return x - y"));
assertEquals(0, exec("def x = (short)1 def y = (int)1 return x - y"));
assertEquals(0, exec("def x = (char)1 def y = (int)1 return x - y"));
assertEquals(0, exec("def x = (int)1 def y = (int)1 return x - y"));
assertEquals(0L, exec("def x = (long)1 def y = (int)1 return x - y"));
assertEquals(0F, exec("def x = (float)1 def y = (int)1 return x - y"));
assertEquals(0D, exec("def x = (double)1 def y = (int)1 return x - y"));
assertEquals(0L, exec("def x = (byte)1 def y = (long)1 return x - y"));
assertEquals(0L, exec("def x = (short)1 def y = (long)1 return x - y"));
assertEquals(0L, exec("def x = (char)1 def y = (long)1 return x - y"));
assertEquals(0L, exec("def x = (int)1 def y = (long)1 return x - y"));
assertEquals(0L, exec("def x = (long)1 def y = (long)1 return x - y"));
assertEquals(0F, exec("def x = (float)1 def y = (long)1 return x - y"));
assertEquals(0D, exec("def x = (double)1 def y = (long)1 return x - y"));
assertEquals(0F, exec("def x = (byte)1 def y = (float)1 return x - y"));
assertEquals(0F, exec("def x = (short)1 def y = (float)1 return x - y"));
assertEquals(0F, exec("def x = (char)1 def y = (float)1 return x - y"));
assertEquals(0F, exec("def x = (int)1 def y = (float)1 return x - y"));
assertEquals(0F, exec("def x = (long)1 def y = (float)1 return x - y"));
assertEquals(0F, exec("def x = (float)1 def y = (float)1 return x - y"));
assertEquals(0D, exec("def x = (double)1 def y = (float)1 return x - y"));
assertEquals(0D, exec("def x = (byte)1 def y = (double)1 return x - y"));
assertEquals(0D, exec("def x = (short)1 def y = (double)1 return x - y"));
assertEquals(0D, exec("def x = (char)1 def y = (double)1 return x - y"));
assertEquals(0D, exec("def x = (int)1 def y = (double)1 return x - y"));
assertEquals(0D, exec("def x = (long)1 def y = (double)1 return x - y"));
assertEquals(0D, exec("def x = (float)1 def y = (double)1 return x - y"));
assertEquals(0D, exec("def x = (double)1 def y = (double)1 return x - y"));
assertEquals(0, exec("def x = (Byte)1 def y = (byte)1 return x - y"));
assertEquals(0, exec("def x = (Short)1 def y = (short)1 return x - y"));
assertEquals(0, exec("def x = (Character)1 def y = (char)1 return x - y"));
assertEquals(0, exec("def x = (Integer)1 def y = (int)1 return x - y"));
assertEquals(0L, exec("def x = (Long)1 def y = (long)1 return x - y"));
assertEquals(0F, exec("def x = (Float)1 def y = (float)1 return x - y"));
assertEquals(0D, exec("def x = (Double)1 def y = (double)1 return x - y"));
}
public void testLsh() {
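        // judging by the expected values, a def shift produces an int unless either operand is
        // long, float, or double, in which case the result is a long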
assertEquals(2, exec("def x = (byte)1 def y = (byte)1 return x << y"));
assertEquals(2, exec("def x = (short)1 def y = (byte)1 return x << y"));
assertEquals(2, exec("def x = (char)1 def y = (byte)1 return x << y"));
assertEquals(2, exec("def x = (int)1 def y = (byte)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (byte)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (byte)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (byte)1 return x << y"));
assertEquals(2, exec("def x = (byte)1 def y = (short)1 return x << y"));
assertEquals(2, exec("def x = (short)1 def y = (short)1 return x << y"));
assertEquals(2, exec("def x = (char)1 def y = (short)1 return x << y"));
assertEquals(2, exec("def x = (int)1 def y = (short)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (short)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (short)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (short)1 return x << y"));
assertEquals(2, exec("def x = (byte)1 def y = (char)1 return x << y"));
assertEquals(2, exec("def x = (short)1 def y = (char)1 return x << y"));
assertEquals(2, exec("def x = (char)1 def y = (char)1 return x << y"));
assertEquals(2, exec("def x = (int)1 def y = (char)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (char)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (char)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (char)1 return x << y"));
assertEquals(2, exec("def x = (byte)1 def y = (int)1 return x << y"));
assertEquals(2, exec("def x = (short)1 def y = (int)1 return x << y"));
assertEquals(2, exec("def x = (char)1 def y = (int)1 return x << y"));
assertEquals(2, exec("def x = (int)1 def y = (int)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (int)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (int)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (int)1 return x << y"));
assertEquals(2L, exec("def x = (byte)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (short)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (char)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (int)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (byte)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (short)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (char)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (int)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (byte)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (short)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (char)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (int)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (long)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (float)1 def y = (double)1 return x << y"));
assertEquals(2L, exec("def x = (double)1 def y = (double)1 return x << y"));
assertEquals(2, exec("def x = (Byte)1 def y = (byte)1 return x << y"));
assertEquals(2, exec("def x = (Short)1 def y = (short)1 return x << y"));
assertEquals(2, exec("def x = (Character)1 def y = (char)1 return x << y"));
assertEquals(2, exec("def x = (Integer)1 def y = (int)1 return x << y"));
assertEquals(2L, exec("def x = (Long)1 def y = (long)1 return x << y"));
assertEquals(2L, exec("def x = (Float)1 def y = (float)1 return x << y"));
assertEquals(2L, exec("def x = (Double)1 def y = (double)1 return x << y"));
}
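
// Arithmetic right shift (>>) on def operands: 4 >> 1 is expected to be 2,
// with the same widening-to-long expectations as the left-shift cases above.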
public void testRsh() {
assertEquals(2, exec("def x = (byte)4 def y = (byte)1 return x >> y"));
assertEquals(2, exec("def x = (short)4 def y = (byte)1 return x >> y"));
assertEquals(2, exec("def x = (char)4 def y = (byte)1 return x >> y"));
assertEquals(2, exec("def x = (int)4 def y = (byte)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (byte)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (byte)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (byte)1 return x >> y"));
assertEquals(2, exec("def x = (byte)4 def y = (short)1 return x >> y"));
assertEquals(2, exec("def x = (short)4 def y = (short)1 return x >> y"));
assertEquals(2, exec("def x = (char)4 def y = (short)1 return x >> y"));
assertEquals(2, exec("def x = (int)4 def y = (short)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (short)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (short)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (short)1 return x >> y"));
assertEquals(2, exec("def x = (byte)4 def y = (char)1 return x >> y"));
assertEquals(2, exec("def x = (short)4 def y = (char)1 return x >> y"));
assertEquals(2, exec("def x = (char)4 def y = (char)1 return x >> y"));
assertEquals(2, exec("def x = (int)4 def y = (char)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (char)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (char)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (char)1 return x >> y"));
assertEquals(2, exec("def x = (byte)4 def y = (int)1 return x >> y"));
assertEquals(2, exec("def x = (short)4 def y = (int)1 return x >> y"));
assertEquals(2, exec("def x = (char)4 def y = (int)1 return x >> y"));
assertEquals(2, exec("def x = (int)4 def y = (int)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (int)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (int)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (int)1 return x >> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (short)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (char)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (int)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (short)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (char)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (int)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (short)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (char)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (int)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (long)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (float)4 def y = (double)1 return x >> y"));
assertEquals(2L, exec("def x = (double)4 def y = (double)1 return x >> y"));
assertEquals(2, exec("def x = (Byte)4 def y = (byte)1 return x >> y"));
assertEquals(2, exec("def x = (Short)4 def y = (short)1 return x >> y"));
assertEquals(2, exec("def x = (Character)4 def y = (char)1 return x >> y"));
assertEquals(2, exec("def x = (Integer)4 def y = (int)1 return x >> y"));
assertEquals(2L, exec("def x = (Long)4 def y = (long)1 return x >> y"));
assertEquals(2L, exec("def x = (Float)4 def y = (float)1 return x >> y"));
assertEquals(2L, exec("def x = (Double)4 def y = (double)1 return x >> y"));
}
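
// Unsigned right shift (>>>) on def operands: 4 >>> 1 is expected to be 2 (2L with long/float/double operands).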
public void testUsh() {
assertEquals(2, exec("def x = (byte)4 def y = (byte)1 return x >>> y"));
assertEquals(2, exec("def x = (short)4 def y = (byte)1 return x >>> y"));
assertEquals(2, exec("def x = (char)4 def y = (byte)1 return x >>> y"));
assertEquals(2, exec("def x = (int)4 def y = (byte)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (byte)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (byte)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (byte)1 return x >>> y"));
assertEquals(2, exec("def x = (byte)4 def y = (short)1 return x >>> y"));
assertEquals(2, exec("def x = (short)4 def y = (short)1 return x >>> y"));
assertEquals(2, exec("def x = (char)4 def y = (short)1 return x >>> y"));
assertEquals(2, exec("def x = (int)4 def y = (short)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (short)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (short)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (short)1 return x >>> y"));
assertEquals(2, exec("def x = (byte)4 def y = (char)1 return x >>> y"));
assertEquals(2, exec("def x = (short)4 def y = (char)1 return x >>> y"));
assertEquals(2, exec("def x = (char)4 def y = (char)1 return x >>> y"));
assertEquals(2, exec("def x = (int)4 def y = (char)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (char)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (char)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (char)1 return x >>> y"));
assertEquals(2, exec("def x = (byte)4 def y = (int)1 return x >>> y"));
assertEquals(2, exec("def x = (short)4 def y = (int)1 return x >>> y"));
assertEquals(2, exec("def x = (char)4 def y = (int)1 return x >>> y"));
assertEquals(2, exec("def x = (int)4 def y = (int)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (int)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (int)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (int)1 return x >>> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (short)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (char)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (int)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (short)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (char)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (int)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (byte)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (short)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (char)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (int)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (long)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (float)4 def y = (double)1 return x >>> y"));
assertEquals(2L, exec("def x = (double)4 def y = (double)1 return x >>> y"));
assertEquals(2, exec("def x = (Byte)4 def y = (byte)1 return x >>> y"));
assertEquals(2, exec("def x = (Short)4 def y = (short)1 return x >>> y"));
assertEquals(2, exec("def x = (Character)4 def y = (char)1 return x >>> y"));
assertEquals(2, exec("def x = (Integer)4 def y = (int)1 return x >>> y"));
assertEquals(2L, exec("def x = (Long)4 def y = (long)1 return x >>> y"));
assertEquals(2L, exec("def x = (Float)4 def y = (float)1 return x >>> y"));
assertEquals(2L, exec("def x = (Double)4 def y = (double)1 return x >>> y"));
}
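
// Bitwise AND (&) on def operands: 4 & 1 is expected to be 0 (0L with long/float/double operands).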
public void testAnd() {
assertEquals(0, exec("def x = (byte)4 def y = (byte)1 return x & y"));
assertEquals(0, exec("def x = (short)4 def y = (byte)1 return x & y"));
assertEquals(0, exec("def x = (char)4 def y = (byte)1 return x & y"));
assertEquals(0, exec("def x = (int)4 def y = (byte)1 return x & y"));
assertEquals(0L, exec("def x = (long)4 def y = (byte)1 return x & y"));
assertEquals(0L, exec("def x = (float)4 def y = (byte)1 return x & y"));
assertEquals(0L, exec("def x = (double)4 def y = (byte)1 return x & y"));
assertEquals(0, exec("def x = (byte)4 def y = (short)1 return x & y"));
assertEquals(0, exec("def x = (short)4 def y = (short)1 return x & y"));
assertEquals(0, exec("def x = (char)4 def y = (short)1 return x & y"));
assertEquals(0, exec("def x = (int)4 def y = (short)1 return x & y"));
assertEquals(0L, exec("def x = (long)4 def y = (short)1 return x & y"));
assertEquals(0L, exec("def x = (float)4 def y = (short)1 return x & y"));
assertEquals(0L, exec("def x = (double)4 def y = (short)1 return x & y"));
assertEquals(0, exec("def x = (byte)4 def y = (char)1 return x & y"));
assertEquals(0, exec("def x = (short)4 def y = (char)1 return x & y"));
assertEquals(0, exec("def x = (char)4 def y = (char)1 return x & y"));
assertEquals(0, exec("def x = (int)4 def y = (char)1 return x & y"));
assertEquals(0L, exec("def x = (long)4 def y = (char)1 return x & y"));
assertEquals(0L, exec("def x = (float)4 def y = (char)1 return x & y"));
assertEquals(0L, exec("def x = (double)4 def y = (char)1 return x & y"));
assertEquals(0, exec("def x = (byte)4 def y = (int)1 return x & y"));
assertEquals(0, exec("def x = (short)4 def y = (int)1 return x & y"));
assertEquals(0, exec("def x = (char)4 def y = (int)1 return x & y"));
assertEquals(0, exec("def x = (int)4 def y = (int)1 return x & y"));
assertEquals(0L, exec("def x = (long)4 def y = (int)1 return x & y"));
assertEquals(0L, exec("def x = (float)4 def y = (int)1 return x & y"));
assertEquals(0L, exec("def x = (double)4 def y = (int)1 return x & y"));
assertEquals(0L, exec("def x = (byte)4 def y = (long)1 return x & y"));
assertEquals(0L, exec("def x = (short)4 def y = (long)1 return x & y"));
assertEquals(0L, exec("def x = (char)4 def y = (long)1 return x & y"));
assertEquals(0L, exec("def x = (int)4 def y = (long)1 return x & y"));
assertEquals(0L, exec("def x = (long)4 def y = (long)1 return x & y"));
assertEquals(0L, exec("def x = (float)4 def y = (long)1 return x & y"));
assertEquals(0L, exec("def x = (double)4 def y = (long)1 return x & y"));
assertEquals(0L, exec("def x = (byte)4 def y = (float)1 return x & y"));
assertEquals(0L, exec("def x = (short)4 def y = (float)1 return x & y"));
assertEquals(0L, exec("def x = (char)4 def y = (float)1 return x & y"));
assertEquals(0L, exec("def x = (int)4 def y = (float)1 return x & y"));
assertEquals(0L, exec("def x = (long)4 def y = (float)1 return x & y"));
assertEquals(0L, exec("def x = (float)4 def y = (float)1 return x & y"));
assertEquals(0L, exec("def x = (double)4 def y = (float)1 return x & y"));
assertEquals(0L, exec("def x = (byte)4 def y = (double)1 return x & y"));
assertEquals(0L, exec("def x = (short)4 def y = (double)1 return x & y"));
assertEquals(0L, exec("def x = (char)4 def y = (double)1 return x & y"));
assertEquals(0L, exec("def x = (int)4 def y = (double)1 return x & y"));
assertEquals(0L, exec("def x = (long)4 def y = (double)1 return x & y"));
assertEquals(0L, exec("def x = (float)4 def y = (double)1 return x & y"));
assertEquals(0L, exec("def x = (double)4 def y = (double)1 return x & y"));
assertEquals(0, exec("def x = (Byte)4 def y = (byte)1 return x & y"));
assertEquals(0, exec("def x = (Short)4 def y = (short)1 return x & y"));
assertEquals(0, exec("def x = (Character)4 def y = (char)1 return x & y"));
assertEquals(0, exec("def x = (Integer)4 def y = (int)1 return x & y"));
assertEquals(0L, exec("def x = (Long)4 def y = (long)1 return x & y"));
assertEquals(0L, exec("def x = (Float)4 def y = (float)1 return x & y"));
assertEquals(0L, exec("def x = (Double)4 def y = (double)1 return x & y"));
}
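
// Bitwise XOR (^) on def operands: 4 ^ 1 is expected to be 5 (5L with long/float/double operands).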
public void testXor() {
assertEquals(5, exec("def x = (byte)4 def y = (byte)1 return x ^ y"));
assertEquals(5, exec("def x = (short)4 def y = (byte)1 return x ^ y"));
assertEquals(5, exec("def x = (char)4 def y = (byte)1 return x ^ y"));
assertEquals(5, exec("def x = (int)4 def y = (byte)1 return x ^ y"));
assertEquals(5L, exec("def x = (long)4 def y = (byte)1 return x ^ y"));
assertEquals(5L, exec("def x = (float)4 def y = (byte)1 return x ^ y"));
assertEquals(5L, exec("def x = (double)4 def y = (byte)1 return x ^ y"));
assertEquals(5, exec("def x = (byte)4 def y = (short)1 return x ^ y"));
assertEquals(5, exec("def x = (short)4 def y = (short)1 return x ^ y"));
assertEquals(5, exec("def x = (char)4 def y = (short)1 return x ^ y"));
assertEquals(5, exec("def x = (int)4 def y = (short)1 return x ^ y"));
assertEquals(5L, exec("def x = (long)4 def y = (short)1 return x ^ y"));
assertEquals(5L, exec("def x = (float)4 def y = (short)1 return x ^ y"));
assertEquals(5L, exec("def x = (double)4 def y = (short)1 return x ^ y"));
assertEquals(5, exec("def x = (byte)4 def y = (char)1 return x ^ y"));
assertEquals(5, exec("def x = (short)4 def y = (char)1 return x ^ y"));
assertEquals(5, exec("def x = (char)4 def y = (char)1 return x ^ y"));
assertEquals(5, exec("def x = (int)4 def y = (char)1 return x ^ y"));
assertEquals(5L, exec("def x = (long)4 def y = (char)1 return x ^ y"));
assertEquals(5L, exec("def x = (float)4 def y = (char)1 return x ^ y"));
assertEquals(5L, exec("def x = (double)4 def y = (char)1 return x ^ y"));
assertEquals(5, exec("def x = (byte)4 def y = (int)1 return x ^ y"));
assertEquals(5, exec("def x = (short)4 def y = (int)1 return x ^ y"));
assertEquals(5, exec("def x = (char)4 def y = (int)1 return x ^ y"));
assertEquals(5, exec("def x = (int)4 def y = (int)1 return x ^ y"));
assertEquals(5L, exec("def x = (long)4 def y = (int)1 return x ^ y"));
assertEquals(5L, exec("def x = (float)4 def y = (int)1 return x ^ y"));
assertEquals(5L, exec("def x = (double)4 def y = (int)1 return x ^ y"));
assertEquals(5L, exec("def x = (byte)4 def y = (long)1 return x ^ y"));
assertEquals(5L, exec("def x = (short)4 def y = (long)1 return x ^ y"));
assertEquals(5L, exec("def x = (char)4 def y = (long)1 return x ^ y"));
assertEquals(5L, exec("def x = (int)4 def y = (long)1 return x ^ y"));
assertEquals(5L, exec("def x = (long)4 def y = (long)1 return x ^ y"));
assertEquals(5L, exec("def x = (float)4 def y = (long)1 return x ^ y"));
assertEquals(5L, exec("def x = (double)4 def y = (long)1 return x ^ y"));
assertEquals(5L, exec("def x = (byte)4 def y = (float)1 return x ^ y"));
assertEquals(5L, exec("def x = (short)4 def y = (float)1 return x ^ y"));
assertEquals(5L, exec("def x = (char)4 def y = (float)1 return x ^ y"));
assertEquals(5L, exec("def x = (int)4 def y = (float)1 return x ^ y"));
assertEquals(5L, exec("def x = (long)4 def y = (float)1 return x ^ y"));
assertEquals(5L, exec("def x = (float)4 def y = (float)1 return x ^ y"));
assertEquals(5L, exec("def x = (double)4 def y = (float)1 return x ^ y"));
assertEquals(5L, exec("def x = (byte)4 def y = (double)1 return x ^ y"));
assertEquals(5L, exec("def x = (short)4 def y = (double)1 return x ^ y"));
assertEquals(5L, exec("def x = (char)4 def y = (double)1 return x ^ y"));
assertEquals(5L, exec("def x = (int)4 def y = (double)1 return x ^ y"));
assertEquals(5L, exec("def x = (long)4 def y = (double)1 return x ^ y"));
assertEquals(5L, exec("def x = (float)4 def y = (double)1 return x ^ y"));
assertEquals(5L, exec("def x = (double)4 def y = (double)1 return x ^ y"));
assertEquals(5, exec("def x = (Byte)4 def y = (byte)1 return x ^ y"));
assertEquals(5, exec("def x = (Short)4 def y = (short)1 return x ^ y"));
assertEquals(5, exec("def x = (Character)4 def y = (char)1 return x ^ y"));
assertEquals(5, exec("def x = (Integer)4 def y = (int)1 return x ^ y"));
assertEquals(5L, exec("def x = (Long)4 def y = (long)1 return x ^ y"));
assertEquals(5L, exec("def x = (Float)4 def y = (float)1 return x ^ y"));
assertEquals(5L, exec("def x = (Double)4 def y = (double)1 return x ^ y"));
}
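
// Bitwise OR (|) on def operands: 4 | 1 is expected to be 5 (5L with long/float/double operands).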
public void testOr() {
assertEquals(5, exec("def x = (byte)4 def y = (byte)1 return x | y"));
assertEquals(5, exec("def x = (short)4 def y = (byte)1 return x | y"));
assertEquals(5, exec("def x = (char)4 def y = (byte)1 return x | y"));
assertEquals(5, exec("def x = (int)4 def y = (byte)1 return x | y"));
assertEquals(5L, exec("def x = (long)4 def y = (byte)1 return x | y"));
assertEquals(5L, exec("def x = (float)4 def y = (byte)1 return x | y"));
assertEquals(5L, exec("def x = (double)4 def y = (byte)1 return x | y"));
assertEquals(5, exec("def x = (byte)4 def y = (short)1 return x | y"));
assertEquals(5, exec("def x = (short)4 def y = (short)1 return x | y"));
assertEquals(5, exec("def x = (char)4 def y = (short)1 return x | y"));
assertEquals(5, exec("def x = (int)4 def y = (short)1 return x | y"));
assertEquals(5L, exec("def x = (long)4 def y = (short)1 return x | y"));
assertEquals(5L, exec("def x = (float)4 def y = (short)1 return x | y"));
assertEquals(5L, exec("def x = (double)4 def y = (short)1 return x | y"));
assertEquals(5, exec("def x = (byte)4 def y = (char)1 return x | y"));
assertEquals(5, exec("def x = (short)4 def y = (char)1 return x | y"));
assertEquals(5, exec("def x = (char)4 def y = (char)1 return x | y"));
assertEquals(5, exec("def x = (int)4 def y = (char)1 return x | y"));
assertEquals(5L, exec("def x = (long)4 def y = (char)1 return x | y"));
assertEquals(5L, exec("def x = (float)4 def y = (char)1 return x | y"));
assertEquals(5L, exec("def x = (double)4 def y = (char)1 return x | y"));
assertEquals(5, exec("def x = (byte)4 def y = (int)1 return x | y"));
assertEquals(5, exec("def x = (short)4 def y = (int)1 return x | y"));
assertEquals(5, exec("def x = (char)4 def y = (int)1 return x | y"));
assertEquals(5, exec("def x = (int)4 def y = (int)1 return x | y"));
assertEquals(5L, exec("def x = (long)4 def y = (int)1 return x | y"));
assertEquals(5L, exec("def x = (float)4 def y = (int)1 return x | y"));
assertEquals(5L, exec("def x = (double)4 def y = (int)1 return x | y"));
assertEquals(5L, exec("def x = (byte)4 def y = (long)1 return x | y"));
assertEquals(5L, exec("def x = (short)4 def y = (long)1 return x | y"));
assertEquals(5L, exec("def x = (char)4 def y = (long)1 return x | y"));
assertEquals(5L, exec("def x = (int)4 def y = (long)1 return x | y"));
assertEquals(5L, exec("def x = (long)4 def y = (long)1 return x | y"));
assertEquals(5L, exec("def x = (float)4 def y = (long)1 return x | y"));
assertEquals(5L, exec("def x = (double)4 def y = (long)1 return x | y"));
assertEquals(5L, exec("def x = (byte)4 def y = (float)1 return x | y"));
assertEquals(5L, exec("def x = (short)4 def y = (float)1 return x | y"));
assertEquals(5L, exec("def x = (char)4 def y = (float)1 return x | y"));
assertEquals(5L, exec("def x = (int)4 def y = (float)1 return x | y"));
assertEquals(5L, exec("def x = (long)4 def y = (float)1 return x | y"));
assertEquals(5L, exec("def x = (float)4 def y = (float)1 return x | y"));
assertEquals(5L, exec("def x = (double)4 def y = (float)1 return x | y"));
assertEquals(5L, exec("def x = (byte)4 def y = (double)1 return x | y"));
assertEquals(5L, exec("def x = (short)4 def y = (double)1 return x | y"));
assertEquals(5L, exec("def x = (char)4 def y = (double)1 return x | y"));
assertEquals(5L, exec("def x = (int)4 def y = (double)1 return x | y"));
assertEquals(5L, exec("def x = (long)4 def y = (double)1 return x | y"));
assertEquals(5L, exec("def x = (float)4 def y = (double)1 return x | y"));
assertEquals(5L, exec("def x = (double)4 def y = (double)1 return x | y"));
assertEquals(5, exec("def x = (Byte)4 def y = (byte)1 return x | y"));
assertEquals(5, exec("def x = (Short)4 def y = (short)1 return x | y"));
assertEquals(5, exec("def x = (Character)4 def y = (char)1 return x | y"));
assertEquals(5, exec("def x = (Integer)4 def y = (int)1 return x | y"));
assertEquals(5L, exec("def x = (Long)4 def y = (long)1 return x | y"));
assertEquals(5L, exec("def x = (Float)4 def y = (float)1 return x | y"));
assertEquals(5L, exec("def x = (Double)4 def y = (double)1 return x | y"));
}
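
// Value equality (==) on def operands: mixed numeric widths compare by value,
// and HashMaps compare by contents (equals), including when both variables alias the same map.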
public void testEq() {
assertEquals(true, exec("def x = (byte)7 def y = (int)7 return x == y"));
assertEquals(true, exec("def x = (short)6 def y = (int)6 return x == y"));
assertEquals(true, exec("def x = (char)5 def y = (int)5 return x == y"));
assertEquals(true, exec("def x = (int)4 def y = (int)4 return x == y"));
assertEquals(false, exec("def x = (long)5 def y = (int)3 return x == y"));
assertEquals(false, exec("def x = (float)6 def y = (int)2 return x == y"));
assertEquals(false, exec("def x = (double)7 def y = (int)1 return x == y"));
assertEquals(true, exec("def x = (byte)7 def y = (double)7 return x == y"));
assertEquals(true, exec("def x = (short)6 def y = (double)6 return x == y"));
assertEquals(true, exec("def x = (char)5 def y = (double)5 return x == y"));
assertEquals(true, exec("def x = (int)4 def y = (double)4 return x == y"));
assertEquals(false, exec("def x = (long)5 def y = (double)3 return x == y"));
assertEquals(false, exec("def x = (float)6 def y = (double)2 return x == y"));
assertEquals(false, exec("def x = (double)7 def y = (double)1 return x == y"));
assertEquals(true, exec("def x = new HashMap() def y = new HashMap() return x == y"));
assertEquals(false, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() return x == y"));
assertEquals(true, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() y.put(3, 3) return x == y"));
assertEquals(true, exec("def x = new HashMap() def y = x x.put(3, 3) y.put(3, 3) return x == y"));
}
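
// Reference equality (===) on def operands: equal values boxed to different wrapper types do not match;
// (int)4 === (int)4 holds because small ints box to the same cached Integer, and aliased maps are identical.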
public void testEqr() {
assertEquals(false, exec("def x = (byte)7 def y = (int)7 return x === y"));
assertEquals(false, exec("def x = (short)6 def y = (int)6 return x === y"));
assertEquals(false, exec("def x = (char)5 def y = (int)5 return x === y"));
assertEquals(true, exec("def x = (int)4 def y = (int)4 return x === y"));
assertEquals(false, exec("def x = (long)5 def y = (int)3 return x === y"));
assertEquals(false, exec("def x = (float)6 def y = (int)2 return x === y"));
assertEquals(false, exec("def x = (double)7 def y = (int)1 return x === y"));
assertEquals(false, exec("def x = new HashMap() def y = new HashMap() return x === y"));
assertEquals(false, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() return x === y"));
assertEquals(false, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() y.put(3, 3) return x === y"));
assertEquals(true, exec("def x = new HashMap() def y = x x.put(3, 3) y.put(3, 3) return x === y"));
}
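
// Value inequality (!=) on def operands: expectations mirror the == cases above, negated.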
public void testNe() {
assertEquals(false, exec("def x = (byte)7 def y = (int)7 return x != y"));
assertEquals(false, exec("def x = (short)6 def y = (int)6 return x != y"));
assertEquals(false, exec("def x = (char)5 def y = (int)5 return x != y"));
assertEquals(false, exec("def x = (int)4 def y = (int)4 return x != y"));
assertEquals(true, exec("def x = (long)5 def y = (int)3 return x != y"));
assertEquals(true, exec("def x = (float)6 def y = (int)2 return x != y"));
assertEquals(true, exec("def x = (double)7 def y = (int)1 return x != y"));
assertEquals(false, exec("def x = (byte)7 def y = (double)7 return x != y"));
assertEquals(false, exec("def x = (short)6 def y = (double)6 return x != y"));
assertEquals(false, exec("def x = (char)5 def y = (double)5 return x != y"));
assertEquals(false, exec("def x = (int)4 def y = (double)4 return x != y"));
assertEquals(true, exec("def x = (long)5 def y = (double)3 return x != y"));
assertEquals(true, exec("def x = (float)6 def y = (double)2 return x != y"));
assertEquals(true, exec("def x = (double)7 def y = (double)1 return x != y"));
assertEquals(false, exec("def x = new HashMap() def y = new HashMap() return x != y"));
assertEquals(true, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() return x != y"));
assertEquals(false, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() y.put(3, 3) return x != y"));
assertEquals(false, exec("def x = new HashMap() def y = x x.put(3, 3) y.put(3, 3) return x != y"));
}
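
// Reference inequality (!==) on def operands: expectations mirror the === cases above, negated.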
public void testNer() {
assertEquals(true, exec("def x = (byte)7 def y = (int)7 return x !== y"));
assertEquals(true, exec("def x = (short)6 def y = (int)6 return x !== y"));
assertEquals(true, exec("def x = (char)5 def y = (int)5 return x !== y"));
assertEquals(false, exec("def x = (int)4 def y = (int)4 return x !== y"));
assertEquals(true, exec("def x = (long)5 def y = (int)3 return x !== y"));
assertEquals(true, exec("def x = (float)6 def y = (int)2 return x !== y"));
assertEquals(true, exec("def x = (double)7 def y = (int)1 return x !== y"));
assertEquals(true, exec("def x = new HashMap() def y = new HashMap() return x !== y"));
assertEquals(true, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() return x !== y"));
assertEquals(true, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() y.put(3, 3) return x !== y"));
assertEquals(false, exec("def x = new HashMap() def y = x x.put(3, 3) y.put(3, 3) return x !== y"));
}
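
// Less-than (<) on def operands across mixed numeric widths, against both int and double right-hand sides.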
public void testLt() {
assertEquals(true, exec("def x = (byte)1 def y = (int)7 return x < y"));
assertEquals(true, exec("def x = (short)2 def y = (int)6 return x < y"));
assertEquals(true, exec("def x = (char)3 def y = (int)5 return x < y"));
assertEquals(false, exec("def x = (int)4 def y = (int)4 return x < y"));
assertEquals(false, exec("def x = (long)5 def y = (int)3 return x < y"));
assertEquals(false, exec("def x = (float)6 def y = (int)2 return x < y"));
assertEquals(false, exec("def x = (double)7 def y = (int)1 return x < y"));
assertEquals(true, exec("def x = (byte)1 def y = (double)7 return x < y"));
assertEquals(true, exec("def x = (short)2 def y = (double)6 return x < y"));
assertEquals(true, exec("def x = (char)3 def y = (double)5 return x < y"));
assertEquals(false, exec("def x = (int)4 def y = (double)4 return x < y"));
assertEquals(false, exec("def x = (long)5 def y = (double)3 return x < y"));
assertEquals(false, exec("def x = (float)6 def y = (double)2 return x < y"));
assertEquals(false, exec("def x = (double)7 def y = (double)1 return x < y"));
}
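
// Less-than-or-equal (<=) on def operands; differs from the < cases only where the values are equal (4 vs 4).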
public void testLte() {
assertEquals(true, exec("def x = (byte)1 def y = (int)7 return x <= y"));
assertEquals(true, exec("def x = (short)2 def y = (int)6 return x <= y"));
assertEquals(true, exec("def x = (char)3 def y = (int)5 return x <= y"));
assertEquals(true, exec("def x = (int)4 def y = (int)4 return x <= y"));
assertEquals(false, exec("def x = (long)5 def y = (int)3 return x <= y"));
assertEquals(false, exec("def x = (float)6 def y = (int)2 return x <= y"));
assertEquals(false, exec("def x = (double)7 def y = (int)1 return x <= y"));
assertEquals(true, exec("def x = (byte)1 def y = (double)7 return x <= y"));
assertEquals(true, exec("def x = (short)2 def y = (double)6 return x <= y"));
assertEquals(true, exec("def x = (char)3 def y = (double)5 return x <= y"));
assertEquals(true, exec("def x = (int)4 def y = (double)4 return x <= y"));
assertEquals(false, exec("def x = (long)5 def y = (double)3 return x <= y"));
assertEquals(false, exec("def x = (float)6 def y = (double)2 return x <= y"));
assertEquals(false, exec("def x = (double)7 def y = (double)1 return x <= y"));
}
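
// Greater-than (>) on def operands across mixed numeric widths.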
public void testGt() {
assertEquals(false, exec("def x = (byte)1 def y = (int)7 return x > y"));
assertEquals(false, exec("def x = (short)2 def y = (int)6 return x > y"));
assertEquals(false, exec("def x = (char)3 def y = (int)5 return x > y"));
assertEquals(false, exec("def x = (int)4 def y = (int)4 return x > y"));
assertEquals(true, exec("def x = (long)5 def y = (int)3 return x > y"));
assertEquals(true, exec("def x = (float)6 def y = (int)2 return x > y"));
assertEquals(true, exec("def x = (double)7 def y = (int)1 return x > y"));
assertEquals(false, exec("def x = (byte)1 def y = (double)7 return x > y"));
assertEquals(false, exec("def x = (short)2 def y = (double)6 return x > y"));
assertEquals(false, exec("def x = (char)3 def y = (double)5 return x > y"));
assertEquals(false, exec("def x = (int)4 def y = (double)4 return x > y"));
assertEquals(true, exec("def x = (long)5 def y = (double)3 return x > y"));
assertEquals(true, exec("def x = (float)6 def y = (double)2 return x > y"));
assertEquals(true, exec("def x = (double)7 def y = (double)1 return x > y"));
}
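
// Greater-than-or-equal (>=) on def operands; differs from the > cases only where the values are equal (4 vs 4).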
public void testGte() {
assertEquals(false, exec("def x = (byte)1 def y = (int)7 return x >= y"));
assertEquals(false, exec("def x = (short)2 def y = (int)6 return x >= y"));
assertEquals(false, exec("def x = (char)3 def y = (int)5 return x >= y"));
assertEquals(true, exec("def x = (int)4 def y = (int)4 return x >= y"));
assertEquals(true, exec("def x = (long)5 def y = (int)3 return x >= y"));
assertEquals(true, exec("def x = (float)6 def y = (int)2 return x >= y"));
assertEquals(true, exec("def x = (double)7 def y = (int)1 return x >= y"));
assertEquals(false, exec("def x = (byte)1 def y = (double)7 return x >= y"));
assertEquals(false, exec("def x = (short)2 def y = (double)6 return x >= y"));
assertEquals(false, exec("def x = (char)3 def y = (double)5 return x >= y"));
assertEquals(true, exec("def x = (int)4 def y = (double)4 return x >= y"));
assertEquals(true, exec("def x = (long)5 def y = (double)3 return x >= y"));
assertEquals(true, exec("def x = (float)6 def y = (double)2 return x >= y"));
assertEquals(true, exec("def x = (double)7 def y = (double)1 return x >= y"));
}
}