diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 4a35bcbcfb0..e6dc7deff2b 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -39,6 +39,8 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VALUE; + /** * A base class for all elasticsearch exceptions. */ @@ -49,6 +51,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte public static final boolean REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = true; public static final boolean REST_EXCEPTION_SKIP_CAUSE_DEFAULT = false; private static final String INDEX_HEADER_KEY = "es.index"; + private static final String INDEX_HEADER_KEY_UUID = "es.index_uuid"; private static final String SHARD_HEADER_KEY = "es.shard"; private static final String RESOURCE_HEADER_TYPE_KEY = "es.resource.type"; private static final String RESOURCE_HEADER_ID_KEY = "es.resource.id"; @@ -70,7 +73,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte * The message can be parameterized using {} as placeholders for the given * arguments * - * @param msg the detail message + * @param msg the detail message * @param args the arguments for the message */ public ElasticsearchException(String msg, Object... args) { @@ -332,7 +335,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte private void xContentHeader(XContentBuilder builder, String key, List values) throws IOException { if (values != null && values.isEmpty() == false) { - if(values.size() == 1) { + if (values.size() == 1) { builder.field(key, values.get(0)); } else { builder.startArray(key); @@ -374,7 +377,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte if (cause != null && cause instanceof ElasticsearchException) { return ((ElasticsearchException) cause).guessRootCauses(); } - return new ElasticsearchException[] {this}; + return new ElasticsearchException[]{this}; } /** @@ -387,7 +390,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte if (ex instanceof ElasticsearchException) { return ((ElasticsearchException) ex).guessRootCauses(); } - return new ElasticsearchException[] {new ElasticsearchException(t.getMessage(), t) { + return new ElasticsearchException[]{new ElasticsearchException(t.getMessage(), t) { @Override protected String getExceptionName() { return getExceptionName(getCause()); @@ -414,7 +417,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte public String toString() { StringBuilder builder = new StringBuilder(); if (headers.containsKey(INDEX_HEADER_KEY)) { - builder.append('[').append(getIndex()).append(']'); + builder.append(getIndex()); if (headers.containsKey(SHARD_HEADER_KEY)) { builder.append('[').append(getShardId()).append(']'); } @@ -435,7 +438,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte final String fileName = in.readOptionalString(); final String methodName = in.readString(); final int lineNumber = in.readVInt(); - stackTrace[i] = new StackTraceElement(declaringClasss,methodName, fileName, lineNumber); + stackTrace[i] = new StackTraceElement(declaringClasss, methodName, fileName, lineNumber); } throwable.setStackTrace(stackTrace); @@ -631,10 +634,11 @@ public 
class ElasticsearchException extends RuntimeException implements ToXConte CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE = Collections.unmodifiableMap(exceptions); } - public String getIndex() { + public Index getIndex() { List index = getHeader(INDEX_HEADER_KEY); if (index != null && index.isEmpty() == false) { - return index.get(0); + List index_uuid = getHeader(INDEX_HEADER_KEY_UUID); + return new Index(index.get(0), index_uuid.get(0)); } return null; @@ -651,22 +655,28 @@ public class ElasticsearchException extends RuntimeException implements ToXConte public void setIndex(Index index) { if (index != null) { addHeader(INDEX_HEADER_KEY, index.getName()); + addHeader(INDEX_HEADER_KEY_UUID, index.getUUID()); } } public void setIndex(String index) { if (index != null) { - addHeader(INDEX_HEADER_KEY, index); + setIndex(new Index(index, INDEX_UUID_NA_VALUE)); } } public void setShard(ShardId shardId) { if (shardId != null) { - addHeader(INDEX_HEADER_KEY, shardId.getIndex()); + setIndex(shardId.getIndex()); addHeader(SHARD_HEADER_KEY, Integer.toString(shardId.id())); } } + public void setShard(String index, int shardId) { + setIndex(index); + addHeader(SHARD_HEADER_KEY, Integer.toString(shardId)); + } + public void setResources(String type, String... id) { assert type != null; addHeader(RESOURCE_HEADER_ID_KEY, id); @@ -691,7 +701,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(t); builder.field("root_cause"); builder.startArray(); - for (ElasticsearchException rootCause : rootCauses){ + for (ElasticsearchException rootCause : rootCauses) { builder.startObject(); rootCause.toXContent(builder, new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE, "true"), params)); builder.endObject(); diff --git a/core/src/main/java/org/elasticsearch/ExceptionsHelper.java b/core/src/main/java/org/elasticsearch/ExceptionsHelper.java index 555a172c0d8..3842ab4e3bf 100644 --- a/core/src/main/java/org/elasticsearch/ExceptionsHelper.java +++ b/core/src/main/java/org/elasticsearch/ExceptionsHelper.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.index.Index; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -243,7 +244,12 @@ public final class ExceptionsHelper { public GroupBy(Throwable t) { if (t instanceof ElasticsearchException) { - index = ((ElasticsearchException) t).getIndex(); + final Index index = ((ElasticsearchException) t).getIndex(); + if (index != null) { + this.index = index.getName(); + } else { + this.index = null; + } } else { index = null; } diff --git a/core/src/main/java/org/elasticsearch/action/ActionRequest.java b/core/src/main/java/org/elasticsearch/action/ActionRequest.java index 6c522d03f05..7955855bc0d 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionRequest.java +++ b/core/src/main/java/org/elasticsearch/action/ActionRequest.java @@ -32,10 +32,6 @@ public abstract class ActionRequest> exte public ActionRequest() { super(); - } - - protected ActionRequest(ActionRequest request) { - super(request); // this does not set the listenerThreaded API, if needed, its up to the caller to set it // since most times, we actually want it to not be threaded... 
// this.listenerThreaded = request.listenerThreaded(); diff --git a/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index 9ad449f1f45..8cbc405dafb 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -49,12 +49,6 @@ public abstract class ActionRequestBuilder execute() { PlainListenableActionFuture future = new PlainListenableActionFuture<>(threadPool); execute(future); diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java index c0389c6c194..76093aa9cb7 100644 --- a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -53,7 +53,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements St * The index the document was changed in. */ public String getIndex() { - return this.shardId.getIndex(); + return this.shardId.getIndexName(); } @@ -119,7 +119,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements St @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { ReplicationResponse.ShardInfo shardInfo = getShardInfo(); - builder.field(Fields._INDEX, shardId.getIndex()) + builder.field(Fields._INDEX, shardId.getIndexName()) .field(Fields._TYPE, type) .field(Fields._ID, id) .field(Fields._VERSION, version); diff --git a/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java b/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java index 4e358c8d42a..8a81446d459 100644 --- a/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java +++ b/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -169,15 +170,13 @@ public class ReplicationResponse extends ActionResponse { public static class Failure implements ShardOperationFailedException, ToXContent { - private String index; - private int shardId; + private ShardId shardId; private String nodeId; private Throwable cause; private RestStatus status; private boolean primary; - public Failure(String index, int shardId, @Nullable String nodeId, Throwable cause, RestStatus status, boolean primary) { - this.index = index; + public Failure(ShardId shardId, @Nullable String nodeId, Throwable cause, RestStatus status, boolean primary) { this.shardId = shardId; this.nodeId = nodeId; this.cause = cause; @@ -193,7 +192,7 @@ public class ReplicationResponse extends ActionResponse { */ @Override public String index() { - return index; + return shardId.getIndexName(); } /** @@ -201,6 +200,10 @@ public class ReplicationResponse extends ActionResponse { */ @Override public int shardId() { + return shardId.id(); + } + + public ShardId fullShardId() { return shardId; } @@ -243,8 +246,7 @@ public class ReplicationResponse extends ActionResponse { @Override public void readFrom(StreamInput in) throws IOException { - index = in.readString(); - shardId = in.readVInt(); + shardId = 
ShardId.readShardId(in); nodeId = in.readOptionalString(); cause = in.readThrowable(); status = RestStatus.readFrom(in); @@ -253,8 +255,7 @@ public class ReplicationResponse extends ActionResponse { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(index); - out.writeVInt(shardId); + shardId.writeTo(out); out.writeOptionalString(nodeId); out.writeThrowable(cause); RestStatus.writeTo(out, status); @@ -264,8 +265,8 @@ public class ReplicationResponse extends ActionResponse { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Fields._INDEX, index); - builder.field(Fields._SHARD, shardId); + builder.field(Fields._INDEX, shardId.getIndexName()); + builder.field(Fields._SHARD, shardId.id()); builder.field(Fields._NODE, nodeId); builder.field(Fields.REASON); builder.startObject(); diff --git a/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java b/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java index 35340213145..e2cff9244ea 100644 --- a/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java +++ b/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java @@ -36,13 +36,19 @@ public class UnavailableShardsException extends ElasticsearchException { super(buildMessage(shardId, message), args); } + public UnavailableShardsException(String index, int shardId, String message, Object... args) { + super(buildMessage(index, shardId, message), args); + } + private static String buildMessage(ShardId shardId, String message) { if (shardId == null) { return message; } - return "[" + shardId.index().name() + "][" + shardId.id() + "] " + message; + return buildMessage(shardId.getIndexName(), shardId.id(), message); } + private static String buildMessage(String index, int shardId, String message) {return "[" + index + "][" + shardId + "] " + message;} + public UnavailableShardsException(StreamInput in) throws IOException { super(in); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 79adbafa9bb..b5c9577aff7 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -141,7 +141,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction< } assert waitFor >= 0; - final ClusterStateObserver observer = new ClusterStateObserver(clusterService, logger); + final ClusterStateObserver observer = new ClusterStateObserver(clusterService, logger, threadPool.getThreadContext()); final ClusterState state = observer.observedState(); if (waitFor == 0 || request.timeout().millis() == 0) { listener.onResponse(getResponse(request, state, waitFor, request.timeout().millis() == 0)); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index f26177a0ce7..c743a1d2a91 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -102,7 +102,7 @@ public class TransportNodesHotThreadsAction extends TransportNodesAction request, String[] nodesIds) { - super(request, nodesIds); + public Request(String[] nodesIds) { + super(nodesIds); } public Request snapshotIds(SnapshotId[] snapshotIds) { @@ -214,7 +213,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction() { @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java index ff754be2a20..ec7017160c0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java @@ -66,10 +66,10 @@ public class ClusterStatsIndices implements ToXContent, Streamable { for (ClusterStatsNodeResponse r : nodeResponses) { for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) { - ShardStats indexShardStats = countsPerIndex.get(shardStats.getShardRouting().getIndex()); + ShardStats indexShardStats = countsPerIndex.get(shardStats.getShardRouting().getIndexName()); if (indexShardStats == null) { indexShardStats = new ShardStats(); - countsPerIndex.put(shardStats.getShardRouting().getIndex(), indexShardStats); + countsPerIndex.put(shardStats.getShardRouting().getIndexName(), indexShardStats); } indexShardStats.total++; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 3e4880d0a68..3fc2f4b631e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -132,7 +132,7 @@ public class TransportClusterStatsAction extends TransportNodesAction public CreateIndexRequest() { } - /** - * Constructs a new request to create an index that was triggered by a different request, - * provided as an argument so that its headers and context can be copied to the new request. - */ - public CreateIndexRequest(ActionRequest request) { - super(request); - } - /** * Constructs a new request to create an index with the specified name. */ diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java index 0152254d22a..7dc55c08fac 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java @@ -42,17 +42,6 @@ public class FlushRequest extends BroadcastRequest { private boolean force = false; private boolean waitIfOngoing = false; - public FlushRequest() { - } - - /** - * Copy constructor that creates a new flush request that is a copy of the one provided as an argument. - * The new request will inherit though headers and context from the original request that caused it. - */ - public FlushRequest(ActionRequest originalRequest) { - super(originalRequest); - } - /** * Constructs a new flush request against one or more indices. If nothing is provided, all indices will * be flushed. 
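A pattern that repeats through this diff is the removal of constructors whose only purpose was to copy headers and context from an originating request (CreateIndexRequest(ActionRequest), FlushRequest(ActionRequest), and the ActionRequest/ActionRequestBuilder plumbing behind them); callers now use the plain constructors, and per-request state is expected to travel via the thread context instead (see the ClusterStateObserver call above, which now takes threadPool.getThreadContext()). Below is a minimal before/after sketch of the calling pattern — the index name is hypothetical and the cause string is an assumption mirroring the "auto(delete api)" style that appears later in this diff:

import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;

class DerivedRequestSketch {
    static void buildDerivedRequests() {
        // Before this change, the originating request was passed in solely so its headers and
        // context could be copied, e.g. new CreateIndexRequest(bulkRequest).
        // After it, the plain constructors are used:
        CreateIndexRequest createIndex = new CreateIndexRequest()
                .index("logs-2016-02")        // hypothetical index name
                .cause("auto(bulk api)");     // assumed cause string, in the style of "auto(delete api)" below
        FlushRequest flush = new FlushRequest("logs-2016-02");  // varargs indices constructor, unchanged
    }
}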
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java index ccf06be8bd0..3a9ec89db5d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java @@ -31,7 +31,7 @@ public class ShardFlushRequest extends ReplicationRequest { private FlushRequest request = new FlushRequest(); public ShardFlushRequest(FlushRequest request, ShardId shardId) { - super(request, shardId); + super(shardId); this.request = request; } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java index 59719fe8877..2a14d66a765 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java @@ -36,17 +36,6 @@ import java.util.Arrays; */ public class SyncedFlushRequest extends BroadcastRequest { - public SyncedFlushRequest() { - } - - /** - * Copy constructor that creates a new synced flush request that is a copy of the one provided as an argument. - * The new request will inherit though headers and context from the original request that caused it. - */ - public SyncedFlushRequest(ActionRequest originalRequest) { - super(originalRequest); - } - /** * Constructs a new synced flush request against one or more indices. If nothing is provided, all indices will * be sync flushed. diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java index 59844430b36..149cba9a33e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java @@ -42,7 +42,6 @@ public class GetFieldMappingsIndexRequest extends SingleShardRequest Regex.simpleMatch(request.types(), type)) .collect(Collectors.toCollection(ArrayList::new)); if (typeIntersection.isEmpty()) { - throw new TypeMissingException(shardId.index(), request.types()); + throw new TypeMissingException(shardId.getIndex(), request.types()); } } @@ -115,7 +115,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc } } - return new GetFieldMappingsResponse(singletonMap(shardId.getIndex(), typeMappings.immutableMap())); + return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), typeMappings.immutableMap())); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java index 9798e189f7b..8590fc210a0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java @@ -75,7 +75,7 @@ public class TransportRecoveryAction extends TransportBroadcastByNodeAction()); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java index ab9186c987f..b5bce3c85cc 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java @@ -33,17 +33,6 @@ import org.elasticsearch.action.support.broadcast.BroadcastRequest; */ public class RefreshRequest extends BroadcastRequest { - public RefreshRequest() { - } - - /** - * Copy constructor that creates a new refresh request that is a copy of the one provided as an argument. - * The new request will inherit though headers and context from the original request that caused it. - */ - public RefreshRequest(ActionRequest originalRequest) { - super(originalRequest); - } - public RefreshRequest(String... indices) { super(indices); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java index aaaf11e4534..bd879e0eaa9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java @@ -54,7 +54,7 @@ public class TransportRefreshAction extends TransportBroadcastReplicationAction< @Override protected BasicReplicationRequest newShardRequest(RefreshRequest request, ShardId shardId) { - return new BasicReplicationRequest(request, shardId); + return new BasicReplicationRequest(shardId); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java index 979e51f83c7..6bcf7d71c34 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -62,17 +62,17 @@ public class IndicesSegmentResponse extends BroadcastResponse implements ToXCont Set indices = new HashSet<>(); for (ShardSegments shard : shards) { - indices.add(shard.getShardRouting().getIndex()); + indices.add(shard.getShardRouting().getIndexName()); } - for (String index : indices) { + for (String indexName : indices) { List shards = new ArrayList<>(); for (ShardSegments shard : this.shards) { - if (shard.getShardRouting().index().equals(index)) { + if (shard.getShardRouting().getIndexName().equals(indexName)) { shards.add(shard); } } - indicesSegments.put(index, new IndexSegments(index, shards.toArray(new ShardSegments[shards.size()]))); + indicesSegments.put(indexName, new IndexSegments(indexName, shards.toArray(new ShardSegments[shards.size()]))); } this.indicesSegments = indicesSegments; return indicesSegments; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index 4a9f2c3b0c2..fd45e22a171 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -93,7 +93,7 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeActi @Override protected ShardSegments 
shardOperation(IndicesSegmentsRequest request, ShardRouting shardRouting) { - IndexService indexService = indicesService.indexServiceSafe(shardRouting.getIndex()); + IndexService indexService = indicesService.indexServiceSafe(shardRouting.getIndexName()); IndexShard indexShard = indexService.getShard(shardRouting.id()); return new ShardSegments(indexShard.routingEntry(), indexShard.segments(request.verbose())); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index 77f8608089c..79c49e292a3 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -166,7 +166,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc ImmutableOpenMap.Builder>> indicesStoreStatusesBuilder = ImmutableOpenMap.builder(); java.util.List failureBuilder = new ArrayList<>(); for (Response fetchResponse : fetchResponses) { - ImmutableOpenIntMap> indexStoreStatuses = indicesStoreStatusesBuilder.get(fetchResponse.shardId.getIndex()); + ImmutableOpenIntMap> indexStoreStatuses = indicesStoreStatusesBuilder.get(fetchResponse.shardId.getIndexName()); final ImmutableOpenIntMap.Builder> indexShardsBuilder; if (indexStoreStatuses == null) { indexShardsBuilder = ImmutableOpenIntMap.builder(); @@ -179,15 +179,15 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc } for (NodeGatewayStartedShards response : fetchResponse.responses) { if (shardExistsInNode(response)) { - IndicesShardStoresResponse.StoreStatus.AllocationStatus allocationStatus = getAllocationStatus(fetchResponse.shardId.getIndex(), fetchResponse.shardId.id(), response.getNode()); + IndicesShardStoresResponse.StoreStatus.AllocationStatus allocationStatus = getAllocationStatus(fetchResponse.shardId.getIndexName(), fetchResponse.shardId.id(), response.getNode()); storeStatuses.add(new IndicesShardStoresResponse.StoreStatus(response.getNode(), response.version(), response.allocationId(), allocationStatus, response.storeException())); } } CollectionUtil.timSort(storeStatuses); indexShardsBuilder.put(fetchResponse.shardId.id(), storeStatuses); - indicesStoreStatusesBuilder.put(fetchResponse.shardId.getIndex(), indexShardsBuilder.build()); + indicesStoreStatusesBuilder.put(fetchResponse.shardId.getIndexName(), indexShardsBuilder.build()); for (FailedNodeException failure : fetchResponse.failures) { - failureBuilder.add(new IndicesShardStoresResponse.Failure(failure.nodeId(), fetchResponse.shardId.getIndex(), fetchResponse.shardId.id(), failure.getCause())); + failureBuilder.add(new IndicesShardStoresResponse.Failure(failure.nodeId(), fetchResponse.shardId.getIndexName(), fetchResponse.shardId.id(), failure.getCause())); } } listener.onResponse(new IndicesShardStoresResponse(indicesStoreStatusesBuilder.build(), Collections.unmodifiableList(failureBuilder))); @@ -196,7 +196,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc private IndicesShardStoresResponse.StoreStatus.AllocationStatus getAllocationStatus(String index, int shardID, DiscoveryNode node) { for (ShardRouting shardRouting : routingNodes.node(node.id())) { ShardId shardId = shardRouting.shardId(); - if (shardId.id() == shardID && shardId.getIndex().equals(index)) { + if 
(shardId.id() == shardID && shardId.getIndexName().equals(index)) { if (shardRouting.primary()) { return IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY; } else if (shardRouting.assignedToNode()) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 55ad75eab33..11f91397c52 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -89,17 +89,17 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten Set indices = new HashSet<>(); for (ShardStats shard : shards) { - indices.add(shard.getShardRouting().getIndex()); + indices.add(shard.getShardRouting().getIndexName()); } - for (String index : indices) { + for (String indexName : indices) { List shards = new ArrayList<>(); for (ShardStats shard : this.shards) { - if (shard.getShardRouting().index().equals(index)) { + if (shard.getShardRouting().getIndexName().equals(indexName)) { shards.add(shard); } } - indicesStats.put(index, new IndexStats(index, shards.toArray(new ShardStats[shards.size()]))); + indicesStats.put(indexName, new IndexStats(indexName, shards.toArray(new ShardStats[shards.size()]))); } this.indicesStats = indicesStats; return indicesStats; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java index 5ace6641352..6c3a64e04fc 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java @@ -59,14 +59,14 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte indices.add(shard.getIndex()); } - for (String index : indices) { + for (String indexName : indices) { List shards = new ArrayList<>(); for (ShardUpgradeStatus shard : this.shards) { - if (shard.getShardRouting().index().equals(index)) { + if (shard.getShardRouting().getIndexName().equals(indexName)) { shards.add(shard); } } - indicesUpgradeStats.put(index, new IndexUpgradeStatus(index, shards.toArray(new ShardUpgradeStatus[shards.size()]))); + indicesUpgradeStats.put(indexName, new IndexUpgradeStatus(indexName, shards.toArray(new ShardUpgradeStatus[shards.size()]))); } this.indicesUpgradeStatus = indicesUpgradeStats; return indicesUpgradeStats; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index 5e87e91b255..6e172f3e22f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -75,7 +75,7 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction> versions = new HashMap<>(); for (ShardUpgradeResult result : shardUpgradeResults) { successfulShards++; - String index = result.getShardId().getIndex(); + String index = result.getShardId().getIndex().getName(); if (result.primary()) { Integer count = successfulPrimaryShards.get(index); successfulPrimaryShards.put(index, count == 
null ? 1 : count + 1); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 6d6bbd691b8..14086605f41 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -96,7 +96,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction implements Composite public BulkRequest() { } - /** - * Creates a bulk request caused by some other request, which is provided as an - * argument so that its headers and context can be copied to the new request - */ - public BulkRequest(ActionRequest request) { - super(request); - } - /** * Adds a list of requests to be executed. Either index or delete requests. */ diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java index 1edba16220a..275e2819cf6 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java @@ -41,7 +41,7 @@ public class BulkShardRequest extends ReplicationRequest { } BulkShardRequest(BulkRequest bulkRequest, ShardId shardId, boolean refresh, BulkItemRequest[] items) { - super(bulkRequest, shardId); + super(shardId); this.items = items; this.refresh = refresh; } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 7252993427f..9cdce240d7f 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -114,7 +114,7 @@ public class TransportBulkAction extends HandledTransportAction> entry : indicesAndTypes.entrySet()) { final String index = entry.getKey(); if (autoCreateIndex.shouldAutoCreate(index, state)) { - CreateIndexRequest createIndexRequest = new CreateIndexRequest(bulkRequest); + CreateIndexRequest createIndexRequest = new CreateIndexRequest(); createIndexRequest.index(index); for (String type : entry.getValue()) { createIndexRequest.mapping(type); @@ -377,7 +377,7 @@ public class TransportBulkAction extends HandledTransportAction implements * The new request will inherit though headers and context from the original request that caused it. 
*/ public DeleteRequest(DeleteRequest request, ActionRequest originalRequest) { - super(request, originalRequest); + super(request); this.type = request.type(); this.id = request.id(); this.routing = request.routing(); @@ -102,14 +102,6 @@ public class DeleteRequest extends ReplicationRequest implements this.versionType = request.versionType(); } - /** - * Creates a delete request caused by some other request, which is provided as an - * argument so that its headers and context can be copied to the new request - */ - public DeleteRequest(ActionRequest request) { - super(request); - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); diff --git a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java index f80b1a24396..c235144c662 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java @@ -72,7 +72,7 @@ public class TransportDeleteAction extends TransportReplicationAction listener) { ClusterState state = clusterService.state(); if (autoCreateIndex.shouldAutoCreate(request.index(), state)) { - createIndexAction.execute(new CreateIndexRequest(request).index(request.index()).cause("auto(delete api)").masterNodeTimeout(request.timeout()), new ActionListener() { + createIndexAction.execute(new CreateIndexRequest().index(request.index()).cause("auto(delete api)").masterNodeTimeout(request.timeout()), new ActionListener() { @Override public void onResponse(CreateIndexResponse result) { innerExecute(request, listener); diff --git a/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 7b6253c0ae8..26ae4bef493 100644 --- a/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -108,7 +108,7 @@ public class TransportExplainAction extends TransportSingleShardAction implements Realti * Copy constructor that creates a new get request that is a copy of the one provided as an argument. * The new request will inherit though headers and context from the original request that caused it. */ - public GetRequest(GetRequest getRequest, ActionRequest originalRequest) { - super(originalRequest); + public GetRequest(GetRequest getRequest) { this.index = getRequest.index; this.type = getRequest.type; this.id = getRequest.id; @@ -98,14 +96,6 @@ public class GetRequest extends SingleShardRequest implements Realti this.type = "_all"; } - /** - * Constructs a new get request starting from the provided request, meaning that it will - * inherit its headers and context, and against the specified index. - */ - public GetRequest(ActionRequest request, String index) { - super(request, index); - } - /** * Constructs a new get request against the specified index with the type and id. 
* diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index db3c0f72bfd..f67e2b27b56 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -266,18 +266,6 @@ public class MultiGetRequest extends ActionRequest implements I List items = new ArrayList<>(); - public MultiGetRequest() { - - } - - /** - * Creates a multi get request caused by some other request, which is provided as an - * argument so that its headers and context can be copied to the new request - */ - public MultiGetRequest(ActionRequest request) { - super(request); - } - public List getItems() { return this.items; } diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java index 6715319bc32..9250204858f 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java @@ -45,7 +45,7 @@ public class MultiGetShardRequest extends SingleShardRequest(); diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index 52554e0ede7..7f5de65c614 100644 --- a/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -79,7 +79,7 @@ public class TransportMultiGetAction extends HandledTransportAction implements Do public IndexRequest() { } - /** - * Creates an index request caused by some other request, which is provided as an - * argument so that its headers and context can be copied to the new request - */ - public IndexRequest(ActionRequest request) { - super(request); - } - /** * Copy constructor that creates a new index request that is a copy of the one provided as an argument. * The new request will inherit though headers and context from the original request that caused it. 
*/ - public IndexRequest(IndexRequest indexRequest, ActionRequest originalRequest) { - super(indexRequest, originalRequest); + public IndexRequest(IndexRequest indexRequest) { + super(indexRequest); this.type = indexRequest.type; this.id = indexRequest.id; this.routing = indexRequest.routing; diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index 620056ded4e..ae901e8575d 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -88,7 +88,7 @@ public class TransportIndexAction extends TransportReplicationAction impleme } PercolateRequest(PercolateRequest request, BytesReference docSource) { - super(request); this.indices = request.indices(); this.documentType = request.documentType(); this.routing = request.routing(); @@ -274,7 +273,7 @@ public class PercolateRequest extends BroadcastRequest impleme source = in.readBytesReference(); docSource = in.readBytesReference(); if (in.readBoolean()) { - getRequest = new GetRequest(null); + getRequest = new GetRequest(); getRequest.readFrom(in); } onlyCount = in.readBoolean(); diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java index 1ca5e244962..e3fc737c2cc 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java @@ -57,7 +57,7 @@ public class PercolateShardResponse extends BroadcastShardResponse { } public PercolateShardResponse(TopDocs topDocs, Map ids, Map> hls, PercolateContext context) { - super(new ShardId(context.shardTarget().getIndex(), context.shardTarget().getShardId())); + super(context.indexShard().shardId()); this.topDocs = topDocs; this.ids = ids; this.hls = hls; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java index bf7b9e5307b..d86d91c654e 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java @@ -97,7 +97,7 @@ public class TransportMultiPercolateAction extends HandledTransportAction() { @Override public void onResponse(GetResponse getResponse) { @@ -150,7 +150,7 @@ public class TransportPercolateAction extends TransportBroadcastAction(); diff --git a/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java b/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java index b390b77504a..17343e86912 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java @@ -37,17 +37,6 @@ public class ClearScrollRequest extends ActionRequest { private List scrollIds; - public ClearScrollRequest() { - } - - /** - * Creates a clear scroll request caused by some other request, which is provided as an - * argument so that its headers and context can be copied to the new request - */ - public ClearScrollRequest(ActionRequest request) { - super(request); - } - public List getScrollIds() { return scrollIds; } diff --git 
a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 8014e4acb6c..10a1ad2efce 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -80,8 +80,7 @@ public class SearchRequest extends ActionRequest implements Indic * Copy constructor that creates a new search request that is a copy of the one provided as an argument. * The new request will inherit though headers and context from the original request that caused it. */ - public SearchRequest(SearchRequest searchRequest, ActionRequest originalRequest) { - super(originalRequest); + public SearchRequest(SearchRequest searchRequest) { this.searchType = searchRequest.searchType; this.indices = searchRequest.indices; this.routing = searchRequest.routing; @@ -94,15 +93,6 @@ public class SearchRequest extends ActionRequest implements Indic this.indicesOptions = searchRequest.indicesOptions; } - /** - * Constructs a new search request starting from the provided request, meaning that it will - * inherit its headers and context - */ - public SearchRequest(ActionRequest request) { - super(request); - this.source = new SearchSourceBuilder(); - } - /** * Constructs a new search request against the indices. No indices provided here means that search * will run against all indices. diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 9d6f61ed580..07d7b2fa3d0 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java index 537d61ac558..c1ff788dae5 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java @@ -46,14 +46,6 @@ public class SearchScrollRequest extends ActionRequest { this.scrollId = scrollId; } - /** - * Creates a scroll request caused by some other request, which is provided as an - * argument so that its headers and context can be copied to the new request - */ - public SearchScrollRequest(ActionRequest request) { - super(request); - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index fd2b257e53e..18490735631 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -59,7 +59,7 @@ public class 
TransportMultiSearchAction extends HandledTransportAction() { @Override public void onResponse(SearchResponse searchResponse) { diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java index 7244a1ff58a..6d22264815b 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java @@ -135,7 +135,7 @@ public class TransportSearchDfsQueryAndFetchAction extends TransportSearchTypeAc public void doRun() throws IOException { sortedShardList = searchPhaseController.sortDocs(true, queryFetchResults); final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults, - queryFetchResults, request); + queryFetchResults); String scrollId = null; if (request.scroll() != null) { scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null); diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java index faaf1215542..31128cea961 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java @@ -211,7 +211,7 @@ public class TransportSearchDfsQueryThenFetchAction extends TransportSearchTypeA @Override public void doRun() throws IOException { final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, - fetchResults, request); + fetchResults); String scrollId = null; if (request.scroll() != null) { scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null); diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java index 3c4f5419f00..0e1e8db5519 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java @@ -82,7 +82,7 @@ public class TransportSearchQueryAndFetchAction extends TransportSearchTypeActio boolean useScroll = request.scroll() != null; sortedShardList = searchPhaseController.sortDocs(useScroll, firstResults); final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults, - firstResults, request); + firstResults); String scrollId = null; if (request.scroll() != null) { scrollId = buildScrollId(request.searchType(), firstResults, null); diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java index 1d8589e7a96..c63287d9956 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java @@ -146,7 +146,7 @@ public class TransportSearchQueryThenFetchAction extends TransportSearchTypeActi @Override public void 
doRun() throws IOException { final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults, - fetchResults, request); + fetchResults); String scrollId = null; if (request.scroll() != null) { scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null); diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java index 2a953f9b732..b718baaa294 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java @@ -193,7 +193,7 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent private void innerFinishHim() throws Exception { ScoreDoc[] sortedShardList = searchPhaseController.sortDocs(true, queryFetchResults); final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults, - queryFetchResults, request); + queryFetchResults); String scrollId = null; if (request.scroll() != null) { scrollId = request.scrollId(); diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java index 8dd9c139c33..93a28b29aa1 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java @@ -208,7 +208,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent IntArrayList docIds = entry.value; final QuerySearchResult querySearchResult = queryResults.get(entry.index); ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[entry.index]; - ShardFetchRequest shardFetchRequest = new ShardFetchRequest(request, querySearchResult.id(), docIds, lastEmittedDoc); + ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.id(), docIds, lastEmittedDoc); DiscoveryNode node = nodes.get(querySearchResult.shardTarget().nodeId()); searchService.sendExecuteFetchScroll(node, shardFetchRequest, new ActionListener() { @Override @@ -243,7 +243,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent } private void innerFinishHim() { - InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults, request); + InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults); String scrollId = null; if (request.scroll() != null) { scrollId = request.scrollId(); diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java index 9d1004ccd5c..042534a2e7b 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java @@ -163,7 +163,7 @@ public abstract class TransportSearchTypeAction extends TransportAction() { @Override public void onResponse(FirstResult result) { diff --git 
a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 6bc62cf83bb..0ed98578557 100644 --- a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -143,7 +143,7 @@ public class TransportSuggestAction extends TransportBroadcastAction AUTO_CREATE_INDEX_SETTING = new Setting<>("action.auto_create_index", "true", AutoCreate::new, false, Setting.Scope.CLUSTER); + private final boolean dynamicMappingDisabled; + private final IndexNameExpressionResolver resolver; + private final AutoCreate autoCreate; + @Inject public AutoCreateIndex(Settings settings, IndexNameExpressionResolver resolver) { this.resolver = resolver; dynamicMappingDisabled = !MapperService.INDEX_MAPPER_DYNAMIC_SETTING.get(settings); - final AutoCreate autoCreate = AUTO_CREATE_INDEX_SETTING.get(settings); - if (autoCreate.autoCreateIndex) { - needToCheck = true; - globallyDisabled = false; - matches = autoCreate.indices; - if (matches != null) { - matches2 = new String[matches.length]; - for (int i = 0; i < matches.length; i++) { - matches2[i] = matches[i].substring(1); - } - } else { - matches2 = null; - } - } else { - needToCheck = false; - globallyDisabled = true; - matches = null; - matches2 = null; - } + this.autoCreate = AUTO_CREATE_INDEX_SETTING.get(settings); } /** * Do we really need to check if an index should be auto created? */ public boolean needToCheck() { - return this.needToCheck; + return this.autoCreate.autoCreateIndex; } /** * Should the index be auto created? */ public boolean shouldAutoCreate(String index, ClusterState state) { - if (!needToCheck) { + if (autoCreate.autoCreateIndex == false) { return false; } - boolean exists = resolver.hasIndexOrAlias(index, state); - if (exists) { + if (dynamicMappingDisabled) { return false; } - if (globallyDisabled || dynamicMappingDisabled) { + if (resolver.hasIndexOrAlias(index, state)) { return false; } // matches not set, default value of "true" - if (matches == null) { + if (autoCreate.expressions.isEmpty()) { return true; } - for (int i = 0; i < matches.length; i++) { - char c = matches[i].charAt(0); - if (c == '-') { - if (Regex.simpleMatch(matches2[i], index)) { - return false; - } - } else if (c == '+') { - if (Regex.simpleMatch(matches2[i], index)) { - return true; - } - } else { - if (Regex.simpleMatch(matches[i], index)) { - return true; - } + for (Tuple expression : autoCreate.expressions) { + String indexExpression = expression.v1(); + boolean include = expression.v2(); + if (Regex.simpleMatch(indexExpression, index)) { + return include; } } return false; } - public static class AutoCreate { + private static class AutoCreate { private final boolean autoCreateIndex; - private final String[] indices; + private final List> expressions; - public AutoCreate(String value) { + private AutoCreate(String value) { boolean autoCreateIndex; - String[] indices = null; + List> expressions = new ArrayList<>(); try { autoCreateIndex = Booleans.parseBooleanExact(value); } catch (IllegalArgumentException ex) { try { - indices = Strings.commaDelimitedListToStringArray(value); - for (String string : indices) { - if (string == null || string.length() == 0) { - throw new IllegalArgumentException("Can't parse [" + value + "] for setting [action.auto_create_index] must be either [true, false, or a comma seperated list of index patterns]"); + String[] 
patterns = Strings.commaDelimitedListToStringArray(value); + for (String pattern : patterns) { + if (pattern == null || pattern.length() == 0) { + throw new IllegalArgumentException("Can't parse [" + value + "] for setting [action.auto_create_index] must be either [true, false, or a comma separated list of index patterns]"); } + Tuple expression; + if (pattern.startsWith("-")) { + if (pattern.length() == 1) { + throw new IllegalArgumentException("Can't parse [" + value + "] for setting [action.auto_create_index] must contain an index name after [-]"); + } + expression = new Tuple<>(pattern.substring(1), false); + } else if(pattern.startsWith("+")) { + if (pattern.length() == 1) { + throw new IllegalArgumentException("Can't parse [" + value + "] for setting [action.auto_create_index] must contain an index name after [+]"); + } + expression = new Tuple<>(pattern.substring(1), true); + } else { + expression = new Tuple<>(pattern, true); + } + expressions.add(expression); } autoCreateIndex = true; } catch (IllegalArgumentException ex1) { @@ -135,7 +124,7 @@ public final class AutoCreateIndex { throw ex1; } } - this.indices = indices; + this.expressions = expressions; this.autoCreateIndex = autoCreateIndex; } } diff --git a/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java b/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java index c231028f0bc..0483ec66e44 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java @@ -38,11 +38,6 @@ public class ChildTaskRequest extends TransportRequest { private long parentTaskId; protected ChildTaskRequest() { - - } - - protected ChildTaskRequest(TransportRequest parentTaskRequest) { - super(parentTaskRequest); } public void setParentTask(String parentTaskNode, long parentTaskId) { diff --git a/core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java b/core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java index 17d8bcfb998..676949e367d 100644 --- a/core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java +++ b/core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java @@ -48,7 +48,7 @@ public class DefaultShardOperationFailedException implements ShardOperationFaile } public DefaultShardOperationFailedException(ElasticsearchException e) { - this.index = e.getIndex(); + this.index = e.getIndex() == null ? 
null : e.getIndex().getName(); this.shardId = e.getShardId().id(); this.reason = e; this.status = e.status(); diff --git a/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java b/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java index 2a2bdd7a061..6ed52e1ac46 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java +++ b/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java @@ -50,7 +50,7 @@ public final class ThreadedActionListener implements ActionListener ActionListener wrap(ActionListener listener) { diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java index 96576d58098..508581050a6 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java @@ -37,11 +37,6 @@ public class BroadcastRequest> extends private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed(); public BroadcastRequest() { - - } - - protected BroadcastRequest(ActionRequest originalRequest) { - super(originalRequest); } protected BroadcastRequest(String[] indices) { diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java index 8e22a90b9d7..921724e6572 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java @@ -42,7 +42,6 @@ public abstract class BroadcastShardRequest extends TransportRequest implements } protected BroadcastShardRequest(ShardId shardId, BroadcastRequest request) { - super(request); this.shardId = shardId; this.originalIndices = new OriginalIndices(request); } diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java index bf7d271bb6f..ad79285051a 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java @@ -42,7 +42,7 @@ public abstract class BroadcastShardResponse extends TransportResponse { } public String getIndex() { - return this.shardId.getIndex(); + return this.shardId.getIndexName(); } public int getShardId() { diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index 613de1aa923..f1abbdaa1b6 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -118,7 +118,7 @@ public abstract class TransportBroadcastByNodeAction shards) { - super(request); this.indicesLevelRequest = request; this.shards = shards; this.nodeId = nodeId; diff --git a/core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index 
b142d0dae8c..5d45b7b11e6 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -42,10 +42,6 @@ public abstract class AcknowledgedRequest request) { - super(request); - } - /** * Allows to set the timeout * @param timeout timeout as a string (e.g. 1s) diff --git a/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java b/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index d954caba8e9..a964a44a140 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -36,11 +36,6 @@ public abstract class MasterNodeRequest request) { - super(request); } /** diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index e0c9c9b8b61..087b3891a8b 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -121,7 +121,7 @@ public abstract class TransportMasterNodeAction } - protected BaseNodesRequest(ActionRequest request, String... nodesIds) { - super(request); - this.nodesIds = nodesIds; - } - protected BaseNodesRequest(String... nodesIds) { this.nodesIds = nodesIds; } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java index 3778275d400..274d13bf4cf 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java @@ -30,22 +30,13 @@ import org.elasticsearch.index.shard.ShardId; */ public class BasicReplicationRequest extends ReplicationRequest { public BasicReplicationRequest() { - - } - - /** - * Creates a new request that inherits headers and context from the request - * provided as argument. - */ - public BasicReplicationRequest(ActionRequest request) { - super(request); } /** * Creates a new request with resolved shard id */ - public BasicReplicationRequest(ActionRequest request, ShardId shardId) { - super(request, shardId); + public BasicReplicationRequest(ShardId shardId) { + super(shardId); } /** diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index a6c9b8f65a3..1f79d99981f 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -58,35 +58,20 @@ public abstract class ReplicationRequest request) { - super(request); - } /** * Creates a new request with resolved shard id */ - public ReplicationRequest(ActionRequest request, ShardId shardId) { - super(request); - this.index = shardId.getIndex(); + public ReplicationRequest(ShardId shardId) { + this.index = shardId.getIndexName(); this.shardId = shardId; } - /** - * Copy constructor that creates a new request that is a copy of the one provided as an argument. 
- */ - protected ReplicationRequest(Request request) { - this(request, request); - } - /** * Copy constructor that creates a new request that is a copy of the one provided as an argument. * The new request will inherit though headers and context from the original request that caused it. */ - protected ReplicationRequest(Request request, ActionRequest originalRequest) { - super(originalRequest); + protected ReplicationRequest(Request request) { this.timeout = request.timeout(); this.index = request.index(); this.consistencyLevel = request.consistencyLevel(); diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index ab88d73d3b0..3daafce50b7 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -90,13 +90,13 @@ public abstract class TransportBroadcastReplicationAction(); } for (ReplicationResponse.ShardInfo.Failure failure : shardResponse.getShardInfo().getFailures()) { - shardFailures.add(new DefaultShardOperationFailedException(new BroadcastShardOperationFailedException(new ShardId(failure.index(), failure.shardId()), failure.getCause()))); + shardFailures.add(new DefaultShardOperationFailedException(new BroadcastShardOperationFailedException(failure.fullShardId(), failure.getCause()))); } } } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index b2972201808..58b73b5e672 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.shard.IndexShard; @@ -297,7 +298,7 @@ public abstract class TransportReplicationAction listener) { this.request = request; this.listener = listener; - this.observer = new ClusterStateObserver(clusterService, request.timeout(), logger); + this.observer = new ClusterStateObserver(clusterService, request.timeout(), logger, threadPool.getThreadContext()); } @Override @@ -432,7 +436,7 @@ public abstract class TransportReplicationAction entry : shardReplicaFailures.entrySet()) { RestStatus restStatus = ExceptionsHelper.status(entry.getValue()); - failuresArray[slot++] = new ReplicationResponse.ShardInfo.Failure( - shardId.getIndex(), shardId.getId(), entry.getKey(), entry.getValue(), restStatus, false - ); + failuresArray[slot++] = new ReplicationResponse.ShardInfo.Failure(shardId, entry.getKey(), entry.getValue(), restStatus, false); } } else { failuresArray = ReplicationResponse.EMPTY; diff --git a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java 
b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 74d9f3c7017..97cc6b044a9 100644 --- a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -124,7 +124,7 @@ public abstract class TransportInstanceSingleOperationAction request) { - super(request); - } - - protected SingleShardRequest(ActionRequest request, String index) { - super(request); - this.index = index; - } - /** * @return a validation exception if the index property hasn't been set */ diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java index b7498bc8bd9..2257eaf71b1 100644 --- a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java @@ -61,15 +61,6 @@ public class BaseTasksRequest> extends return null; } - /** - * Get information about tasks from nodes based on the nodes ids specified. - * If none are passed, information for all nodes will be returned. - */ - public BaseTasksRequest(ActionRequest request, String... nodesIds) { - super(request); - this.nodesIds = nodesIds; - } - /** * Get information about tasks from nodes based on the nodes ids specified. * If none are passed, information for all nodes will be returned. diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index 42be7e4eefc..d2ce298565f 100644 --- a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -291,7 +291,7 @@ public abstract class TransportTasksAction< } protected NodeTaskRequest(TasksRequest tasksRequest) { - super(tasksRequest); + super(); this.tasksRequest = tasksRequest; } diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java index 5f541b02c71..6356c554991 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java @@ -41,8 +41,8 @@ public class MultiTermVectorsShardRequest extends SingleShardRequest(); diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index 3943d2e6a67..7047ee69040 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -82,7 +82,7 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction listener) { // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) { - createIndexAction.execute(new CreateIndexRequest(request).index(request.index()).cause("auto(update 
api)").masterNodeTimeout(request.timeout()), new ActionListener() { + createIndexAction.execute(new CreateIndexRequest().index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener() { @Override public void onResponse(CreateIndexResponse result) { innerExecute(request, listener); @@ -164,12 +164,12 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio } protected void shardOperation(final UpdateRequest request, final ActionListener listener, final int retryCount) { - IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex()); - IndexShard indexShard = indexService.getShard(request.shardId()); + final IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex()); + final IndexShard indexShard = indexService.getShard(request.shardId()); final UpdateHelper.Result result = updateHelper.prepare(request, indexShard); switch (result.operation()) { case UPSERT: - IndexRequest upsertRequest = new IndexRequest(result.action(), request); + IndexRequest upsertRequest = new IndexRequest((IndexRequest)result.action()); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference upsertSourceBytes = upsertRequest.source(); indexAction.execute(upsertRequest, new ActionListener() { @@ -206,7 +206,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio }); break; case INDEX: - IndexRequest indexRequest = new IndexRequest(result.action(), request); + IndexRequest indexRequest = new IndexRequest((IndexRequest)result.action()); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference indexSourceBytes = indexRequest.source(); indexAction.execute(indexRequest, new ActionListener() { diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index d28ba2986e2..34bc2b25187 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.fetch.source.FetchSourceContext; @@ -75,16 +76,15 @@ public class UpdateHelper extends AbstractComponent { final GetResult getResult = indexShard.getService().get(request.type(), request.id(), new String[]{RoutingFieldMapper.NAME, ParentFieldMapper.NAME, TTLFieldMapper.NAME, TimestampFieldMapper.NAME}, true, request.version(), request.versionType(), FetchSourceContext.FETCH_SOURCE, false); - return prepare(request, getResult); + return prepare(indexShard.shardId(), request, getResult); } /** * Prepares an update request by converting it into an index or delete request or an update response (no action). 
*/ @SuppressWarnings("unchecked") - protected Result prepare(UpdateRequest request, final GetResult getResult) { + protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult getResult) { long getDateNS = System.nanoTime(); - final ShardId shardId = new ShardId(getResult.getIndex(), request.shardId()); if (!getResult.isExists()) { if (request.upsertRequest() == null && !request.docAsUpsert()) { throw new DocumentMissingException(shardId, request.type(), request.id()); @@ -99,7 +99,7 @@ public class UpdateHelper extends AbstractComponent { // Tell the script that this is a create and not an update ctx.put("op", "create"); ctx.put("_source", upsertDoc); - ctx = executeScript(request, ctx); + ctx = executeScript(request.script, ctx); //Allow the script to set TTL using ctx._ttl if (ttl == null) { ttl = getTTLFromScriptContext(ctx); @@ -193,7 +193,7 @@ public class UpdateHelper extends AbstractComponent { ctx.put("_ttl", originalTtl); ctx.put("_source", sourceAndContent.v2()); - ctx = executeScript(request, ctx); + ctx = executeScript(request.script, ctx); operation = (String) ctx.get("op"); @@ -243,14 +243,14 @@ public class UpdateHelper extends AbstractComponent { } } - private Map executeScript(UpdateRequest request, Map ctx) { + private Map executeScript(Script script, Map ctx) { try { if (scriptService != null) { - ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request, Collections.emptyMap()); - script.setNextVar("ctx", ctx); - script.run(); + ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.UPDATE, Collections.emptyMap()); + executableScript.setNextVar("ctx", ctx); + executableScript.run(); // we need to unwrap the ctx... - ctx = (Map) script.unwrap(ctx); + ctx = (Map) executableScript.unwrap(ctx); } } catch (Exception e) { throw new IllegalArgumentException("failed to execute script", e); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 053f4aeaf33..345513d7f34 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -171,7 +171,7 @@ final class Bootstrap { // placeholder Settings nodeSettings = Settings.settingsBuilder() .put(settings) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .build(); node = new Node(nodeSettings); diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index e7461dabfe1..f81ba9eb1b1 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -19,8 +19,12 @@ package org.elasticsearch.client; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -80,11 +84,13 @@ import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.action.update.UpdateRequest; import 
org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import java.util.Map; + /** * A client provides a one stop interface for performing actions/operations against the cluster. *

@@ -100,7 +106,15 @@ import org.elasticsearch.common.settings.Settings; */ public interface Client extends ElasticsearchClient, Releasable { - String CLIENT_TYPE_SETTING = "client.type"; + Setting CLIENT_TYPE_SETTING_S = new Setting<>("client.type", "node", (s) -> { + switch (s) { + case "node": + case "transport": + return s; + default: + throw new IllegalArgumentException("Can't parse [client.type] must be one of [node, transport]"); + } + }, false, Setting.Scope.CLUSTER); /** * The admin client that can be used to perform administrative operations. @@ -597,5 +611,9 @@ public interface Client extends ElasticsearchClient, Releasable { */ Settings settings(); - Headers headers(); + /** + * Returns a new lightweight Client that applies all given headers to each of the requests + * issued from it. + */ + Client filterWithHeader(Map headers); } diff --git a/core/src/main/java/org/elasticsearch/client/FilterClient.java b/core/src/main/java/org/elasticsearch/client/FilterClient.java index 77abceef17a..d2ea209a8c2 100644 --- a/core/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/core/src/main/java/org/elasticsearch/client/FilterClient.java @@ -42,7 +42,7 @@ public abstract class FilterClient extends AbstractClient { * @see #in() */ public FilterClient(Client in) { - super(in.settings(), in.threadPool(), in.headers()); + super(in.settings(), in.threadPool()); this.in = in; } diff --git a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java index 4f64f63f8d7..3e9bed9e25d 100644 --- a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.support.AbstractClient; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -44,8 +43,8 @@ public class NodeClient extends AbstractClient { private final Map actions; @Inject - public NodeClient(Settings settings, ThreadPool threadPool, Headers headers, Map actions) { - super(settings, threadPool, headers); + public NodeClient(Settings settings, ThreadPool threadPool, Map actions) { + super(settings, threadPool); this.actions = unmodifiableMap(actions); } diff --git a/core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java b/core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java index fb0891da8cc..de134887303 100644 --- a/core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java +++ b/core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java @@ -20,7 +20,6 @@ package org.elasticsearch.client.node; import org.elasticsearch.client.Client; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.inject.AbstractModule; /** @@ -30,7 +29,6 @@ public class NodeClientModule extends AbstractModule { @Override protected void configure() { - bind(Headers.class).asEagerSingleton(); bind(Client.class).to(NodeClient.class).asEagerSingleton(); } } diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index f8badc3a8fb..ac60df1d67a 100644 --- 
a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -332,13 +332,17 @@ import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.Client; import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.client.FilterClient; import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Map; + /** * */ @@ -346,23 +350,15 @@ public abstract class AbstractClient extends AbstractComponent implements Client private final ThreadPool threadPool; private final Admin admin; - - private final Headers headers; private final ThreadedActionListener.Wrapper threadedWrapper; - public AbstractClient(Settings settings, ThreadPool threadPool, Headers headers) { + public AbstractClient(Settings settings, ThreadPool threadPool) { super(settings); this.threadPool = threadPool; - this.headers = headers; this.admin = new Admin(this); this.threadedWrapper = new ThreadedActionListener.Wrapper(logger, settings, threadPool); } - @Override - public Headers headers() { - return this.headers; - } - @Override public final Settings settings() { return this.settings; @@ -398,7 +394,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client @Override public final , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( Action action, Request request, ActionListener listener) { - headers.applyTo(request); listener = threadedWrapper.wrap(listener); doExecute(action, request, listener); } @@ -1757,4 +1752,17 @@ public abstract class AbstractClient extends AbstractComponent implements Client execute(GetSettingsAction.INSTANCE, request, listener); } } + + @Override + public Client filterWithHeader(Map headers) { + return new FilterClient(this) { + @Override + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute(Action action, Request request, ActionListener listener) { + ThreadContext threadContext = threadPool().getThreadContext(); + try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(headers)) { + super.doExecute(action, request, listener); + } + } + }; + } } diff --git a/core/src/main/java/org/elasticsearch/client/support/Headers.java b/core/src/main/java/org/elasticsearch/client/support/Headers.java deleted file mode 100644 index f46bd0a1c9d..00000000000 --- a/core/src/main/java/org/elasticsearch/client/support/Headers.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
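For orientation, a minimal caller-side sketch of the replacement for the Headers class being deleted in this hunk, assuming only the filterWithHeader(Map) method added to Client/AbstractClient above; the class name, method name and header key/value below are placeholders invented for the example.

    import java.util.Collections;
    import java.util.Map;

    import org.elasticsearch.client.Client;

    // Sketch: rather than a Headers singleton stamping every outgoing request, a caller
    // wraps an existing Client; the wrapper stashes these headers into the ThreadContext
    // around each execute() call, so they travel with the request.
    class FilteredClientSketch {
        static Client withTenantHeader(Client client) {
            Map<String, String> headers = Collections.singletonMap("X-Tenant", "acme"); // placeholder header
            return client.filterWithHeader(headers);
        }
    }
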
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.client.support; - -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.transport.TransportMessage; - -/** - * Client request headers picked up from the client settings. Applied to every - * request sent by the client (both transport and node clients) - */ -public class Headers { - - public static final String PREFIX = "request.headers"; - - public static final Headers EMPTY = new Headers(Settings.EMPTY) { - @Override - public > M applyTo(M message) { - return message; - } - }; - - private final Settings headers; - - @Inject - public Headers(Settings settings) { - headers = resolveHeaders(settings); - } - - public > M applyTo(M message) { - for (String key : headers.names()) { - if (!message.hasHeader(key)) { - message.putHeader(key, headers.get(key)); - } - } - return message; - } - - public Settings headers() { - return headers; - } - - static Settings resolveHeaders(Settings settings) { - Settings headers = settings.getAsSettings(PREFIX); - return headers != null ? headers : Settings.EMPTY; - } -} diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index ea809a8cc38..419c4d566a5 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.support.AbstractClient; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.client.transport.support.TransportProxyClient; import org.elasticsearch.cluster.ClusterNameModule; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -115,7 +114,7 @@ public class TransportClient extends AbstractClient { .put( InternalSettingsPreparer.prepareSettings(settings)) .put("network.server", false) .put(Node.NODE_CLIENT_SETTING.getKey(), true) - .put(CLIENT_TYPE_SETTING, CLIENT_TYPE); + .put(CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE); return new PluginsService(settingsBuilder.build(), null, null, pluginClasses); } @@ -177,7 +176,7 @@ public class TransportClient extends AbstractClient { private final TransportProxyClient proxy; private TransportClient(Injector injector) { - super(injector.getInstance(Settings.class), injector.getInstance(ThreadPool.class), injector.getInstance(Headers.class)); + super(injector.getInstance(Settings.class), injector.getInstance(ThreadPool.class)); this.injector = injector; nodesService = injector.getInstance(TransportClientNodesService.class); proxy = injector.getInstance(TransportProxyClient.class); diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 99c70255ca8..f9ee988d811 100644 --- 
a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -29,7 +29,6 @@ import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAct import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Requests; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; @@ -80,8 +79,6 @@ public class TransportClientNodesService extends AbstractComponent { private final Version minCompatibilityVersion; - private final Headers headers; - // nodes that are added to be discovered private volatile List listedNodes = Collections.emptyList(); @@ -109,13 +106,12 @@ public class TransportClientNodesService extends AbstractComponent { @Inject public TransportClientNodesService(Settings settings, ClusterName clusterName, TransportService transportService, - ThreadPool threadPool, Headers headers, Version version) { + ThreadPool threadPool, Version version) { super(settings); this.clusterName = clusterName; this.transportService = transportService; this.threadPool = threadPool; this.minCompatibilityVersion = version.minimumCompatibilityVersion(); - this.headers = headers; this.nodesSamplerInterval = CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL.get(this.settings); this.pingTimeout = CLIENT_TRANSPORT_PING_TIMEOUT.get(this.settings).millis(); @@ -364,7 +360,7 @@ public class TransportClientNodesService extends AbstractComponent { } try { LivenessResponse livenessResponse = transportService.submitRequest(listedNode, TransportLivenessAction.NAME, - headers.applyTo(new LivenessRequest()), + new LivenessRequest(), TransportRequestOptions.builder().withType(TransportRequestOptions.Type.STATE).withTimeout(pingTimeout).build(), new FutureTransportResponseHandler() { @Override @@ -434,8 +430,7 @@ public class TransportClientNodesService extends AbstractComponent { return; } } - transportService.sendRequest(listedNode, ClusterStateAction.NAME, - headers.applyTo(Requests.clusterStateRequest().clear().nodes(true).local(true)), + transportService.sendRequest(listedNode, ClusterStateAction.NAME, Requests.clusterStateRequest().clear().nodes(true).local(true), TransportRequestOptions.builder().withType(TransportRequestOptions.Type.STATE).withTimeout(pingTimeout).build(), new BaseTransportResponseHandler() { diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java index 3a9dd82732c..daf3000d710 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java @@ -22,6 +22,7 @@ package org.elasticsearch.cluster; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import java.io.IOException; @@ -31,18 +32,23 @@ import java.io.IOException; */ public class ClusterName implements Streamable { - public static final String SETTING = "cluster.name"; + public static final Setting CLUSTER_NAME_SETTING = new Setting<>("cluster.name", "elasticsearch", (s) -> { + if 
(s.isEmpty()) { + throw new IllegalArgumentException("[cluster.name] must not be empty"); + } + return s; + }, false, Setting.Scope.CLUSTER); - public static final ClusterName DEFAULT = new ClusterName("elasticsearch".intern()); + + public static final ClusterName DEFAULT = new ClusterName(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY).intern()); private String value; public static ClusterName clusterNameFromSettings(Settings settings) { - return new ClusterName(settings.get("cluster.name", ClusterName.DEFAULT.value())); + return new ClusterName(CLUSTER_NAME_SETTING.get(settings)); } private ClusterName() { - } public ClusterName(String value) { diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java index dd8c737b6b0..2a4d57724cd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -449,7 +449,7 @@ public class ClusterState implements ToXContent, Diffable { builder.startObject("indices"); for (IndexMetaData indexMetaData : metaData()) { - builder.startObject(indexMetaData.getIndex(), XContentBuilder.FieldCaseConversion.NONE); + builder.startObject(indexMetaData.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE); builder.field("state", indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH)); @@ -506,7 +506,7 @@ public class ClusterState implements ToXContent, Diffable { builder.startObject("routing_table"); builder.startObject("indices"); for (IndexRoutingTable indexRoutingTable : routingTable()) { - builder.startObject(indexRoutingTable.index(), XContentBuilder.FieldCaseConversion.NONE); + builder.startObject(indexRoutingTable.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE); builder.startObject("shards"); for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { builder.startArray(Integer.toString(indexShardRoutingTable.shardId().id())); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java index df857623570..dd30a711688 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java @@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import java.util.concurrent.atomic.AtomicReference; @@ -44,6 +45,7 @@ public class ClusterStateObserver { }; private final ClusterService clusterService; + private final ThreadContext contextHolder; volatile TimeValue timeOutValue; @@ -55,8 +57,8 @@ public class ClusterStateObserver { volatile boolean timedOut; - public ClusterStateObserver(ClusterService clusterService, ESLogger logger) { - this(clusterService, new TimeValue(60000), logger); + public ClusterStateObserver(ClusterService clusterService, ESLogger logger, ThreadContext contextHolder) { + this(clusterService, new TimeValue(60000), logger, contextHolder); } /** @@ -64,7 +66,7 @@ public class ClusterStateObserver { * will fail any existing or new #waitForNextChange calls. 
Set to null * to wait indefinitely */ - public ClusterStateObserver(ClusterService clusterService, @Nullable TimeValue timeout, ESLogger logger) { + public ClusterStateObserver(ClusterService clusterService, @Nullable TimeValue timeout, ESLogger logger, ThreadContext contextHolder) { this.clusterService = clusterService; this.lastObservedState = new AtomicReference<>(new ObservedState(clusterService.state())); this.timeOutValue = timeout; @@ -72,6 +74,7 @@ public class ClusterStateObserver { this.startTimeNS = System.nanoTime(); } this.logger = logger; + this.contextHolder = contextHolder; } /** last cluster state observer by this observer. Note that this may not be the current one */ @@ -146,7 +149,7 @@ public class ClusterStateObserver { listener.onNewClusterState(newState.clusterState); } else { logger.trace("observer: sampled state rejected by predicate ({}). adding listener to ClusterService", newState); - ObservingContext context = new ObservingContext(listener, changePredicate); + ObservingContext context = new ObservingContext(new ContextPreservingListener(listener, contextHolder.newStoredContext()), changePredicate); if (!observingContext.compareAndSet(null, context)) { throw new ElasticsearchException("already waiting for a cluster state change"); } @@ -317,4 +320,33 @@ public class ClusterStateObserver { return "version [" + clusterState.version() + "], status [" + status + "]"; } } + + private final static class ContextPreservingListener implements Listener { + private final Listener delegate; + private final ThreadContext.StoredContext tempContext; + + + private ContextPreservingListener(Listener delegate, ThreadContext.StoredContext storedContext) { + this.tempContext = storedContext; + this.delegate = delegate; + } + + @Override + public void onNewClusterState(ClusterState state) { + tempContext.restore(); + delegate.onNewClusterState(state); + } + + @Override + public void onClusterServiceClose() { + tempContext.restore(); + delegate.onClusterServiceClose(); + } + + @Override + public void onTimeout(TimeValue timeout) { + tempContext.restore(); + delegate.onTimeout(timeout); + } + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 9a112613b1d..78eef316332 100644 --- a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -162,7 +162,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus List waitingShards = waitingIndicesMap.get(entry.key.getIndex()); if (waitingShards == null) { waitingShards = new ArrayList<>(); - waitingIndicesMap.put(entry.key.getIndex(), waitingShards); + waitingIndicesMap.put(entry.key.getIndexName(), waitingShards); } waitingShards.add(entry.key); } diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java index d4f453530bc..012cc66e110 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java @@ -103,7 +103,7 @@ public class NodeIndexDeletedAction extends AbstractComponent { // master. If we can't acquire the locks here immediately there might be a shard of this index still holding on to the lock // due to a "currently canceled recovery" or so. 
The shard will delete itself BEFORE the lock is released so it's guaranteed to be // deleted by the time we get the lock - indicesService.processPendingDeletes(new Index(index), indexSettings, new TimeValue(30, TimeUnit.MINUTES)); + indicesService.processPendingDeletes(indexSettings.getIndex(), indexSettings, new TimeValue(30, TimeUnit.MINUTES)); transportService.sendRequest(clusterState.nodes().masterNode(), INDEX_STORE_DELETED_ACTION_NAME, new NodeIndexStoreDeletedMessage(index, nodeId), EmptyTransportResponseHandler.INSTANCE_SAME); } catch (LockObtainFailedException exc) { diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 276edc9b23d..4aca9a4e235 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -74,13 +74,15 @@ public class ShardStateAction extends AbstractComponent { private final TransportService transportService; private final ClusterService clusterService; + private final ThreadPool threadPool; @Inject public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, - AllocationService allocationService, RoutingService routingService) { + AllocationService allocationService, RoutingService routingService, ThreadPool threadPool) { super(settings); this.transportService = transportService; this.clusterService = clusterService; + this.threadPool = threadPool; transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService, new ShardStartedClusterStateTaskExecutor(allocationService, logger), logger)); transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateTaskExecutor(allocationService, routingService, logger), logger)); @@ -124,7 +126,7 @@ public class ShardStateAction extends AbstractComponent { } public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { - ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger); + ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure); sendShardAction(SHARD_FAILED_ACTION_NAME, observer, shardRoutingEntry, listener); } @@ -290,7 +292,7 @@ public class ShardStateAction extends AbstractComponent { } public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String message, Listener listener) { - ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger); + ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, null); sendShardAction(SHARD_STARTED_ACTION_NAME, observer, shardRoutingEntry, listener); } diff --git a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index 0006c7da8c9..d48fc3138d9 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -304,29 +304,29 @@ public class ClusterBlocks extends AbstractDiffable { public Builder addBlocks(IndexMetaData indexMetaData) { if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { - addIndexBlock(indexMetaData.getIndex(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); + addIndexBlock(indexMetaData.getIndex().getName(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); } if (IndexMetaData.INDEX_READ_ONLY_SETTING.get(indexMetaData.getSettings())) { - addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_ONLY_BLOCK); + addIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_READ_ONLY_BLOCK); } if (IndexMetaData.INDEX_BLOCKS_READ_SETTING.get(indexMetaData.getSettings())) { - addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_BLOCK); + addIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_READ_BLOCK); } if (IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.get(indexMetaData.getSettings())) { - addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); + addIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_WRITE_BLOCK); } if (IndexMetaData.INDEX_BLOCKS_METADATA_SETTING.get(indexMetaData.getSettings())) { - addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); + addIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_METADATA_BLOCK); } return this; } public Builder updateBlocks(IndexMetaData indexMetaData) { - removeIndexBlock(indexMetaData.getIndex(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); - removeIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_ONLY_BLOCK); - removeIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_BLOCK); - removeIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); - removeIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); + removeIndexBlock(indexMetaData.getIndex().getName(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); + removeIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_READ_ONLY_BLOCK); + removeIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_READ_BLOCK); + removeIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_WRITE_BLOCK); + removeIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_METADATA_BLOCK); return addBlocks(indexMetaData); } diff --git a/core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java b/core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java index 3fd10fd91da..ce1f3adb539 100644 --- a/core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java +++ b/core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java @@ -67,7 +67,7 @@ public final class ClusterIndexHealth implements Iterable, S } public ClusterIndexHealth(IndexMetaData indexMetaData, IndexRoutingTable indexRoutingTable) { - this.index = indexMetaData.getIndex(); + this.index = indexMetaData.getIndex().getName(); this.numberOfShards = indexMetaData.getNumberOfShards(); this.numberOfReplicas = indexMetaData.getNumberOfReplicas(); this.validationFailures = indexRoutingTable.validate(indexMetaData); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java index b8de2ea5256..4ad9b7e5317 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java @@ -117,7 +117,7 @@ public interface AliasOrIndex { @Override public Tuple next() { IndexMetaData indexMetaData = referenceIndexMetaDatas.get(index++); - return new Tuple<>(indexMetaData.getIndex(), indexMetaData.getAliases().get(aliasName)); + return new Tuple<>(indexMetaData.getIndex().getName(), indexMetaData.getAliases().get(aliasName)); } @Override diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index e5b170b05a6..091fde6dec8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -97,7 +97,7 @@ public class AliasValidator extends AbstractComponent { assert metaData != null; if (metaData.hasIndex(alias)) { - throw new InvalidAliasNameException(new Index(index), alias, "an index exists with the same name as the alias"); + throw new InvalidAliasNameException(metaData.index(alias).getIndex(), alias, "an index exists with the same name as the alias"); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index b2772c0b2c1..4fdd11c4dd4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -29,13 +29,11 @@ import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.node.DiscoveryNodeFilters; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -47,6 +45,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; import org.joda.time.DateTime; @@ -72,7 +71,7 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; /** * */ -public class IndexMetaData implements Diffable, FromXContentBuilder, ToXContent { +public class IndexMetaData implements Diffable, FromXContentBuilder, ToXContent { public interface Custom extends Diffable, ToXContent { @@ -150,6 +149,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild throw new IllegalStateException("No state match for [" + state + "]"); } } + public static final String INDEX_SETTING_PREFIX = "index."; public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards"; public static final Setting INDEX_NUMBER_OF_SHARDS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, false, Setting.Scope.INDEX); 
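For context, a small sketch of how a Setting constant like the one declared just above behaves when read; the sample values are invented, while the factory call mirrors the intSetting declaration in this hunk and the get(Settings) accessor used elsewhere in the change.

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    // Sketch: settings are declared as typed Setting constants with a default, a lower
    // bound and a scope, and are read via Setting.get(Settings) instead of raw string
    // lookups, so validation happens at parse time.
    class SettingSketch {
        public static void main(String[] args) {
            Setting<Integer> shards = Setting.intSetting("index.number_of_shards", 5, 1, false, Setting.Scope.INDEX);

            Settings settings = Settings.builder().put("index.number_of_shards", 3).build();
            System.out.println(shards.get(settings));        // 3
            System.out.println(shards.get(Settings.EMPTY));  // default: 5
            // A value below the declared minimum of 1 would be rejected when get() parses it.
        }
    }
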
@@ -196,15 +196,15 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final Setting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.exclude.", true, Setting.Scope.INDEX); public static final IndexMetaData PROTO = IndexMetaData.builder("") - .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1).numberOfReplicas(0).build(); public static final String KEY_ACTIVE_ALLOCATIONS = "active_allocations"; private final int numberOfShards; private final int numberOfReplicas; - private final String index; + private final Index index; private final long version; private final State state; @@ -229,7 +229,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild private final Version indexUpgradedVersion; private final org.apache.lucene.util.Version minimumCompatibleLuceneVersion; - private IndexMetaData(String index, long version, State state, int numberOfShards, int numberOfReplicas, Settings settings, + private IndexMetaData(Index index, long version, State state, int numberOfShards, int numberOfReplicas, Settings settings, ImmutableOpenMap mappings, ImmutableOpenMap aliases, ImmutableOpenMap customs, ImmutableOpenIntMap> activeAllocationIds, DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters, @@ -254,12 +254,12 @@ public class IndexMetaData implements Diffable, FromXContentBuild this.minimumCompatibleLuceneVersion = minimumCompatibleLuceneVersion; } - public String getIndex() { + public Index getIndex() { return index; } public String getIndexUUID() { - return settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE); + return index.getUUID(); } /** @@ -466,7 +466,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild private final Diff>> activeAllocationIds; public IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) { - index = after.index; + index = after.index.getName(); version = after.version; state = after.state; settings = after.settings; @@ -486,16 +486,16 @@ public class IndexMetaData implements Diffable, FromXContentBuild aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), AliasMetaData.PROTO); customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), new DiffableUtils.DiffableValueSerializer() { - @Override - public Custom read(StreamInput in, String key) throws IOException { - return lookupPrototypeSafe(key).readFrom(in); - } + @Override + public Custom read(StreamInput in, String key) throws IOException { + return lookupPrototypeSafe(key).readFrom(in); + } - @Override - public Diff readDiff(StreamInput in, String key) throws IOException { - return lookupPrototypeSafe(key).readDiffFrom(in); - } - }); + @Override + public Diff readDiff(StreamInput in, String key) throws IOException { + return lookupPrototypeSafe(key).readDiffFrom(in); + } + }); activeAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(in, DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance()); } @@ -559,7 +559,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(index); + out.writeString(index.getName()); // uuid will come as part of settings 
out.writeLong(version); out.writeByte(state.id()); writeSettingsToStream(settings, out); @@ -611,7 +611,7 @@ public class IndexMetaData implements Diffable, FromXContentBuild } public Builder(IndexMetaData indexMetaData) { - this.index = indexMetaData.getIndex(); + this.index = indexMetaData.getIndex().getName(); this.state = indexMetaData.state; this.version = indexMetaData.version; this.settings = indexMetaData.getSettings(); @@ -791,19 +791,20 @@ public class IndexMetaData implements Diffable, FromXContentBuild try { minimumCompatibleLuceneVersion = org.apache.lucene.util.Version.parse(stringLuceneVersion); } catch (ParseException ex) { - throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE +"] setting", ex); + throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE + "] setting", ex); } } else { minimumCompatibleLuceneVersion = null; } - return new IndexMetaData(index, version, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), + final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE); + return new IndexMetaData(new Index(index, uuid), version, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), tmpAliases.build(), customs.build(), filledActiveAllocationIds.build(), requireFilters, includeFilters, excludeFilters, indexCreatedVersion, indexUpgradedVersion, minimumCompatibleLuceneVersion); } public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(indexMetaData.getIndex(), XContentBuilder.FieldCaseConversion.NONE); + builder.startObject(indexMetaData.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE); builder.field("version", indexMetaData.getVersion()); builder.field("state", indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH)); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index d2f3a47b754..0661f6c4362 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -159,7 +159,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { String[] indexNames = new String[resolvedIndices.size()]; int i = 0; for (IndexMetaData indexMetaData : resolvedIndices) { - indexNames[i++] = indexMetaData.getIndex(); + indexNames[i++] = indexMetaData.getIndex().getName(); } throw new IllegalArgumentException("Alias [" + expression + "] has more than one indices associated with it [" + Arrays.toString(indexNames) + "], can't execute a single index op"); } @@ -167,14 +167,14 @@ public class IndexNameExpressionResolver extends AbstractComponent { for (IndexMetaData index : resolvedIndices) { if (index.getState() == IndexMetaData.State.CLOSE) { if (failClosed) { - throw new IndexClosedException(new Index(index.getIndex())); + throw new IndexClosedException(index.getIndex()); } else { if (options.forbidClosedIndices() == false) { - concreteIndices.add(index.getIndex()); + concreteIndices.add(index.getIndex().getName()); } } } else if (index.getState() == IndexMetaData.State.OPEN) { - concreteIndices.add(index.getIndex()); + concreteIndices.add(index.getIndex().getName()); } else { 
throw new IllegalStateException("index state [" + index.getState() + "] not supported"); } @@ -640,7 +640,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { } else { for (IndexMetaData meta : aliasOrIndex.getIndices()) { if (excludeState == null || meta.getState() != excludeState) { - expand.add(meta.getIndex()); + expand.add(meta.getIndex().getName()); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 0e41dda1888..9fb526b7408 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -50,6 +50,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.recovery.RecoverySettings; @@ -229,7 +230,7 @@ public class MetaData implements Iterable, Diffable, Fr public boolean equalsAliases(MetaData other) { for (ObjectCursor cursor : other.indices().values()) { IndexMetaData otherIndex = cursor.value; - IndexMetaData thisIndex= indices().get(otherIndex.getIndex()); + IndexMetaData thisIndex= index(otherIndex.getIndex()); if (thisIndex == null) { return false; } @@ -416,7 +417,7 @@ public class MetaData implements Iterable, Diffable, Fr String[] indexNames = new String[result.getIndices().size()]; int i = 0; for (IndexMetaData indexMetaData : result.getIndices()) { - indexNames[i++] = indexMetaData.getIndex(); + indexNames[i++] = indexMetaData.getIndex().getName(); } throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has more than one index associated with it [" + Arrays.toString(indexNames) + "], can't execute a single index op"); } @@ -451,6 +452,10 @@ public class MetaData implements Iterable, Diffable, Fr return indices.get(index); } + public IndexMetaData index(Index index) { + return index(index.getName()); + } + public ImmutableOpenMap indices() { return this.indices; } @@ -815,19 +820,19 @@ public class MetaData implements Iterable, Diffable, Fr // we know its a new one, increment the version and store indexMetaDataBuilder.version(indexMetaDataBuilder.version() + 1); IndexMetaData indexMetaData = indexMetaDataBuilder.build(); - indices.put(indexMetaData.getIndex(), indexMetaData); + indices.put(indexMetaData.getIndex().getName(), indexMetaData); return this; } public Builder put(IndexMetaData indexMetaData, boolean incrementVersion) { - if (indices.get(indexMetaData.getIndex()) == indexMetaData) { + if (indices.get(indexMetaData.getIndex().getName()) == indexMetaData) { return this; } // if we put a new index metadata, increment its version if (incrementVersion) { indexMetaData = IndexMetaData.builder(indexMetaData).version(indexMetaData.getVersion() + 1).build(); } - indices.put(indexMetaData.getIndex(), indexMetaData); + indices.put(indexMetaData.getIndex().getName(), indexMetaData); return this; } @@ -964,7 +969,7 @@ public class MetaData implements Iterable, Diffable, Fr // do the required operations, the bottleneck isn't resolving expressions into concrete indices. 
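Note: MetaData gains an Index-typed lookup overload that, at this point in the change, simply delegates to the existing String overload by name (no UUID check yet), while the internal indices map stays keyed by name via getIndex().getName(). A hedged usage sketch; metaData, indexMetaData and the surrounding locals are hypothetical:

    // metaData and indexMetaData are hypothetical locals for illustration.
    MetaData metaData = clusterState.metaData();
    Index someIndex = indexMetaData.getIndex();

    IndexMetaData byName  = metaData.index(someIndex.getName()); // existing String overload
    IndexMetaData byIndex = metaData.index(someIndex);           // new overload, delegates to getName()
    assert byName == byIndex;                                    // same entry, since the map is still name-keyed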
List allIndicesLst = new ArrayList<>(); for (ObjectCursor cursor : indices.values()) { - allIndicesLst.add(cursor.value.getIndex()); + allIndicesLst.add(cursor.value.getIndex().getName()); } String[] allIndices = allIndicesLst.toArray(new String[allIndicesLst.size()]); @@ -973,9 +978,9 @@ public class MetaData implements Iterable, Diffable, Fr for (ObjectCursor cursor : indices.values()) { IndexMetaData indexMetaData = cursor.value; if (indexMetaData.getState() == IndexMetaData.State.OPEN) { - allOpenIndicesLst.add(indexMetaData.getIndex()); + allOpenIndicesLst.add(indexMetaData.getIndex().getName()); } else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { - allClosedIndicesLst.add(indexMetaData.getIndex()); + allClosedIndicesLst.add(indexMetaData.getIndex().getName()); } } String[] allOpenIndices = allOpenIndicesLst.toArray(new String[allOpenIndicesLst.size()]); @@ -985,7 +990,7 @@ public class MetaData implements Iterable, Diffable, Fr SortedMap aliasAndIndexLookup = new TreeMap<>(); for (ObjectCursor cursor : indices.values()) { IndexMetaData indexMetaData = cursor.value; - aliasAndIndexLookup.put(indexMetaData.getIndex(), new AliasOrIndex.Index(indexMetaData)); + aliasAndIndexLookup.put(indexMetaData.getIndex().getName(), new AliasOrIndex.Index(indexMetaData)); for (ObjectObjectCursor aliasCursor : indexMetaData.getAliases()) { AliasMetaData aliasMetaData = aliasCursor.value; @@ -998,7 +1003,7 @@ public class MetaData implements Iterable, Diffable, Fr alias.addIndex(indexMetaData); } else if (aliasOrIndex instanceof AliasOrIndex.Index) { AliasOrIndex.Index index = (AliasOrIndex.Index) aliasOrIndex; - throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index [" + index.getIndex().getIndex() + "] have the same name"); + throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index " + index.getIndex().getIndex() + " have the same name"); } else { throw new IllegalStateException("unexpected alias [" + aliasMetaData.getAlias() + "][" + aliasOrIndex + "]"); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 2344e9af77c..fbf3446b2dd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -137,22 +137,22 @@ public class MetaDataCreateIndexService extends AbstractComponent { public void validateIndexName(String index, ClusterState state) { if (state.routingTable().hasIndex(index)) { - throw new IndexAlreadyExistsException(new Index(index)); + throw new IndexAlreadyExistsException(state.routingTable().index(index).getIndex()); } if (state.metaData().hasIndex(index)) { - throw new IndexAlreadyExistsException(new Index(index)); + throw new IndexAlreadyExistsException(state.metaData().index(index).getIndex()); } if (!Strings.validFileName(index)) { - throw new InvalidIndexNameException(new Index(index), index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); + throw new InvalidIndexNameException(index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); } if (index.contains("#")) { - throw new InvalidIndexNameException(new Index(index), index, "must not contain '#'"); + throw new InvalidIndexNameException(index, 
"must not contain '#'"); } if (index.charAt(0) == '_') { - throw new InvalidIndexNameException(new Index(index), index, "must not start with '_'"); + throw new InvalidIndexNameException(index, "must not start with '_'"); } if (!index.toLowerCase(Locale.ROOT).equals(index)) { - throw new InvalidIndexNameException(new Index(index), index, "must be lowercase"); + throw new InvalidIndexNameException(index, "must be lowercase"); } int byteCount = 0; try { @@ -162,15 +162,15 @@ public class MetaDataCreateIndexService extends AbstractComponent { throw new ElasticsearchException("Unable to determine length of index name", e); } if (byteCount > MAX_INDEX_NAME_BYTES) { - throw new InvalidIndexNameException(new Index(index), index, + throw new InvalidIndexNameException(index, "index name is too long, (" + byteCount + - " > " + MAX_INDEX_NAME_BYTES + ")"); + " > " + MAX_INDEX_NAME_BYTES + ")"); } if (state.metaData().hasAlias(index)) { - throw new InvalidIndexNameException(new Index(index), index, "already exists as alias"); + throw new InvalidIndexNameException(index, "already exists as alias"); } if (index.equals(".") || index.equals("..")) { - throw new InvalidIndexNameException(new Index(index), index, "must not be '.' or '..'"); + throw new InvalidIndexNameException(index, "must not be '.' or '..'"); } } @@ -187,242 +187,242 @@ public class MetaDataCreateIndexService extends AbstractComponent { return new ClusterStateUpdateResponse(acknowledged); } - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - boolean indexCreated = false; - String removalReason = null; - try { - validate(request, currentState); - - for (Alias alias : request.aliases()) { - aliasValidator.validateAlias(alias, request.index(), currentState.metaData()); - } - - // we only find a template when its an API call (a new index) - // find templates, highest order are better matching - List templates = findTemplates(request, currentState, indexTemplateFilter); - - Map customs = new HashMap<>(); - - // add the request mapping - Map> mappings = new HashMap<>(); - - Map templatesAliases = new HashMap<>(); - - List templateNames = new ArrayList<>(); - - for (Map.Entry entry : request.mappings().entrySet()) { - mappings.put(entry.getKey(), parseMapping(entry.getValue())); - } - - for (Map.Entry entry : request.customs().entrySet()) { - customs.put(entry.getKey(), entry.getValue()); - } - - // apply templates, merging the mappings into the request mapping if exists - for (IndexTemplateMetaData template : templates) { - templateNames.add(template.getName()); - for (ObjectObjectCursor cursor : template.mappings()) { - if (mappings.containsKey(cursor.key)) { - XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string())); - } else { - mappings.put(cursor.key, parseMapping(cursor.value.string())); - } - } - // handle custom - for (ObjectObjectCursor cursor : template.customs()) { - String type = cursor.key; - IndexMetaData.Custom custom = cursor.value; - IndexMetaData.Custom existing = customs.get(type); - if (existing == null) { - customs.put(type, custom); - } else { - IndexMetaData.Custom merged = existing.mergeWith(custom); - customs.put(type, merged); - } - } - //handle aliases - for (ObjectObjectCursor cursor : template.aliases()) { - AliasMetaData aliasMetaData = cursor.value; - //if an alias with same name came with the create index request itself, - // ignore this one taken from the index template - if (request.aliases().contains(new Alias(aliasMetaData.alias()))) { - 
continue; - } - //if an alias with same name was already processed, ignore this one - if (templatesAliases.containsKey(cursor.key)) { - continue; - } - - //Allow templatesAliases to be templated by replacing a token with the name of the index that we are applying it to - if (aliasMetaData.alias().contains("{index}")) { - String templatedAlias = aliasMetaData.alias().replace("{index}", request.index()); - aliasMetaData = AliasMetaData.newAliasMetaData(aliasMetaData, templatedAlias); - } - - aliasValidator.validateAliasMetaData(aliasMetaData, request.index(), currentState.metaData()); - templatesAliases.put(aliasMetaData.alias(), aliasMetaData); - } - } - - Settings.Builder indexSettingsBuilder = settingsBuilder(); - // apply templates, here, in reverse order, since first ones are better matching - for (int i = templates.size() - 1; i >= 0; i--) { - indexSettingsBuilder.put(templates.get(i).settings()); - } - // now, put the request settings, so they override templates - indexSettingsBuilder.put(request.settings()); - if (request.index().equals(ScriptService.SCRIPT_INDEX)) { - indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 1)); - } else { - if (indexSettingsBuilder.get(SETTING_NUMBER_OF_SHARDS) == null) { - indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 5)); - } - } - if (request.index().equals(ScriptService.SCRIPT_INDEX)) { - indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 0)); - indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, "0-all"); - } else { - if (indexSettingsBuilder.get(SETTING_NUMBER_OF_REPLICAS) == null) { - indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 1)); - } - } - - if (settings.get(SETTING_AUTO_EXPAND_REPLICAS) != null && indexSettingsBuilder.get(SETTING_AUTO_EXPAND_REPLICAS) == null) { - indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, settings.get(SETTING_AUTO_EXPAND_REPLICAS)); - } - - if (indexSettingsBuilder.get(SETTING_VERSION_CREATED) == null) { - DiscoveryNodes nodes = currentState.nodes(); - final Version createdVersion = Version.smallest(version, nodes.smallestNonClientNodeVersion()); - indexSettingsBuilder.put(SETTING_VERSION_CREATED, createdVersion); - } - - if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) { - indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis()); - } - - indexSettingsBuilder.put(SETTING_INDEX_UUID, Strings.randomBase64UUID()); - - Settings actualIndexSettings = indexSettingsBuilder.build(); - - // Set up everything, now locally create the index to see that things are ok, and apply - final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build(); - // create the index here (on the master) to validate it can be created, as well as adding the mapping - indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.emptyList()); - indexCreated = true; - // now add the mappings - IndexService indexService = indicesService.indexServiceSafe(request.index()); - MapperService mapperService = indexService.mapperService(); - // first, add the default mapping - if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + boolean indexCreated = false; + String removalReason = null; try { - mapperService.merge(MapperService.DEFAULT_MAPPING, new 
CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); - } catch (Exception e) { - removalReason = "failed on parsing default mapping on index creation"; - throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, MapperService.DEFAULT_MAPPING, e.getMessage()); + validate(request, currentState); + + for (Alias alias : request.aliases()) { + aliasValidator.validateAlias(alias, request.index(), currentState.metaData()); + } + + // we only find a template when its an API call (a new index) + // find templates, highest order are better matching + List templates = findTemplates(request, currentState, indexTemplateFilter); + + Map customs = new HashMap<>(); + + // add the request mapping + Map> mappings = new HashMap<>(); + + Map templatesAliases = new HashMap<>(); + + List templateNames = new ArrayList<>(); + + for (Map.Entry entry : request.mappings().entrySet()) { + mappings.put(entry.getKey(), parseMapping(entry.getValue())); + } + + for (Map.Entry entry : request.customs().entrySet()) { + customs.put(entry.getKey(), entry.getValue()); + } + + // apply templates, merging the mappings into the request mapping if exists + for (IndexTemplateMetaData template : templates) { + templateNames.add(template.getName()); + for (ObjectObjectCursor cursor : template.mappings()) { + if (mappings.containsKey(cursor.key)) { + XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string())); + } else { + mappings.put(cursor.key, parseMapping(cursor.value.string())); + } + } + // handle custom + for (ObjectObjectCursor cursor : template.customs()) { + String type = cursor.key; + IndexMetaData.Custom custom = cursor.value; + IndexMetaData.Custom existing = customs.get(type); + if (existing == null) { + customs.put(type, custom); + } else { + IndexMetaData.Custom merged = existing.mergeWith(custom); + customs.put(type, merged); + } + } + //handle aliases + for (ObjectObjectCursor cursor : template.aliases()) { + AliasMetaData aliasMetaData = cursor.value; + //if an alias with same name came with the create index request itself, + // ignore this one taken from the index template + if (request.aliases().contains(new Alias(aliasMetaData.alias()))) { + continue; + } + //if an alias with same name was already processed, ignore this one + if (templatesAliases.containsKey(cursor.key)) { + continue; + } + + //Allow templatesAliases to be templated by replacing a token with the name of the index that we are applying it to + if (aliasMetaData.alias().contains("{index}")) { + String templatedAlias = aliasMetaData.alias().replace("{index}", request.index()); + aliasMetaData = AliasMetaData.newAliasMetaData(aliasMetaData, templatedAlias); + } + + aliasValidator.validateAliasMetaData(aliasMetaData, request.index(), currentState.metaData()); + templatesAliases.put(aliasMetaData.alias(), aliasMetaData); + } + } + + Settings.Builder indexSettingsBuilder = settingsBuilder(); + // apply templates, here, in reverse order, since first ones are better matching + for (int i = templates.size() - 1; i >= 0; i--) { + indexSettingsBuilder.put(templates.get(i).settings()); + } + // now, put the request settings, so they override templates + indexSettingsBuilder.put(request.settings()); + if (request.index().equals(ScriptService.SCRIPT_INDEX)) { + indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 1)); + } else { + if 
(indexSettingsBuilder.get(SETTING_NUMBER_OF_SHARDS) == null) { + indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 5)); + } + } + if (request.index().equals(ScriptService.SCRIPT_INDEX)) { + indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 0)); + indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, "0-all"); + } else { + if (indexSettingsBuilder.get(SETTING_NUMBER_OF_REPLICAS) == null) { + indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 1)); + } + } + + if (settings.get(SETTING_AUTO_EXPAND_REPLICAS) != null && indexSettingsBuilder.get(SETTING_AUTO_EXPAND_REPLICAS) == null) { + indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, settings.get(SETTING_AUTO_EXPAND_REPLICAS)); + } + + if (indexSettingsBuilder.get(SETTING_VERSION_CREATED) == null) { + DiscoveryNodes nodes = currentState.nodes(); + final Version createdVersion = Version.smallest(version, nodes.smallestNonClientNodeVersion()); + indexSettingsBuilder.put(SETTING_VERSION_CREATED, createdVersion); + } + + if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) { + indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis()); + } + + indexSettingsBuilder.put(SETTING_INDEX_UUID, Strings.randomBase64UUID()); + + Settings actualIndexSettings = indexSettingsBuilder.build(); + + // Set up everything, now locally create the index to see that things are ok, and apply + final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build(); + // create the index here (on the master) to validate it can be created, as well as adding the mapping + indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.emptyList()); + indexCreated = true; + // now add the mappings + IndexService indexService = indicesService.indexServiceSafe(request.index()); + MapperService mapperService = indexService.mapperService(); + // first, add the default mapping + if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { + try { + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); + } catch (Exception e) { + removalReason = "failed on parsing default mapping on index creation"; + throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, MapperService.DEFAULT_MAPPING, e.getMessage()); + } + } + for (Map.Entry> entry : mappings.entrySet()) { + if (entry.getKey().equals(MapperService.DEFAULT_MAPPING)) { + continue; + } + try { + // apply the default here, its the first time we parse it + mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); + } catch (Exception e) { + removalReason = "failed on parsing mappings on index creation"; + throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); + } + } + + QueryShardContext queryShardContext = indexService.getQueryShardContext(); + for (Alias alias : request.aliases()) { + if (Strings.hasLength(alias.filter())) { + aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext); + } + } + for (AliasMetaData aliasMetaData : templatesAliases.values()) { + if (aliasMetaData.filter() != null) { + 
aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), queryShardContext); + } + } + + // now, update the mappings with the actual source + Map mappingsMetaData = new HashMap<>(); + for (DocumentMapper mapper : mapperService.docMappers(true)) { + MappingMetaData mappingMd = new MappingMetaData(mapper); + mappingsMetaData.put(mapper.type(), mappingMd); + } + + final IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(request.index()).settings(actualIndexSettings); + for (MappingMetaData mappingMd : mappingsMetaData.values()) { + indexMetaDataBuilder.putMapping(mappingMd); + } + + for (AliasMetaData aliasMetaData : templatesAliases.values()) { + indexMetaDataBuilder.putAlias(aliasMetaData); + } + for (Alias alias : request.aliases()) { + AliasMetaData aliasMetaData = AliasMetaData.builder(alias.name()).filter(alias.filter()) + .indexRouting(alias.indexRouting()).searchRouting(alias.searchRouting()).build(); + indexMetaDataBuilder.putAlias(aliasMetaData); + } + + for (Map.Entry customEntry : customs.entrySet()) { + indexMetaDataBuilder.putCustom(customEntry.getKey(), customEntry.getValue()); + } + + indexMetaDataBuilder.state(request.state()); + + final IndexMetaData indexMetaData; + try { + indexMetaData = indexMetaDataBuilder.build(); + } catch (Exception e) { + removalReason = "failed to build index metadata"; + throw e; + } + + indexService.getIndexEventListener().beforeIndexAddedToCluster(indexMetaData.getIndex(), + indexMetaData.getSettings()); + + MetaData newMetaData = MetaData.builder(currentState.metaData()) + .put(indexMetaData, false) + .build(); + + String maybeShadowIndicator = IndexMetaData.isIndexUsingShadowReplicas(indexMetaData.getSettings()) ? "s" : ""; + logger.info("[{}] creating index, cause [{}], templates {}, shards [{}]/[{}{}], mappings {}", + request.index(), request.cause(), templateNames, indexMetaData.getNumberOfShards(), + indexMetaData.getNumberOfReplicas(), maybeShadowIndicator, mappings.keySet()); + + ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + if (!request.blocks().isEmpty()) { + for (ClusterBlock block : request.blocks()) { + blocks.addIndexBlock(request.index(), block); + } + } + blocks.updateBlocks(indexMetaData); + + ClusterState updatedState = ClusterState.builder(currentState).blocks(blocks).metaData(newMetaData).build(); + + if (request.state() == State.OPEN) { + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(updatedState.routingTable()) + .addAsNew(updatedState.metaData().index(request.index())); + RoutingAllocation.Result routingResult = allocationService.reroute( + ClusterState.builder(updatedState).routingTable(routingTableBuilder.build()).build(), + "index [" + request.index() + "] created"); + updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build(); + } + removalReason = "cleaning up after validating index on master"; + return updatedState; + } finally { + if (indexCreated) { + // Index was already partially created - need to clean up + indicesService.removeIndex(request.index(), removalReason != null ? 
removalReason : "failed to create index"); + } } } - for (Map.Entry> entry : mappings.entrySet()) { - if (entry.getKey().equals(MapperService.DEFAULT_MAPPING)) { - continue; - } - try { - // apply the default here, its the first time we parse it - mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); - } catch (Exception e) { - removalReason = "failed on parsing mappings on index creation"; - throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); - } - } - - QueryShardContext queryShardContext = indexService.getQueryShardContext(); - for (Alias alias : request.aliases()) { - if (Strings.hasLength(alias.filter())) { - aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext); - } - } - for (AliasMetaData aliasMetaData : templatesAliases.values()) { - if (aliasMetaData.filter() != null) { - aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), queryShardContext); - } - } - - // now, update the mappings with the actual source - Map mappingsMetaData = new HashMap<>(); - for (DocumentMapper mapper : mapperService.docMappers(true)) { - MappingMetaData mappingMd = new MappingMetaData(mapper); - mappingsMetaData.put(mapper.type(), mappingMd); - } - - final IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(request.index()).settings(actualIndexSettings); - for (MappingMetaData mappingMd : mappingsMetaData.values()) { - indexMetaDataBuilder.putMapping(mappingMd); - } - - for (AliasMetaData aliasMetaData : templatesAliases.values()) { - indexMetaDataBuilder.putAlias(aliasMetaData); - } - for (Alias alias : request.aliases()) { - AliasMetaData aliasMetaData = AliasMetaData.builder(alias.name()).filter(alias.filter()) - .indexRouting(alias.indexRouting()).searchRouting(alias.searchRouting()).build(); - indexMetaDataBuilder.putAlias(aliasMetaData); - } - - for (Map.Entry customEntry : customs.entrySet()) { - indexMetaDataBuilder.putCustom(customEntry.getKey(), customEntry.getValue()); - } - - indexMetaDataBuilder.state(request.state()); - - final IndexMetaData indexMetaData; - try { - indexMetaData = indexMetaDataBuilder.build(); - } catch (Exception e) { - removalReason = "failed to build index metadata"; - throw e; - } - - indexService.getIndexEventListener().beforeIndexAddedToCluster(new Index(request.index()), - indexMetaData.getSettings()); - - MetaData newMetaData = MetaData.builder(currentState.metaData()) - .put(indexMetaData, false) - .build(); - - String maybeShadowIndicator = IndexMetaData.isIndexUsingShadowReplicas(indexMetaData.getSettings()) ? 
"s" : ""; - logger.info("[{}] creating index, cause [{}], templates {}, shards [{}]/[{}{}], mappings {}", - request.index(), request.cause(), templateNames, indexMetaData.getNumberOfShards(), - indexMetaData.getNumberOfReplicas(), maybeShadowIndicator, mappings.keySet()); - - ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - if (!request.blocks().isEmpty()) { - for (ClusterBlock block : request.blocks()) { - blocks.addIndexBlock(request.index(), block); - } - } - blocks.updateBlocks(indexMetaData); - - ClusterState updatedState = ClusterState.builder(currentState).blocks(blocks).metaData(newMetaData).build(); - - if (request.state() == State.OPEN) { - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(updatedState.routingTable()) - .addAsNew(updatedState.metaData().index(request.index())); - RoutingAllocation.Result routingResult = allocationService.reroute( - ClusterState.builder(updatedState).routingTable(routingTableBuilder.build()).build(), - "index [" + request.index() + "] created"); - updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build(); - } - removalReason = "cleaning up after validating index on master"; - return updatedState; - } finally { - if (indexCreated) { - // Index was already partially created - need to clean up - indicesService.removeIndex(request.index(), removalReason != null ? removalReason : "failed to create index"); - } - } - } - }); + }); } private Map parseMapping(String mappingSource) throws Exception { @@ -459,7 +459,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { if (validationErrors.isEmpty() == false) { ValidationException validationException = new ValidationException(); validationException.addValidationErrors(validationErrors); - throw new IndexCreationException(new Index(indexName), validationException); + throw new IndexCreationException(indexName, validationException); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 1e9f968f7a6..fe53f206b19 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -112,9 +112,9 @@ public class MetaDataIndexAliasesService extends AbstractComponent { logger.warn("[{}] failed to temporary create in order to apply alias action", e, indexMetaData.getIndex()); continue; } - indicesToClose.add(indexMetaData.getIndex()); + indicesToClose.add(indexMetaData.getIndex().getName()); } - indices.put(indexMetaData.getIndex(), indexService); + indices.put(indexMetaData.getIndex().getName(), indexService); } aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.getQueryShardContext()); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 1d13fc2079e..a86d65779b4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -169,7 +169,7 @@ public class MetaDataMappingService extends AbstractComponent { private boolean refreshIndexMapping(IndexService indexService, IndexMetaData.Builder builder) { boolean dirty = false; - String index = indexService.index().name(); + String 
index = indexService.index().getName(); try { List updatedTypes = new ArrayList<>(); for (DocumentMapper mapper : indexService.mapperService().docMappers(true)) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 8e9dbc6b673..2d7ba4c3c05 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -117,7 +117,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements nrReplicasChanged.put(numberOfReplicas, new ArrayList<>()); } - nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex()); + nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex().getName()); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index bb186a64a8c..2778d287975 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -30,12 +30,12 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -58,9 +58,9 @@ import java.util.Set; */ public class IndexRoutingTable extends AbstractDiffable implements Iterable { - public static final IndexRoutingTable PROTO = builder("").build(); + public static final IndexRoutingTable PROTO = builder(new Index("", "_na_")).build(); - private final String index; + private final Index index; private final ShardShuffler shuffler; // note, we assume that when the index routing is created, ShardRoutings are created for all possible number of @@ -69,7 +69,7 @@ public class IndexRoutingTable extends AbstractDiffable imple private final List allActiveShards; - IndexRoutingTable(String index, ImmutableOpenIntMap shards) { + IndexRoutingTable(Index index, ImmutableOpenIntMap shards) { this.index = index; this.shuffler = new RotationShardShuffler(Randomness.get().nextInt()); this.shards = shards; @@ -90,18 +90,8 @@ public class IndexRoutingTable extends AbstractDiffable imple * * @return id of the index */ - public String index() { - return this.index; - } - - - /** - * Return the index id - * - * @return id of the index - */ - public String getIndex() { - return index(); + public Index getIndex() { + return index; } /** @@ -118,13 +108,17 @@ public class IndexRoutingTable extends AbstractDiffable imple } public void validate(RoutingTableValidation validation, MetaData metaData) { - if (!metaData.hasIndex(index())) { - validation.addIndexFailure(index(), "Exists in routing does not exists in metadata"); + if (!metaData.hasIndex(index.getName())) { + validation.addIndexFailure(index.getName(), "Exists in routing does not exists in metadata"); + return; + } + IndexMetaData indexMetaData = metaData.index(index.getName()); + if (indexMetaData.getIndexUUID().equals(index.getUUID()) == false) { + 
validation.addIndexFailure(index.getName(), "Exists in routing does not exists in metadata with the same uuid"); return; } - IndexMetaData indexMetaData = metaData.index(index()); for (String failure : validate(indexMetaData)) { - validation.addIndexFailure(index, failure); + validation.addIndexFailure(index.getName(), failure); } } @@ -154,7 +148,7 @@ public class IndexRoutingTable extends AbstractDiffable imple + "] routing table has wrong number of replicas, expected [" + indexMetaData.getNumberOfReplicas() + "], got [" + routingNumberOfReplicas + "]"); } for (ShardRouting shardRouting : indexShardRoutingTable) { - if (!shardRouting.index().equals(index())) { + if (!shardRouting.index().equals(index)) { failures.add("shard routing has an index [" + shardRouting.index() + "] that is different than the routing table"); } } @@ -332,7 +326,7 @@ public class IndexRoutingTable extends AbstractDiffable imple @Override public IndexRoutingTable readFrom(StreamInput in) throws IOException { - String index = in.readString(); + Index index = Index.readIndex(in); Builder builder = new Builder(index); int size = in.readVInt(); @@ -345,23 +339,23 @@ public class IndexRoutingTable extends AbstractDiffable imple @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(index); + index.writeTo(out); out.writeVInt(shards.size()); for (IndexShardRoutingTable indexShard : this) { IndexShardRoutingTable.Builder.writeToThin(indexShard, out); } } - public static Builder builder(String index) { + public static Builder builder(Index index) { return new Builder(index); } public static class Builder { - private final String index; + private final Index index; private final ImmutableOpenIntMap.Builder shards = ImmutableOpenIntMap.builder(); - public Builder(String index) { + public Builder(Index index) { this.index = index; } @@ -422,11 +416,12 @@ public class IndexRoutingTable extends AbstractDiffable imple * Initializes an index, to be restored from snapshot */ private Builder initializeAsRestore(IndexMetaData indexMetaData, RestoreSource restoreSource, IntSet ignoreShards, boolean asNew, UnassignedInfo unassignedInfo) { + assert indexMetaData.getIndex().equals(index); if (!shards.isEmpty()) { throw new IllegalStateException("trying to initialize an index with fresh shards, but already has shards created"); } for (int shardId = 0; shardId < indexMetaData.getNumberOfShards(); shardId++) { - IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(indexMetaData.getIndex(), shardId)); + IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(index, shardId)); for (int i = 0; i <= indexMetaData.getNumberOfReplicas(); i++) { if (asNew && ignoreShards.contains(shardId)) { // This shards wasn't completely snapshotted - restore it as new shard @@ -444,11 +439,12 @@ public class IndexRoutingTable extends AbstractDiffable imple * Initializes a new empty index, with an option to control if its from an API or not. 
*/ private Builder initializeEmpty(IndexMetaData indexMetaData, UnassignedInfo unassignedInfo) { + assert indexMetaData.getIndex().equals(index); if (!shards.isEmpty()) { throw new IllegalStateException("trying to initialize an index with fresh shards, but already has shards created"); } for (int shardId = 0; shardId < indexMetaData.getNumberOfShards(); shardId++) { - IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(indexMetaData.getIndex(), shardId)); + IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(index, shardId)); for (int i = 0; i <= indexMetaData.getNumberOfReplicas(); i++) { indexShardRoutingBuilder.addShard(ShardRouting.newUnassigned(index, shardId, null, i == 0, unassignedInfo)); } @@ -539,21 +535,18 @@ public class IndexRoutingTable extends AbstractDiffable imple ordered.add(indexShard); } - CollectionUtil.timSort(ordered, new Comparator() { - @Override - public int compare(IndexShardRoutingTable o1, IndexShardRoutingTable o2) { - int v = o1.shardId().index().name().compareTo( - o2.shardId().index().name()); - if (v == 0) { - v = Integer.compare(o1.shardId().id(), - o2.shardId().id()); - } - return v; + CollectionUtil.timSort(ordered, (o1, o2) -> { + int v = o1.shardId().getIndex().getName().compareTo( + o2.shardId().getIndex().getName()); + if (v == 0) { + v = Integer.compare(o1.shardId().id(), + o2.shardId().id()); } + return v; }); for (IndexShardRoutingTable indexShard : ordered) { - sb.append("----shard_id [").append(indexShard.shardId().index().name()).append("][").append(indexShard.shardId().id()).append("]\n"); + sb.append("----shard_id [").append(indexShard.shardId().getIndex().getName()).append("][").append(indexShard.shardId().id()).append("]\n"); for (ShardRouting shard : indexShard) { sb.append("--------").append(shard.shortSummary()).append("\n"); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index bcdb7a43fef..d5169428450 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; @@ -617,11 +618,11 @@ public class IndexShardRoutingTable implements Iterable { } public static IndexShardRoutingTable readFrom(StreamInput in) throws IOException { - String index = in.readString(); + Index index = Index.readIndex(in); return readFromThin(in, index); } - public static IndexShardRoutingTable readFromThin(StreamInput in, String index) throws IOException { + public static IndexShardRoutingTable readFromThin(StreamInput in, Index index) throws IOException { int iShardId = in.readVInt(); Builder builder = new Builder(new ShardId(index, iShardId)); @@ -635,7 +636,7 @@ public class IndexShardRoutingTable implements Iterable { } public static void writeTo(IndexShardRoutingTable indexShard, StreamOutput out) throws IOException { - out.writeString(indexShard.shardId().index().name()); + out.writeString(indexShard.shardId().getIndex().getName()); writeToThin(indexShard, out); } diff 
--git a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 267dae80d55..184db017c10 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.math.MathUtils; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; @@ -95,13 +96,14 @@ public class OperationRouting extends AbstractComponent { // we use set here and not list since we might get duplicates for (String index : concreteIndices) { final IndexRoutingTable indexRouting = indexRoutingTable(clusterState, index); + final IndexMetaData indexMetaData = indexMetaData(clusterState, index); final Set effectiveRouting = routing.get(index); if (effectiveRouting != null) { for (String r : effectiveRouting) { - int shardId = generateShardId(clusterState, index, null, r); + int shardId = generateShardId(indexMetaData, null, r); IndexShardRoutingTable indexShard = indexRouting.shard(shardId); if (indexShard == null) { - throw new ShardNotFoundException(new ShardId(index, shardId)); + throw new ShardNotFoundException(new ShardId(indexRouting.getIndex(), shardId)); } // we might get duplicates, but that's ok, they will override one another set.add(indexShard); @@ -204,20 +206,25 @@ public class OperationRouting extends AbstractComponent { return indexRouting; } - protected IndexShardRoutingTable shards(ClusterState clusterState, String index, String id, String routing) { - int shardId = generateShardId(clusterState, index, id, routing); - return clusterState.getRoutingTable().shardRoutingTable(index, shardId); - } - - public ShardId shardId(ClusterState clusterState, String index, String id, @Nullable String routing) { - return new ShardId(index, generateShardId(clusterState, index, id, routing)); - } - - private int generateShardId(ClusterState clusterState, String index, String id, @Nullable String routing) { + protected IndexMetaData indexMetaData(ClusterState clusterState, String index) { IndexMetaData indexMetaData = clusterState.metaData().index(index); if (indexMetaData == null) { throw new IndexNotFoundException(index); } + return indexMetaData; + } + + protected IndexShardRoutingTable shards(ClusterState clusterState, String index, String id, String routing) { + int shardId = generateShardId(indexMetaData(clusterState, index), id, routing); + return clusterState.getRoutingTable().shardRoutingTable(index, shardId); + } + + public ShardId shardId(ClusterState clusterState, String index, String id, @Nullable String routing) { + IndexMetaData indexMetaData = indexMetaData(clusterState, index); + return new ShardId(indexMetaData.getIndex(), generateShardId(indexMetaData, id, routing)); + } + + private int generateShardId(IndexMetaData indexMetaData, String id, @Nullable String routing) { final int hash; if (routing == null) { hash = Murmur3HashFunction.hash(id); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java index ff6c8293420..77ae7b41d91 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java @@ -87,7 +87,7 @@ public class RoutingNode implements Iterable { // TODO use Set with ShardIds for faster lookup. for (ShardRouting shardRouting : shards) { if (shardRouting.isSameShard(shard)) { - throw new IllegalStateException("Trying to add a shard [" + shard.shardId().index().name() + "][" + shard.shardId().id() + "] to a node [" + nodeId + "] where it already exists"); + throw new IllegalStateException("Trying to add a shard [" + shard.shardId().getIndex().getName() + "][" + shard.shardId().id() + "] to a node [" + nodeId + "] where it already exists"); } } shards.add(shard); @@ -137,7 +137,7 @@ public class RoutingNode implements Iterable { List shards = new ArrayList<>(); for (ShardRouting shardEntry : this) { - if (!shardEntry.index().equals(index)) { + if (!shardEntry.getIndexName().equals(index)) { continue; } for (ShardRoutingState state : states) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 3a2567e3f46..6a6373f977d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import java.util.ArrayList; @@ -169,7 +170,7 @@ public class RoutingNodes implements Iterable { } else if (routing.primary() == false) { // primary without relocationID is initial recovery ShardRouting primary = findPrimary(routing); if (primary == null && initializing) { - primary = routingTable.index(routing.index()).shard(routing.shardId().id()).primary; + primary = routingTable.index(routing.index().getName()).shard(routing.shardId().id()).primary; } else if (primary == null) { throw new IllegalStateException("replica is initializing but primary is unassigned"); } @@ -348,7 +349,7 @@ public class RoutingNodes implements Iterable { */ public boolean allReplicasActive(ShardRouting shardRouting) { final List shards = assignedShards(shardRouting.shardId()); - if (shards.isEmpty() || shards.size() < this.routingTable.index(shardRouting.index()).shard(shardRouting.id()).size()) { + if (shards.isEmpty() || shards.size() < this.routingTable.index(shardRouting.index().getName()).shard(shardRouting.id()).size()) { return false; // if we are empty nothing is active if we have less than total at least one is unassigned } for (ShardRouting shard : shards) { @@ -778,7 +779,7 @@ public class RoutingNodes implements Iterable { int inactivePrimaryCount = 0; int inactiveShardCount = 0; int relocating = 0; - Map indicesAndShards = new HashMap<>(); + Map indicesAndShards = new HashMap<>(); for (RoutingNode node : routingNodes) { for (ShardRouting shard : node) { if (!shard.active() && shard.relocatingNodeId() == null) { @@ -800,10 +801,10 @@ public class RoutingNodes implements Iterable { } } // Assert that the active shard routing are identical. 
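Note: in the routing classes the String-returning getIndex() is replaced by getIndexName() wherever a plain name is wanted, while index() (see the ShardRouting hunks below) now returns the full Index. A short sketch of the two accessors side by side; routingNode and the index name literal are illustrative only:

    // routingNode is a hypothetical RoutingNode; "logs-2016-01" is a made-up name.
    for (ShardRouting shard : routingNode) {
        Index index = shard.index();           // name + uuid, as introduced by this change
        String name = shard.getIndexName();    // replaces the old String-returning getIndex()
        if ("logs-2016-01".equals(name)) {
            // string-keyed lookups such as RoutingTable.index(String) still take the name
        }
    }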
- Set> entries = indicesAndShards.entrySet(); + Set> entries = indicesAndShards.entrySet(); final List shards = new ArrayList<>(); - for (Map.Entry e : entries) { - String index = e.getKey(); + for (Map.Entry e : entries) { + Index index = e.getKey(); for (int i = 0; i < e.getValue(); i++) { for (RoutingNode routingNode : routingNodes) { for (ShardRouting shardRouting : routingNode) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 4bf196d07d0..6d81556eb2c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; @@ -88,6 +89,10 @@ public class RoutingTable implements Iterable, Diffable indicesRouting() { return indicesRouting; } @@ -109,7 +114,7 @@ public class RoutingTable implements Iterable, Diffable, Diffable, Diffable, Diffable, Diffable shardRoutingEntries = Iterables.concat(routingNodes.unassigned(), routingNodes.unassigned().ignored()); for (ShardRouting shardRoutingEntry : shardRoutingEntries) { - String index = shardRoutingEntry.index(); - IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index); + Index index = shardRoutingEntry.index(); + IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index.getName()); if (indexBuilder == null) { indexBuilder = new IndexRoutingTable.Builder(index); - indexRoutingTableBuilders.put(index, indexBuilder); + indexRoutingTableBuilders.put(index.getName(), indexBuilder); } - IndexShardRoutingTable refData = routingNodes.routingTable().index(shardRoutingEntry.index()).shard(shardRoutingEntry.id()); + IndexShardRoutingTable refData = routingNodes.routingTable().index(shardRoutingEntry.index().getName()).shard(shardRoutingEntry.id()); indexBuilder.addShard(refData, shardRoutingEntry); } @@ -446,7 +459,7 @@ public class RoutingTable implements Iterable, Diffable, Diffable, Diffable indexRoutingTable : indicesRouting.values()) { - indicesRouting.put(indexRoutingTable.value.index(), indexRoutingTable.value.normalizeVersions()); + indicesRouting.put(indexRoutingTable.value.getIndex().getName(), indexRoutingTable.value.normalizeVersions()); } RoutingTable table = new RoutingTable(version, indicesRouting.build()); indicesRouting = null; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index 5ffaee0f2f9..47509852d93 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; @@ -44,7 +45,7 @@ public final class 
ShardRouting implements Streamable, ToXContent { */ public static final long UNAVAILABLE_EXPECTED_SHARD_SIZE = -1; - private String index; + private Index index; private int shardId; private String currentNodeId; private String relocatingNodeId; @@ -75,7 +76,7 @@ public final class ShardRouting implements Streamable, ToXContent { * A constructor to internally create shard routing instances, note, the internal flag should only be set to true * by either this class or tests. Visible for testing. */ - ShardRouting(String index, int shardId, String currentNodeId, + ShardRouting(Index index, int shardId, String currentNodeId, String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version, UnassignedInfo unassignedInfo, AllocationId allocationId, boolean internal, long expectedShardSize) { this.index = index; @@ -105,22 +106,19 @@ public final class ShardRouting implements Streamable, ToXContent { /** * Creates a new unassigned shard. */ - public static ShardRouting newUnassigned(String index, int shardId, RestoreSource restoreSource, boolean primary, UnassignedInfo unassignedInfo) { + public static ShardRouting newUnassigned(Index index, int shardId, RestoreSource restoreSource, boolean primary, UnassignedInfo unassignedInfo) { return new ShardRouting(index, shardId, null, null, restoreSource, primary, ShardRoutingState.UNASSIGNED, 0, unassignedInfo, null, true, UNAVAILABLE_EXPECTED_SHARD_SIZE); } - /** - * The index name. - */ - public String index() { + public Index index() { return this.index; } /** * The index name. */ - public String getIndex() { - return index(); + public String getIndexName() { + return index().getName(); } /** @@ -302,13 +300,13 @@ public final class ShardRouting implements Streamable, ToXContent { return entry; } - public static ShardRouting readShardRoutingEntry(StreamInput in, String index, int shardId) throws IOException { + public static ShardRouting readShardRoutingEntry(StreamInput in, Index index, int shardId) throws IOException { ShardRouting entry = new ShardRouting(); entry.readFrom(in, index, shardId); return entry; } - public void readFrom(StreamInput in, String index, int shardId) throws IOException { + public void readFrom(StreamInput in, Index index, int shardId) throws IOException { this.index = index; this.shardId = shardId; readFromThin(in); @@ -344,7 +342,7 @@ public final class ShardRouting implements Streamable, ToXContent { @Override public void readFrom(StreamInput in) throws IOException { - readFrom(in, in.readString(), in.readVInt()); + readFrom(in, Index.readIndex(in), in.readVInt()); } /** @@ -398,7 +396,7 @@ public final class ShardRouting implements Streamable, ToXContent { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(index); + index.writeTo(out); out.writeVInt(shardId); writeToThin(out); } @@ -720,7 +718,7 @@ public final class ShardRouting implements Streamable, ToXContent { .field("node", currentNodeId()) .field("relocating_node", relocatingNodeId()) .field("shard", shardId().id()) - .field("index", shardId().index().name()) + .field("index", shardId().getIndex().getName()) .field("version", version); if (expectedShardSize != UNAVAILABLE_EXPECTED_SHARD_SIZE) { builder.field("expected_shard_size_in_bytes", expectedShardSize); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java index 7c446aa13d0..68f210fc144 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java @@ -269,7 +269,7 @@ public class UnassignedInfo implements ToXContent, Writeable { long minDelaySetting = Long.MAX_VALUE; for (ShardRouting shard : state.routingTable().shardsWithState(ShardRoutingState.UNASSIGNED)) { if (shard.primary() == false) { - IndexMetaData indexMetaData = state.metaData().index(shard.getIndex()); + IndexMetaData indexMetaData = state.metaData().index(shard.getIndexName()); boolean delayed = shard.unassignedInfo().getLastComputedLeftDelayNanos() > 0; long delayTimeoutSetting = shard.unassignedInfo().getAllocationDelayTimeoutSettingNanos(settings, indexMetaData.getSettings()); if (delayed && delayTimeoutSetting > 0 && delayTimeoutSetting < minDelaySetting) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 25937595556..99b1974f5d7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -131,7 +131,7 @@ public class AllocationService extends AbstractComponent { for (IndexRoutingTable indexRoutingTable : newRoutingTable) { final IndexMetaData indexMetaData = currentMetaData.index(indexRoutingTable.getIndex()); if (indexMetaData == null) { - throw new IllegalStateException("no metadata found for index [" + indexRoutingTable.index() + "]"); + throw new IllegalStateException("no metadata found for index " + indexRoutingTable.getIndex().getName()); } IndexMetaData.Builder indexMetaDataBuilder = null; for (IndexShardRoutingTable shardRoutings : indexRoutingTable) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 80f634e13cf..574f12265a7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -42,7 +42,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.PriorityComparator; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; @@ -513,7 +512,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards final ModelNode sourceNode = nodes.get(node.nodeId()); assert sourceNode != null; final NodeSorter sorter = newNodeSorter(); - sorter.reset(shard.getIndex()); + sorter.reset(shard.getIndexName()); final ModelNode[] nodes = sorter.modelNodes; assert sourceNode.containsShard(shard); /* @@ -591,24 +590,20 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards */ final AllocationDeciders deciders = allocation.deciders(); final PriorityComparator secondaryComparator = PriorityComparator.getAllocationComparator(allocation); - final Comparator comparator = new Comparator() { - @Override - public int compare(ShardRouting o1, - ShardRouting o2) { - if (o1.primary() ^ o2.primary()) { - return o1.primary() ? -1 : o2.primary() ? 
1 : 0; - } - final int indexCmp; - if ((indexCmp = o1.index().compareTo(o2.index())) == 0) { - return o1.getId() - o2.getId(); - } - // this comparator is more expensive than all the others up there - // that's why it's added last even though it could be easier to read - // if we'd apply it earlier. this comparator will only differentiate across - // indices all shards of the same index is treated equally. - final int secondary = secondaryComparator.compare(o1, o2); - return secondary == 0 ? indexCmp : secondary; + final Comparator comparator = (o1, o2) -> { + if (o1.primary() ^ o2.primary()) { + return o1.primary() ? -1 : o2.primary() ? 1 : 0; } + final int indexCmp; + if ((indexCmp = o1.getIndexName().compareTo(o2.getIndexName())) == 0) { + return o1.getId() - o2.getId(); + } + // this comparator is more expensive than all the others up there + // that's why it's added last even though it could be easier to read + // if we'd apply it earlier. this comparator will only differentiate across + // indices all shards of the same index is treated equally. + final int secondary = secondaryComparator.compare(o1, o2); + return secondary == 0 ? indexCmp : secondary; }; /* * we use 2 arrays and move replicas to the second array once we allocated an identical @@ -655,7 +650,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } if (!node.containsShard(shard)) { // simulate weight if we would add shard to node - float currentWeight = weight.weightShardAdded(this, node, shard.index()); + float currentWeight = weight.weightShardAdded(this, node, shard.getIndexName()); /* * Unless the operation is not providing any gains we * don't check deciders @@ -678,8 +673,8 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards */ if (currentDecision.type() == decision.type()) { final int repId = shard.id(); - final int nodeHigh = node.highestPrimary(shard.index()); - final int minNodeHigh = minNode.highestPrimary(shard.index()); + final int nodeHigh = node.highestPrimary(shard.index().getName()); + final int minNodeHigh = minNode.highestPrimary(shard.getIndexName()); if ((((nodeHigh > repId && minNodeHigh > repId) || (nodeHigh < repId && minNodeHigh < repId)) && (nodeHigh < minNodeHigh)) || (nodeHigh > minNodeHigh && nodeHigh > repId && minNodeHigh < repId)) { minNode = node; @@ -855,9 +850,9 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public void addShard(ShardRouting shard, Decision decision) { - ModelIndex index = indices.get(shard.index()); + ModelIndex index = indices.get(shard.getIndexName()); if (index == null) { - index = new ModelIndex(shard.index()); + index = new ModelIndex(shard.getIndexName()); indices.put(index.getIndexId(), index); } index.addShard(shard, decision); @@ -865,12 +860,12 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public Decision removeShard(ShardRouting shard) { - ModelIndex index = indices.get(shard.index()); + ModelIndex index = indices.get(shard.getIndexName()); Decision removed = null; if (index != null) { removed = index.removeShard(shard); if (removed != null && index.numShards() == 0) { - indices.remove(shard.index()); + indices.remove(shard.getIndexName()); } } numShards--; @@ -890,7 +885,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } public boolean containsShard(ShardRouting shard) { - ModelIndex index = getIndex(shard.getIndex()); + ModelIndex index = getIndex(shard.getIndexName()); return 
index == null ? false : index.containsShard(shard); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java index 31fc51a8979..ed136d67d56 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.shard.ShardId; import java.io.IOException; import java.util.function.Consumer; @@ -101,15 +100,15 @@ public abstract class AbstractAllocateAllocationCommand implements AllocationCom @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.field(INDEX_KEY, shardId().index().name()); - builder.field(SHARD_KEY, shardId().id()); + builder.field(INDEX_KEY, index()); + builder.field(SHARD_KEY, shardId()); builder.field(NODE_KEY, node()); return builder; } public void writeTo(StreamOutput out) throws IOException { - out.writeString(shardId.getIndex()); - out.writeVInt(shardId.getId()); + out.writeString(index); + out.writeVInt(shardId); out.writeString(node); } @@ -143,20 +142,32 @@ public abstract class AbstractAllocateAllocationCommand implements AllocationCom } } - protected final ShardId shardId; + protected final String index; + protected final int shardId; protected final String node; - protected AbstractAllocateAllocationCommand(ShardId shardId, String node) { + protected AbstractAllocateAllocationCommand(String index, int shardId, String node) { + this.index = index; this.shardId = shardId; this.node = node; } + + /** + * Get the index name + * + * @return name of the index + */ + public String index() { + return this.index; + } + /** * Get the shard id * * @return id of the shard */ - public ShardId shardId() { + public int shardId() { return this.shardId; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java index f607755bca1..c7ddefedc24 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java @@ -51,8 +51,8 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation * @param node node id of the node to assign the shard to * @param acceptDataLoss whether the user agrees to data loss */ - public AllocateEmptyPrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { - super(shardId, node, acceptDataLoss); + public AllocateEmptyPrimaryAllocationCommand(String index, int shardId, String node, boolean acceptDataLoss) { + super(index, shardId, node, acceptDataLoss); } @Override @@ -70,7 +70,7 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation @Override public AllocateEmptyPrimaryAllocationCommand build() { validate(); - return new 
AllocateEmptyPrimaryAllocationCommand(new ShardId(index, shard), node, acceptDataLoss); + return new AllocateEmptyPrimaryAllocationCommand(index, shard, node, acceptDataLoss); } } @@ -98,17 +98,17 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation final ShardRouting shardRouting; try { - shardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + shardRouting = allocation.routingTable().shardRoutingTable(index, shardId).primaryShard(); } catch (IndexNotFoundException | ShardNotFoundException e) { return explainOrThrowRejectedCommand(explain, allocation, e); } if (shardRouting.unassigned() == false) { - return explainOrThrowRejectedCommand(explain, allocation, "primary " + shardId + " is already assigned"); + return explainOrThrowRejectedCommand(explain, allocation, "primary [" + index + "][" + shardId + "] is already assigned"); } if (shardRouting.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED && acceptDataLoss == false) { return explainOrThrowRejectedCommand(explain, allocation, - "allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true"); + "allocating an empty primary for [" + index + "][" + shardId + "] can result in data loss. Please confirm by setting the accept_data_loss parameter to true"); } initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java index f9d443a6618..616e08b8f3f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java @@ -47,11 +47,12 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation /** * Creates a new {@link AllocateReplicaAllocationCommand} * - * @param shardId {@link ShardId} of the shard to assign + * @param index index of the shard to assign + * @param shardId id of the shard to assign * @param node node id of the node to assign the shard to */ - public AllocateReplicaAllocationCommand(ShardId shardId, String node) { - super(shardId, node); + public AllocateReplicaAllocationCommand(String index, int shardId, String node) { + super(index, shardId, node); } @Override @@ -69,7 +70,7 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation @Override public AllocateReplicaAllocationCommand build() { validate(); - return new AllocateReplicaAllocationCommand(new ShardId(index, shard), node); + return new AllocateReplicaAllocationCommand(index, shard, node); } } @@ -96,20 +97,20 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation final ShardRouting primaryShardRouting; try { - primaryShardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + primaryShardRouting = allocation.routingTable().shardRoutingTable(index, shardId).primaryShard(); } catch (IndexNotFoundException | ShardNotFoundException e) { return explainOrThrowRejectedCommand(explain, allocation, e); } if (primaryShardRouting.unassigned()) { return explainOrThrowRejectedCommand(explain, allocation, - "trying to allocate a replica shard " + shardId + ", while corresponding primary shard is still 
unassigned"); + "trying to allocate a replica shard [" + index + "][" + shardId + "], while corresponding primary shard is still unassigned"); } - List replicaShardRoutings = allocation.routingTable().shardRoutingTable(shardId).replicaShardsWithState(ShardRoutingState.UNASSIGNED); + List replicaShardRoutings = allocation.routingTable().shardRoutingTable(index, shardId).replicaShardsWithState(ShardRoutingState.UNASSIGNED); ShardRouting shardRouting; if (replicaShardRoutings.isEmpty()) { return explainOrThrowRejectedCommand(explain, allocation, - "all copies of " + shardId +" are already assigned. Use the move allocation command instead"); + "all copies of [" + index + "][" + shardId + "] are already assigned. Use the move allocation command instead"); } else { shardRouting = replicaShardRoutings.get(0); } @@ -120,7 +121,7 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation if (explain) { return new RerouteExplanation(this, decision); } - throw new IllegalArgumentException("[" + name() + "] allocation of " + shardId + " on node " + discoNode + " is not allowed, reason: " + decision); + throw new IllegalArgumentException("[" + name() + "] allocation of [" + index + "][" + shardId + "] on node " + discoNode + " is not allowed, reason: " + decision); } initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java index 22cedfc6aa3..5ccd9e9bb63 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java @@ -47,12 +47,13 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation /** * Creates a new {@link AllocateStalePrimaryAllocationCommand} * - * @param shardId {@link ShardId} of the shard to assign + * @param index index of the shard to assign + * @param shardId id of the shard to assign * @param node node id of the node to assign the shard to * @param acceptDataLoss whether the user agrees to data loss */ - public AllocateStalePrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { - super(shardId, node, acceptDataLoss); + public AllocateStalePrimaryAllocationCommand(String index, int shardId, String node, boolean acceptDataLoss) { + super(index, shardId, node, acceptDataLoss); } @Override @@ -70,7 +71,7 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation @Override public AllocateStalePrimaryAllocationCommand build() { validate(); - return new AllocateStalePrimaryAllocationCommand(new ShardId(index, shard), node, acceptDataLoss); + return new AllocateStalePrimaryAllocationCommand(index, shard, node, acceptDataLoss); } } @@ -98,23 +99,23 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation final ShardRouting shardRouting; try { - shardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + shardRouting = allocation.routingTable().shardRoutingTable(index, shardId).primaryShard(); } catch (IndexNotFoundException | ShardNotFoundException e) { return explainOrThrowRejectedCommand(explain, allocation, e); } if (shardRouting.unassigned() == false) { - return 
explainOrThrowRejectedCommand(explain, allocation, "primary " + shardId + " is already assigned"); + return explainOrThrowRejectedCommand(explain, allocation, "primary [" + index + "][" + shardId + "] is already assigned"); } if (acceptDataLoss == false) { return explainOrThrowRejectedCommand(explain, allocation, - "allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true"); + "allocating an empty primary for [" + index + "][" + shardId + "] can result in data loss. Please confirm by setting the accept_data_loss parameter to true"); } - final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName()); if (shardRouting.allocatedPostIndexCreate(indexMetaData) == false) { return explainOrThrowRejectedCommand(explain, allocation, - "trying to allocate an existing primary shard " + shardId + ", while no such shard has ever been active"); + "trying to allocate an existing primary shard [" + index + "][" + shardId + "], while no such shard has ever been active"); } initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java index 35c1711d646..b982952c1f1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java @@ -44,8 +44,8 @@ public abstract class BasePrimaryAllocationCommand extends AbstractAllocateAlloc protected final boolean acceptDataLoss; - protected BasePrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { - super(shardId, node); + protected BasePrimaryAllocationCommand(String index, int shardId, String node, boolean acceptDataLoss) { + super(index, shardId, node); this.acceptDataLoss = acceptDataLoss; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java index c485cb3eab5..32eae8c19d7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.shard.ShardId; import java.io.IOException; @@ -53,12 +52,13 @@ public class CancelAllocationCommand implements AllocationCommand { @Override public CancelAllocationCommand readFrom(StreamInput in) throws IOException { - return new CancelAllocationCommand(ShardId.readShardId(in), in.readString(), in.readBoolean()); + return new CancelAllocationCommand(in.readString(), in.readVInt(), in.readString(), in.readBoolean()); } @Override public void writeTo(CancelAllocationCommand command, StreamOutput out) throws IOException { - command.shardId().writeTo(out); + out.writeString(command.index()); + out.writeVInt(command.shardId()); 
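// Wire-format note (editorial): with ShardId removed from CancelAllocationCommand, the factory now
// serializes the index name (writeString) and the shard id (writeVInt) separately, followed by the node
// and allowPrimary below; readFrom above reads them back in exactly this order, so the two methods must
// stay in sync.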
out.writeString(command.node()); out.writeBoolean(command.allowPrimary()); } @@ -100,7 +100,7 @@ public class CancelAllocationCommand implements AllocationCommand { if (nodeId == null) { throw new ElasticsearchParseException("[{}] command missing the node parameter", NAME); } - return new CancelAllocationCommand(new ShardId(index, shardId), nodeId, allowPrimary); + return new CancelAllocationCommand(index, shardId, nodeId, allowPrimary); } @Override @@ -110,8 +110,8 @@ public class CancelAllocationCommand implements AllocationCommand { } else { builder.startObject(objectName); } - builder.field("index", command.shardId().index().name()); - builder.field("shard", command.shardId().id()); + builder.field("index", command.index()); + builder.field("shard", command.shardId()); builder.field("node", command.node()); builder.field("allow_primary", command.allowPrimary()); builder.endObject(); @@ -119,17 +119,20 @@ public class CancelAllocationCommand implements AllocationCommand { } - private final ShardId shardId; + private final String index; + private final int shardId; private final String node; private final boolean allowPrimary; /** * Creates a new {@link CancelAllocationCommand} * + * @param index index of the shard which allocation should be canceled * @param shardId id of the shard which allocation should be canceled * @param node id of the node that manages the shard which allocation should be canceled */ - public CancelAllocationCommand(ShardId shardId, String node, boolean allowPrimary) { + public CancelAllocationCommand(String index, int shardId, String node, boolean allowPrimary) { + this.index = index; this.shardId = shardId; this.node = node; this.allowPrimary = allowPrimary; @@ -141,10 +144,18 @@ public class CancelAllocationCommand implements AllocationCommand { } /** + * Get the index of the shard which allocation should be canceled + * @return index of the shard which allocation should be canceled + */ + public String index() { + return this.index; + } + /** + * Get the id of the shard which allocation should be canceled * @return id of the shard which allocation should be canceled */ - public ShardId shardId() { + public int shardId() { return this.shardId; } @@ -166,7 +177,10 @@ public class CancelAllocationCommand implements AllocationCommand { boolean found = false; for (RoutingNodes.RoutingNodeIterator it = allocation.routingNodes().routingNodeIter(discoNode.id()); it.hasNext(); ) { ShardRouting shardRouting = it.next(); - if (!shardRouting.shardId().equals(shardId)) { + if (!shardRouting.shardId().getIndex().getName().equals(index)) { + continue; + } + if (shardRouting.shardId().id() != shardId) { continue; } found = true; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java index ed535df2f48..75a9400807c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.shard.ShardId; import java.io.IOException; @@ -49,12 +48,13 @@ public class MoveAllocationCommand implements AllocationCommand { 
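// MoveAllocationCommand below gets the same treatment as CancelAllocationCommand above: the ShardId field
// is split into a plain index name plus an int shard id. A minimal sketch of the new constructors
// (the index and node names here are made up for illustration, not part of this patch):
//
//   AllocationCommand cancel = new CancelAllocationCommand("my_index", 0, "node_1", false);
//   AllocationCommand move   = new MoveAllocationCommand("my_index", 0, "node_1", "node_2");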
@Override public MoveAllocationCommand readFrom(StreamInput in) throws IOException { - return new MoveAllocationCommand(ShardId.readShardId(in), in.readString(), in.readString()); + return new MoveAllocationCommand(in.readString(), in.readVInt(), in.readString(), in.readString()); } @Override public void writeTo(MoveAllocationCommand command, StreamOutput out) throws IOException { - command.shardId().writeTo(out); + out.writeString(command.index()); + out.writeVInt(command.shardId()); out.writeString(command.fromNode()); out.writeString(command.toNode()); } @@ -99,7 +99,7 @@ public class MoveAllocationCommand implements AllocationCommand { if (toNode == null) { throw new ElasticsearchParseException("[{}] command missing the to_node parameter", NAME); } - return new MoveAllocationCommand(new ShardId(index, shardId), fromNode, toNode); + return new MoveAllocationCommand(index, shardId, fromNode, toNode); } @Override @@ -109,19 +109,21 @@ public class MoveAllocationCommand implements AllocationCommand { } else { builder.startObject(objectName); } - builder.field("index", command.shardId().index().name()); - builder.field("shard", command.shardId().id()); + builder.field("index", command.index()); + builder.field("shard", command.shardId()); builder.field("from_node", command.fromNode()); builder.field("to_node", command.toNode()); builder.endObject(); } } - private final ShardId shardId; + private final String index; + private final int shardId; private final String fromNode; private final String toNode; - public MoveAllocationCommand(ShardId shardId, String fromNode, String toNode) { + public MoveAllocationCommand(String index, int shardId, String fromNode, String toNode) { + this.index = index; this.shardId = shardId; this.fromNode = fromNode; this.toNode = toNode; @@ -132,7 +134,9 @@ public class MoveAllocationCommand implements AllocationCommand { return NAME; } - public ShardId shardId() { + public String index() {return index; } + + public int shardId() { return this.shardId; } @@ -152,7 +156,10 @@ public class MoveAllocationCommand implements AllocationCommand { boolean found = false; for (ShardRouting shardRouting : allocation.routingNodes().node(fromDiscoNode.id())) { - if (!shardRouting.shardId().equals(shardId)) { + if (!shardRouting.shardId().getIndexName().equals(index)) { + continue; + } + if (shardRouting.shardId().id() != shardId) { continue; } found = true; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 23624f050a9..c0120179767 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -330,7 +330,7 @@ public class DiskThresholdDecider extends AllocationDecider { } // a flag for whether the primary shard has been previously allocated - IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName()); boolean primaryHasBeenAllocated = shardRouting.primary() && shardRouting.allocatedPostIndexCreate(indexMetaData); // checks for exact byte comparisons diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java 
b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 3c2e649387a..9131355876b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -92,7 +92,7 @@ public class EnableAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "allocation disabling is ignored"); } - final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName()); final Allocation enable; if (INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.exists(indexMetaData.getSettings())) { enable = INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.get(indexMetaData.getSettings()); diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index 98d98414db3..b592eeb1469 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -190,7 +190,7 @@ public class InternalClusterService extends AbstractLifecycleComponent nodeAttributes = discoveryNodeService.buildAttributes(); // note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling diff --git a/core/src/main/java/org/elasticsearch/common/ContextAndHeaderHolder.java b/core/src/main/java/org/elasticsearch/common/ContextAndHeaderHolder.java deleted file mode 100644 index 9a3140dba1a..00000000000 --- a/core/src/main/java/org/elasticsearch/common/ContextAndHeaderHolder.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common; - -import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; -import com.carrotsearch.hppc.ObjectObjectHashMap; -import org.elasticsearch.common.collect.ImmutableOpenMap; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -/** - * - */ -public class ContextAndHeaderHolder implements HasContextAndHeaders { - - private ObjectObjectHashMap context; - protected Map headers; - - @SuppressWarnings("unchecked") - @Override - public final synchronized V putInContext(Object key, Object value) { - if (context == null) { - context = new ObjectObjectHashMap<>(2); - } - return (V) context.put(key, value); - } - - @Override - public final synchronized void putAllInContext(ObjectObjectAssociativeContainer map) { - if (map == null) { - return; - } - if (context == null) { - context = new ObjectObjectHashMap<>(map); - } else { - context.putAll(map); - } - } - - @SuppressWarnings("unchecked") - @Override - public final synchronized V getFromContext(Object key) { - return context != null ? (V) context.get(key) : null; - } - - @SuppressWarnings("unchecked") - @Override - public final synchronized V getFromContext(Object key, V defaultValue) { - V value = getFromContext(key); - return value == null ? defaultValue : value; - } - - @Override - public final synchronized boolean hasInContext(Object key) { - return context != null && context.containsKey(key); - } - - @Override - public final synchronized int contextSize() { - return context != null ? context.size() : 0; - } - - @Override - public final synchronized boolean isContextEmpty() { - return context == null || context.isEmpty(); - } - - @Override - public synchronized ImmutableOpenMap getContext() { - return context != null ? ImmutableOpenMap.copyOf(context) : ImmutableOpenMap.of(); - } - - @Override - public synchronized void copyContextFrom(HasContext other) { - if (other == null) { - return; - } - - synchronized (other) { - ImmutableOpenMap otherContext = other.getContext(); - if (otherContext == null) { - return; - } - if (context == null) { - ObjectObjectHashMap map = new ObjectObjectHashMap<>(other.getContext().size()); - map.putAll(otherContext); - this.context = map; - } else { - context.putAll(otherContext); - } - } - } - - @SuppressWarnings("unchecked") - @Override - public final void putHeader(String key, Object value) { - if (headers == null) { - headers = new HashMap<>(); - } - headers.put(key, value); - } - - @SuppressWarnings("unchecked") - @Override - public final V getHeader(String key) { - return headers != null ? (V) headers.get(key) : null; - } - - @Override - public final boolean hasHeader(String key) { - return headers != null && headers.containsKey(key); - } - - @Override - public Set getHeaders() { - return headers != null ? 
headers.keySet() : Collections.emptySet(); - } - - @Override - public void copyHeadersFrom(HasHeaders from) { - if (from != null && from.getHeaders() != null && !from.getHeaders().isEmpty()) { - for (String headerName : from.getHeaders()) { - putHeader(headerName, from.getHeader(headerName)); - } - } - } - - @Override - public void copyContextAndHeadersFrom(HasContextAndHeaders other) { - copyContextFrom(other); - copyHeadersFrom(other); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java b/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java deleted file mode 100644 index 52d5af5e334..00000000000 --- a/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common; - -import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; -import org.elasticsearch.common.collect.ImmutableOpenMap; - -import java.util.Set; - -public class DelegatingHasContextAndHeaders implements HasContextAndHeaders { - - private HasContextAndHeaders delegate; - - public DelegatingHasContextAndHeaders(HasContextAndHeaders delegate) { - this.delegate = delegate; - } - - @Override - public void putHeader(String key, V value) { - delegate.putHeader(key, value); - } - - @Override - public void copyContextAndHeadersFrom(HasContextAndHeaders other) { - delegate.copyContextAndHeadersFrom(other); - } - - @Override - public V getHeader(String key) { - return delegate.getHeader(key); - } - - @Override - public boolean hasHeader(String key) { - return delegate.hasHeader(key); - } - - @Override - public V putInContext(Object key, Object value) { - return delegate.putInContext(key, value); - } - - @Override - public Set getHeaders() { - return delegate.getHeaders(); - } - - @Override - public void copyHeadersFrom(HasHeaders from) { - delegate.copyHeadersFrom(from); - } - - @Override - public void putAllInContext(ObjectObjectAssociativeContainer map) { - delegate.putAllInContext(map); - } - - @Override - public V getFromContext(Object key) { - return delegate.getFromContext(key); - } - - @Override - public V getFromContext(Object key, V defaultValue) { - return delegate.getFromContext(key, defaultValue); - } - - @Override - public boolean hasInContext(Object key) { - return delegate.hasInContext(key); - } - - @Override - public int contextSize() { - return delegate.contextSize(); - } - - @Override - public boolean isContextEmpty() { - return delegate.isContextEmpty(); - } - - @Override - public ImmutableOpenMap getContext() { - return delegate.getContext(); - } - - @Override - public void copyContextFrom(HasContext other) { - delegate.copyContextFrom(other); - } - - -} diff --git 
a/core/src/main/java/org/elasticsearch/common/HasContext.java b/core/src/main/java/org/elasticsearch/common/HasContext.java deleted file mode 100644 index 6a303e39bae..00000000000 --- a/core/src/main/java/org/elasticsearch/common/HasContext.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common; - -import com.carrotsearch.hppc.ObjectObjectAssociativeContainer; -import org.elasticsearch.common.collect.ImmutableOpenMap; - -public interface HasContext { - - /** - * Attaches the given value to the context. - * - * @return The previous value that was associated with the given key in the context, or - * {@code null} if there was none. - */ - V putInContext(Object key, Object value); - - /** - * Attaches the given values to the context - */ - void putAllInContext(ObjectObjectAssociativeContainer map); - - /** - * @return The context value that is associated with the given key - * - * @see #putInContext(Object, Object) - */ - V getFromContext(Object key); - - /** - * @param defaultValue The default value that should be returned for the given key, if no - * value is currently associated with it. - * - * @return The value that is associated with the given key in the context - * - * @see #putInContext(Object, Object) - */ - V getFromContext(Object key, V defaultValue); - - /** - * Checks if the context contains an entry with the given key - */ - boolean hasInContext(Object key); - - /** - * @return The number of values attached in the context. - */ - int contextSize(); - - /** - * Checks if the context is empty. - */ - boolean isContextEmpty(); - - /** - * @return A safe immutable copy of the current context. - */ - ImmutableOpenMap getContext(); - - /** - * Copies the context from the given context holder to this context holder. Any shared keys between - * the two context will be overridden by the given context holder. - */ - void copyContextFrom(HasContext other); -} diff --git a/core/src/main/java/org/elasticsearch/common/HasContextAndHeaders.java b/core/src/main/java/org/elasticsearch/common/HasContextAndHeaders.java deleted file mode 100644 index 35bea9a7778..00000000000 --- a/core/src/main/java/org/elasticsearch/common/HasContextAndHeaders.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common; - -/** - * marker interface - */ -public interface HasContextAndHeaders extends HasContext, HasHeaders { - - /** - * copies over the context and the headers - * @param other another object supporting headers and context - */ - void copyContextAndHeadersFrom(HasContextAndHeaders other); - -} diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java b/core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java index df3a8c6be8d..9275b379158 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java @@ -18,16 +18,16 @@ */ package org.elasticsearch.common.blobstore; +import java.io.Closeable; import java.io.IOException; /** * */ -public interface BlobStore { +public interface BlobStore extends Closeable { BlobContainer blobContainer(BlobPath path); void delete(BlobPath path) throws IOException; - void close(); } diff --git a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java index 1dafcff62d3..14479591766 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -59,16 +59,16 @@ public class Loggers { } public static ESLogger getLogger(Class clazz, Settings settings, ShardId shardId, String... prefixes) { - return getLogger(clazz, settings, shardId.index(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0])); + return getLogger(clazz, settings, shardId.getIndex(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0])); } /** Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings,ShardId,String...)} but String loggerName instead of Class. */ public static ESLogger getLogger(String loggerName, Settings settings, ShardId shardId, String... prefixes) { - return getLogger(loggerName, settings, asArrayList(shardId.index().name(), Integer.toString(shardId.id()), prefixes).toArray(new String[0])); + return getLogger(loggerName, settings, asArrayList(shardId.getIndexName(), Integer.toString(shardId.id()), prefixes).toArray(new String[0])); } public static ESLogger getLogger(Class clazz, Settings settings, Index index, String... prefixes) { - return getLogger(clazz, settings, asArrayList(SPACE, index.name(), prefixes).toArray(new String[0])); + return getLogger(clazz, settings, asArrayList(SPACE, index.getName(), prefixes).toArray(new String[0])); } public static ESLogger getLogger(Class clazz, Settings settings, String... 
prefixes) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java index 92aa02ba002..146fb7ba05e 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java @@ -64,7 +64,7 @@ public final class ShardCoreKeyMap { throw new IllegalArgumentException("Could not extract shard id from " + reader); } final Object coreKey = reader.getCoreCacheKey(); - final String index = shardId.getIndex(); + final String index = shardId.getIndexName(); synchronized (this) { if (coreKeyToShard.put(coreKey, shardId) == null) { Set objects = indexToCoreKey.get(index); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index fab02b66ff6..49d20d88b98 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.network; import java.util.Arrays; import java.util.List; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.client.transport.support.TransportProxyClient; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -376,7 +375,6 @@ public class NetworkModule extends AbstractModule { transportTypes.bindType(binder(), settings, TRANSPORT_TYPE_KEY, defaultTransport); if (transportClient) { - bind(Headers.class).asEagerSingleton(); bind(TransportProxyClient.class).asEagerSingleton(); bind(TransportClientNodesService.class).asEagerSingleton(); } else { diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 4325da8f6e1..d06df0bf7f4 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -22,8 +22,10 @@ import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.cluster.ClusterModule; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.MetaData; @@ -64,8 +66,10 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.node.Node; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.repositories.uri.URLRepository; +import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.threadpool.ThreadPool; @@ -268,6 +272,10 @@ public final class ClusterSettings extends AbstractScopedSettings { 
URLRepository.SUPPORTED_PROTOCOLS_SETTING, TransportMasterNodeReadAction.FORCE_LOCAL_SETTING, AutoCreateIndex.AUTO_CREATE_INDEX_SETTING, + BaseRestHandler.MULTI_ALLOW_EXPLICIT_INDEX, + ClusterName.CLUSTER_NAME_SETTING, + Client.CLIENT_TYPE_SETTING_S, + InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, EsExecutors.PROCESSORS_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index bc44494000e..723d7df5114 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -54,30 +54,30 @@ public class EsExecutors { return PROCESSORS_SETTING.get(settings); } - public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory) { - return new PrioritizedEsThreadPoolExecutor(name, 1, 1, 0L, TimeUnit.MILLISECONDS, threadFactory); + public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory, ThreadContext contextHolder) { + return new PrioritizedEsThreadPoolExecutor(name, 1, 1, 0L, TimeUnit.MILLISECONDS, threadFactory, contextHolder); } - public static EsThreadPoolExecutor newScaling(String name, int min, int max, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory) { + public static EsThreadPoolExecutor newScaling(String name, int min, int max, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory, ThreadContext contextHolder) { ExecutorScalingQueue queue = new ExecutorScalingQueue<>(); // we force the execution, since we might run into concurrency issues in offer for ScalingBlockingQueue - EsThreadPoolExecutor executor = new EsThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy()); + EsThreadPoolExecutor executor = new EsThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy(), contextHolder); queue.executor = executor; return executor; } - public static EsThreadPoolExecutor newCached(String name, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory) { - return new EsThreadPoolExecutor(name, 0, Integer.MAX_VALUE, keepAliveTime, unit, new SynchronousQueue(), threadFactory, new EsAbortPolicy()); + public static EsThreadPoolExecutor newCached(String name, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory, ThreadContext contextHolder) { + return new EsThreadPoolExecutor(name, 0, Integer.MAX_VALUE, keepAliveTime, unit, new SynchronousQueue(), threadFactory, new EsAbortPolicy(), contextHolder); } - public static EsThreadPoolExecutor newFixed(String name, int size, int queueCapacity, ThreadFactory threadFactory) { + public static EsThreadPoolExecutor newFixed(String name, int size, int queueCapacity, ThreadFactory threadFactory, ThreadContext contextHolder) { BlockingQueue queue; if (queueCapacity < 0) { queue = ConcurrentCollections.newBlockingQueue(); } else { queue = new SizeBlockingQueue<>(ConcurrentCollections.newBlockingQueue(), queueCapacity); } - return new EsThreadPoolExecutor(name, size, size, 0, TimeUnit.MILLISECONDS, queue, threadFactory, new EsAbortPolicy()); + return new EsThreadPoolExecutor(name, size, size, 0, TimeUnit.MILLISECONDS, queue, threadFactory, new EsAbortPolicy(), contextHolder); } public static String threadName(Settings settings, String ... 
names) { diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java index 4c02aab1fe8..fde8d828295 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java @@ -24,12 +24,14 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; /** * An extension to thread pool executor, allowing (in the future) to add specific additional stats to it. */ public class EsThreadPoolExecutor extends ThreadPoolExecutor { + private final ThreadContext contextHolder; private volatile ShutdownListener listener; private final Object monitor = new Object(); @@ -38,13 +40,14 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { */ private final String name; - EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory) { - this(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new EsAbortPolicy()); + EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory, ThreadContext contextHolder) { + this(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new EsAbortPolicy(), contextHolder); } - EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory, XRejectedExecutionHandler handler) { + EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory, XRejectedExecutionHandler handler, ThreadContext contextHolder) { super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler); this.name = name; + this.contextHolder = contextHolder; } public void shutdown(ShutdownListener listener) { @@ -80,7 +83,11 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { } @Override - public void execute(Runnable command) { + public void execute(final Runnable command) { + doExecute(wrapRunnable(command)); + } + + protected void doExecute(final Runnable command) { try { super.execute(command); } catch (EsRejectedExecutionException ex) { @@ -99,6 +106,14 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { } } + /** + * Returns a stream of all pending tasks. This is similar to {@link #getQueue()} but will expose the originally submitted + * {@link Runnable} instances rather than potentially wrapped ones. 
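+ * Tasks are unwrapped via {@link #unwrap(Runnable)} on the way out, so callers see exactly what was passed to {@link #execute(Runnable)}.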
+ */ + public Stream getTasks() { + return this.getQueue().stream().map(this::unwrap); + } + @Override public String toString() { StringBuilder b = new StringBuilder(); @@ -116,4 +131,114 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { b.append(super.toString()).append(']'); return b.toString(); } + + protected Runnable wrapRunnable(Runnable command) { + final Runnable wrappedCommand; + if (command instanceof AbstractRunnable) { + wrappedCommand = new FilterAbstractRunnable(contextHolder, (AbstractRunnable) command); + } else { + wrappedCommand = new FilterRunnable(contextHolder, command); + } + return wrappedCommand; + } + + protected Runnable unwrap(Runnable runnable) { + if (runnable instanceof FilterAbstractRunnable) { + return ((FilterAbstractRunnable) runnable).in; + } else if (runnable instanceof FilterRunnable) { + return ((FilterRunnable) runnable).in; + } + return runnable; + } + + private class FilterAbstractRunnable extends AbstractRunnable { + private final ThreadContext contextHolder; + private final AbstractRunnable in; + private final ThreadContext.StoredContext ctx; + + FilterAbstractRunnable(ThreadContext contextHolder, AbstractRunnable in) { + this.contextHolder = contextHolder; + ctx = contextHolder.newStoredContext(); + this.in = in; + } + + @Override + public boolean isForceExecution() { + return in.isForceExecution(); + } + + @Override + public void onAfter() { + in.onAfter(); + } + + @Override + public void onFailure(Throwable t) { + in.onFailure(t); + } + + @Override + public void onRejection(Throwable t) { + in.onRejection(t); + } + + @Override + protected void doRun() throws Exception { + boolean whileRunning = false; + try (ThreadContext.StoredContext ingore = contextHolder.stashContext()){ + ctx.restore(); + whileRunning = true; + in.doRun(); + whileRunning = false; + } catch (IllegalStateException ex) { + if (whileRunning || isShutdown() == false) { + throw ex; + } + // if we hit an ISE here we have been shutting down + // this comes from the threadcontext and barfs if + // our threadpool has been shutting down + } + } + + @Override + public String toString() { + return in.toString(); + } + + } + + private class FilterRunnable implements Runnable { + private final ThreadContext contextHolder; + private final Runnable in; + private final ThreadContext.StoredContext ctx; + + FilterRunnable(ThreadContext contextHolder, Runnable in) { + this.contextHolder = contextHolder; + ctx = contextHolder.newStoredContext(); + this.in = in; + } + + @Override + public void run() { + boolean whileRunning = false; + try (ThreadContext.StoredContext ingore = contextHolder.stashContext()){ + ctx.restore(); + whileRunning = true; + in.run(); + whileRunning = false; + } catch (IllegalStateException ex) { + if (whileRunning || isShutdown() == false) { + throw ex; + } + // if we hit an ISE here we have been shutting down + // this comes from the threadcontext and barfs if + // our threadpool has been shutting down + } + } + @Override + public String toString() { + return in.toString(); + } + } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java index d0d2906deed..f55c84e943a 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java @@ -47,8 +47,8 @@ public 
class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { private AtomicLong insertionOrder = new AtomicLong(); private Queue current = ConcurrentCollections.newQueue(); - PrioritizedEsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory) { - super(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, new PriorityBlockingQueue(), threadFactory); + PrioritizedEsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory, ThreadContext contextHolder) { + super(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, new PriorityBlockingQueue<>(), threadFactory, contextHolder); } public Pending[] getPending() { @@ -88,10 +88,14 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { for (Runnable runnable : runnables) { if (runnable instanceof TieBreakingPrioritizedRunnable) { TieBreakingPrioritizedRunnable t = (TieBreakingPrioritizedRunnable) runnable; - pending.add(new Pending(t.runnable, t.priority(), t.insertionOrder, executing)); + pending.add(new Pending(unwrap(t.runnable), t.priority(), t.insertionOrder, executing)); } else if (runnable instanceof PrioritizedFutureTask) { PrioritizedFutureTask t = (PrioritizedFutureTask) runnable; - pending.add(new Pending(t.task, t.priority, t.insertionOrder, executing)); + Object task = t.task; + if (t.task instanceof Runnable) { + task = unwrap((Runnable) t.task); + } + pending.add(new Pending(task, t.priority, t.insertionOrder, executing)); } } } @@ -107,12 +111,8 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { } public void execute(Runnable command, final ScheduledExecutorService timer, final TimeValue timeout, final Runnable timeoutCallback) { - if (command instanceof PrioritizedRunnable) { - command = new TieBreakingPrioritizedRunnable((PrioritizedRunnable) command, insertionOrder.incrementAndGet()); - } else if (!(command instanceof PrioritizedFutureTask)) { // it might be a callable wrapper... - command = new TieBreakingPrioritizedRunnable(command, Priority.NORMAL, insertionOrder.incrementAndGet()); - } - super.execute(command); + command = wrapRunnable(command); + doExecute(command); if (timeout.nanos() >= 0) { if (command instanceof TieBreakingPrioritizedRunnable) { ((TieBreakingPrioritizedRunnable) command).scheduleTimeout(timer, timeoutCallback, timeout); @@ -125,21 +125,31 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { } @Override - public void execute(Runnable command) { + protected Runnable wrapRunnable(Runnable command) { if (command instanceof PrioritizedRunnable) { - command = new TieBreakingPrioritizedRunnable((PrioritizedRunnable) command, insertionOrder.incrementAndGet()); - } else if (!(command instanceof PrioritizedFutureTask)) { // it might be a callable wrapper... - command = new TieBreakingPrioritizedRunnable(command, Priority.NORMAL, insertionOrder.incrementAndGet()); + if ((command instanceof TieBreakingPrioritizedRunnable)) { + return command; + } + Priority priority = ((PrioritizedRunnable) command).priority(); + return new TieBreakingPrioritizedRunnable(super.wrapRunnable(command), priority, insertionOrder.incrementAndGet()); + } else if (command instanceof PrioritizedFutureTask) { + return command; + } else { // it might be a callable wrapper... 
+ if (command instanceof TieBreakingPrioritizedRunnable) { + return command; + } + return new TieBreakingPrioritizedRunnable(super.wrapRunnable(command), Priority.NORMAL, insertionOrder.incrementAndGet()); } - super.execute(command); } + @Override protected RunnableFuture newTaskFor(Runnable runnable, T value) { if (!(runnable instanceof PrioritizedRunnable)) { runnable = PrioritizedRunnable.wrap(runnable, Priority.NORMAL); } - return new PrioritizedFutureTask<>((PrioritizedRunnable) runnable, value, insertionOrder.incrementAndGet()); + Priority priority = ((PrioritizedRunnable) runnable).priority(); + return new PrioritizedFutureTask<>(runnable, priority, value, insertionOrder.incrementAndGet()); } @Override @@ -147,7 +157,7 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { if (!(callable instanceof PrioritizedCallable)) { callable = PrioritizedCallable.wrap(callable, Priority.NORMAL); } - return new PrioritizedFutureTask<>((PrioritizedCallable) callable, insertionOrder.incrementAndGet()); + return new PrioritizedFutureTask<>((PrioritizedCallable)callable, insertionOrder.incrementAndGet()); } public static class Pending { @@ -173,10 +183,6 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { private ScheduledFuture timeoutFuture; private boolean started = false; - TieBreakingPrioritizedRunnable(PrioritizedRunnable runnable, long insertionOrder) { - this(runnable, runnable.priority(), insertionOrder); - } - TieBreakingPrioritizedRunnable(Runnable runnable, Priority priority, long insertionOrder) { super(priority); this.runnable = runnable; @@ -233,6 +239,7 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { runnable = null; timeoutFuture = null; } + } } @@ -242,10 +249,10 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { final Priority priority; final long insertionOrder; - public PrioritizedFutureTask(PrioritizedRunnable runnable, T value, long insertionOrder) { + public PrioritizedFutureTask(Runnable runnable, Priority priority, T value, long insertionOrder) { super(runnable, value); this.task = runnable; - this.priority = runnable.priority(); + this.priority = priority; this.insertionOrder = insertionOrder; } @@ -265,4 +272,5 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor { return insertionOrder < pft.insertionOrder ? -1 : 1; } } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java new file mode 100644 index 00000000000..5ad4b76af13 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -0,0 +1,357 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.util.concurrent; + +import org.apache.lucene.util.CloseableThreadLocal; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * A ThreadContext is a map of string headers and a transient map of keyed objects that are associated with + * a thread. It allows storing and retrieving header information across method calls, network calls, as well as threads spawned from a + * thread that has a {@link ThreadContext} associated with it. Threads spawned from a {@link org.elasticsearch.threadpool.ThreadPool} have out of the box + * support for {@link ThreadContext}, and all threads spawned will inherit the {@link ThreadContext} from the thread they are forked from. + * Network calls will also preserve the sender's headers automatically. + *
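As a rough sketch of the header map described above (a fragment only; the header names and values are made up, and Settings.EMPTY stands in for real node settings):

```java
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
threadContext.putHeader("x-trace-id", "abc123");                       // persistent header, serialized with requests
threadContext.putTransient("request-start-nanos", System.nanoTime());  // transient value, never leaves the node
String traceId = threadContext.getHeader("x-trace-id");                // -> "abc123"
```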

+ * Consumers of ThreadContext usually don't need to interact with adding or stashing contexts. Every elasticsearch thread is managed by a thread pool or executor + * that is responsible for stashing and restoring the thread's context. For instance, if a network request is received, all headers are deserialized from the network + * and added directly as the headers of the thread's {@link ThreadContext} (see {@link #readHeaders(StreamInput)}). In order not to modify the context that is currently + * active on this thread, the network code uses a try-with-resources pattern to stash its current context, read headers into a fresh one, and, once the request is handled or a handler thread + * is forked (which in turn inherits the context), restore the previous context. For instance: + *

+ *
+ *     // current context is stashed and replaced with a default context
+ *     try (StoredContext context = threadContext.stashContext()) {
+ *         threadContext.readHeaders(in); // read headers into current context
+ *         if (fork) {
+ *             threadPool.execute(() -> request.handle()); // inherits context
+ *         } else {
+ *             request.handle();
+ *         }
+ *     }
+ *     // previous context is restored on StoredContext#close()
+ * 
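The same stash/restore dance is what the FilterRunnable and FilterAbstractRunnable wrappers added to EsThreadPoolExecutor earlier in this patch perform on behalf of every submitted task. A minimal sketch of that wrapper pattern, with illustrative names rather than the actual classes:

```java
class ContextPreservingRunnable implements Runnable {
    private final ThreadContext threadContext;
    private final ThreadContext.StoredContext creatorContext; // captured on the submitting thread
    private final Runnable in;

    ContextPreservingRunnable(ThreadContext threadContext, Runnable in) {
        this.threadContext = threadContext;
        this.creatorContext = threadContext.newStoredContext();
        this.in = in;
    }

    @Override
    public void run() {
        // stash whatever the worker thread currently holds, run with the submitter's
        // context, and restore the worker's previous context when the try block closes
        try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
            creatorContext.restore();
            in.run();
        }
    }
}
```

The real wrappers additionally swallow the IllegalStateException that a closed ThreadContext throws while the pool is shutting down, as shown in the EsThreadPoolExecutor hunk above.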
+ * + */ +public final class ThreadContext implements Closeable, Writeable{ + + public static final String PREFIX = "request.headers"; + private final Map defaultHeader; + private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(Collections.emptyMap()); + private final ContextThreadLocal threadLocal; + + /** + * Creates a new ThreadContext instance + * @param settings the settings to read the default request headers from + */ + public ThreadContext(Settings settings) { + Settings headers = settings.getAsSettings(PREFIX); + if (headers == null) { + this.defaultHeader = Collections.emptyMap(); + } else { + Map defaultHeader = new HashMap<>(); + for (String key : headers.names()) { + defaultHeader.put(key, headers.get(key)); + } + this.defaultHeader = Collections.unmodifiableMap(defaultHeader); + } + threadLocal = new ContextThreadLocal(); + } + + @Override + public void close() throws IOException { + threadLocal.close(); + } + + /** + * Removes the current context and resets a default context. The removed context can be + * restored when closing the returned {@link StoredContext} + */ + public StoredContext stashContext() { + final ThreadContextStruct context = threadLocal.get(); + threadLocal.set(null); + return () -> { + threadLocal.set(context); + }; + } + + /** + * Removes the current context and resets a new context that contains a merge of the current headers and the given headers. The removed context can be + * restored when closing the returned {@link StoredContext}. The merge strategy is that headers that are already existing are preserved unless they are defaults. + */ + public StoredContext stashAndMergeHeaders(Map headers) { + final ThreadContextStruct context = threadLocal.get(); + Map newHeader = new HashMap<>(headers); + newHeader.putAll(context.headers); + threadLocal.set(DEFAULT_CONTEXT.putHeaders(newHeader)); + return () -> { + threadLocal.set(context); + }; + } + + /** + * Just like {@link #stashContext()} but no default context is set. 
+ */ + public StoredContext newStoredContext() { + final ThreadContextStruct context = threadLocal.get(); + return () -> { + threadLocal.set(context); + }; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + threadLocal.get().writeTo(out, defaultHeader); + } + + @Override + public ThreadContextStruct readFrom(StreamInput in) throws IOException { + return DEFAULT_CONTEXT.readFrom(in); + } + + /** + * Reads the headers from the stream into the current context + */ + public void readHeaders(StreamInput in) throws IOException { + threadLocal.set(readFrom(in)); + } + + + /** + * Returns the header for the given key or null if not present + */ + public String getHeader(String key) { + String value = threadLocal.get().headers.get(key); + if (value == null) { + return defaultHeader.get(key); + } + return value; + } + + /** + * Returns all of the current contexts headers + */ + public Map getHeaders() { + HashMap map = new HashMap<>(defaultHeader); + map.putAll(threadLocal.get().headers); + return Collections.unmodifiableMap(map); + } + + /** + * Copies all header key, value pairs into the current context + */ + public void copyHeaders(Iterable> headers) { + threadLocal.set(threadLocal.get().copyHeaders(headers)); + } + + /** + * Puts a header into the context + */ + public void putHeader(String key, String value) { + threadLocal.set(threadLocal.get().putPersistent(key, value)); + } + + /** + * Puts all of the given headers into this context + */ + public void putHeader(Map header) { + threadLocal.set(threadLocal.get().putHeaders(header)); + } + + /** + * Puts a transient header object into this context + */ + public void putTransient(String key, Object value) { + threadLocal.set(threadLocal.get().putTransient(key, value)); + } + + /** + * Returns a transient header object or null if there is no header for the given key + */ + public T getTransient(String key) { + return (T) threadLocal.get().transientHeaders.get(key); + } + + public interface StoredContext extends AutoCloseable { + @Override + void close(); + + default void restore() { + close(); + } + } + + static final class ThreadContextStruct implements Writeable { + private final Map headers; + private final Map transientHeaders; + + private ThreadContextStruct(StreamInput in) throws IOException { + int numValues = in.readVInt(); + Map headers = numValues == 0 ? 
Collections.emptyMap() : new HashMap<>(numValues); + for (int i = 0; i < numValues; i++) { + headers.put(in.readString(), in.readString()); + } + this.headers = headers; + this.transientHeaders = Collections.emptyMap(); + } + + private ThreadContextStruct(Map headers, Map transientHeaders) { + this.headers = headers; + this.transientHeaders = transientHeaders; + } + + private ThreadContextStruct(Map headers) { + this(headers, Collections.emptyMap()); + } + + private ThreadContextStruct putPersistent(String key, String value) { + Map newHeaders = new HashMap<>(this.headers); + putSingleHeader(key, value, newHeaders); + return new ThreadContextStruct(newHeaders, transientHeaders); + } + + private void putSingleHeader(String key, String value, Map newHeaders) { + final String existingValue; + if ((existingValue = newHeaders.putIfAbsent(key, value)) != null) { + throw new IllegalArgumentException("value for key [" + key + "] already present"); + } + } + + private ThreadContextStruct putHeaders(Map headers) { + if (headers.isEmpty()) { + return this; + } else { + final Map newHeaders = new HashMap<>(); + for (Map.Entry entry : headers.entrySet()) { + putSingleHeader(entry.getKey(), entry.getValue(), newHeaders); + } + newHeaders.putAll(this.headers); + return new ThreadContextStruct(newHeaders, transientHeaders); + } + } + + private ThreadContextStruct putTransient(String key, Object value) { + Map newTransient = new HashMap<>(this.transientHeaders); + if (newTransient.putIfAbsent(key, value) != null) { + throw new IllegalArgumentException("value for key [" + key + "] already present"); + } + return new ThreadContextStruct(headers, newTransient); + } + + boolean isEmpty() { + return headers.isEmpty() && transientHeaders.isEmpty(); + } + + + private ThreadContextStruct copyHeaders(Iterable> headers) { + Map newHeaders = new HashMap<>(); + for (Map.Entry header : headers) { + newHeaders.put(header.getKey(), header.getValue()); + } + return putHeaders(newHeaders); + } + + @Override + public ThreadContextStruct readFrom(StreamInput in) throws IOException { + return new ThreadContextStruct(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("use the other write to"); + } + + public void writeTo(StreamOutput out, Map defaultHeaders) throws IOException { + final Map headers; + if (defaultHeaders.isEmpty()) { + headers = this.headers; + } else { + headers = new HashMap<>(defaultHeaders); + headers.putAll(this.headers); + } + + int keys = headers.size(); + out.writeVInt(keys); + for (Map.Entry entry : headers.entrySet()) { + out.writeString(entry.getKey()); + out.writeString(entry.getValue()); + } + } + + } + + private static class ContextThreadLocal extends CloseableThreadLocal { + private final AtomicBoolean closed = new AtomicBoolean(false); + + @Override + public void set(ThreadContextStruct object) { + try { + if (object == DEFAULT_CONTEXT) { + super.set(null); + } else { + super.set(object); + } + } catch (NullPointerException ex) { + /* This is odd but CloseableThreadLocal throws a NPE if it was closed but still accessed. 
+ to get a real exception we call ensureOpen() to tell the user we are already closed.*/ + ensureOpen(); + throw ex; + } + } + + @Override + public ThreadContextStruct get() { + try { + ThreadContextStruct threadContextStruct = super.get(); + if (threadContextStruct != null) { + return threadContextStruct; + } + return DEFAULT_CONTEXT; + } catch (NullPointerException ex) { + /* This is odd but CloseableThreadLocal throws a NPE if it was closed but still accessed. + to get a real exception we call ensureOpen() to tell the user we are already closed.*/ + ensureOpen(); + throw ex; + } + } + + private void ensureOpen() { + if (closed.get()) { + throw new IllegalStateException("threadcontext is already closed"); + } + } + + @Override + public void close() { + if (closed.compareAndSet(false, true)) { + super.close(); + } + } + } +} diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index a9960272d0e..e661002e649 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -169,7 +169,7 @@ public class UnicastZenPing extends AbstractLifecycleComponent implemen transportService.registerRequestHandler(ACTION_NAME, UnicastPingRequest::new, ThreadPool.Names.SAME, new UnicastPingRequestHandler()); ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[unicast_connect]"); - unicastConnectExecutor = EsExecutors.newScaling("unicast_connect", 0, concurrentConnects, 60, TimeUnit.SECONDS, threadFactory); + unicastConnectExecutor = EsExecutors.newScaling("unicast_connect", 0, concurrentConnects, 60, TimeUnit.SECONDS, threadFactory, threadPool.getThreadContext()); } @Override diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 658a48ecc87..91d1fbeb756 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -66,6 +66,7 @@ import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; import static java.util.Collections.unmodifiableSet; @@ -100,14 +101,14 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { * Resolves the given shards directory against this NodePath */ public Path resolve(ShardId shardId) { - return resolve(shardId.index()).resolve(Integer.toString(shardId.id())); + return resolve(shardId.getIndex()).resolve(Integer.toString(shardId.id())); } /** * Resolves the given indexes directory against this NodePath */ public Path resolve(Index index) { - return indicesPath.resolve(index.name()); + return indicesPath.resolve(index.getName()); } @Override @@ -127,7 +128,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { private final int localNodeId; private final AtomicBoolean closed = new AtomicBoolean(false); - private final Map shardLocks = new HashMap<>(); + private final Map shardLocks = new HashMap<>(); // Setting to automatically append node id to custom data paths public static final String ADD_NODE_ID_TO_CUSTOM_PATH = "node.add_id_to_custom_path"; @@ -422,11 +423,11 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { * @param 
indexSettings settings for the index being deleted */ public void deleteIndexDirectoryUnderLock(Index index, IndexSettings indexSettings) throws IOException { - final Path[] indexPaths = indexPaths(index); + final Path[] indexPaths = indexPaths(index.getName()); logger.trace("deleting index {} directory, paths({}): [{}]", index, indexPaths.length, indexPaths); IOUtils.rm(indexPaths); if (indexSettings.hasCustomDataPath()) { - Path customLocation = resolveCustomLocation(indexSettings, index.name()); + Path customLocation = resolveCustomLocation(indexSettings, index.getName()); logger.trace("deleting custom index {} directory [{}]", index, customLocation); IOUtils.rm(customLocation); } @@ -487,23 +488,24 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { * write operation on a shards data directory like deleting files, creating a new index writer * or recover from a different shard instance into it. If the shard lock can not be acquired * an {@link org.apache.lucene.store.LockObtainFailedException} is thrown - * @param id the shard ID to lock + * @param shardId the shard ID to lock * @param lockTimeoutMS the lock timeout in milliseconds * @return the shard lock. Call {@link ShardLock#close()} to release the lock * @throws IOException if an IOException occurs. */ - public ShardLock shardLock(final ShardId id, long lockTimeoutMS) throws IOException { - logger.trace("acquiring node shardlock on [{}], timeout [{}]", id, lockTimeoutMS); + public ShardLock shardLock(final ShardId shardId, long lockTimeoutMS) throws IOException { + logger.trace("acquiring node shardlock on [{}], timeout [{}]", shardId, lockTimeoutMS); + final ShardLockKey shardLockKey = new ShardLockKey(shardId); final InternalShardLock shardLock; final boolean acquired; synchronized (shardLocks) { - if (shardLocks.containsKey(id)) { - shardLock = shardLocks.get(id); + if (shardLocks.containsKey(shardLockKey)) { + shardLock = shardLocks.get(shardLockKey); shardLock.incWaitCount(); acquired = false; } else { - shardLock = new InternalShardLock(id); - shardLocks.put(id, shardLock); + shardLock = new InternalShardLock(shardLockKey); + shardLocks.put(shardLockKey, shardLock); acquired = true; } } @@ -518,22 +520,68 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { } } } - logger.trace("successfully acquired shardlock for [{}]", id); - return new ShardLock(id) { // new instance prevents double closing + logger.trace("successfully acquired shardlock for [{}]", shardId); + return new ShardLock(shardId) { // new instance prevents double closing @Override protected void closeInternal() { shardLock.release(); - logger.trace("released shard lock for [{}]", id); + logger.trace("released shard lock for [{}]", shardLockKey); } }; } /** * Returns all currently lock shards. + * + * Note: the shard ids return do not contain a valid Index UUID */ public Set lockedShards() { synchronized (shardLocks) { - return unmodifiableSet(new HashSet<>(shardLocks.keySet())); + Set lockedShards = shardLocks.keySet().stream() + .map(shardLockKey -> new ShardId(new Index(shardLockKey.indexName, "_na_"), shardLockKey.shardId)).collect(Collectors.toSet()); + return unmodifiableSet(lockedShards); + } + } + + // a key for the shard lock. we can't use shardIds, because the contain + // the index uuid, but we want the lock semantics to the same as we map indices to disk folders, i.e., without the uuid (for now). 
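For orientation, a sketch of how callers are expected to hold the lock returned by shardLock(...) above; nodeEnvironment and shardId are placeholders and the body is elided, so this is assumed usage rather than code from the patch:

```java
void withShardLock(NodeEnvironment nodeEnvironment, ShardId shardId) throws IOException {
    try (ShardLock lock = nodeEnvironment.shardLock(shardId, 5000)) { // timeout in milliseconds
        // exclusive access to the shard's on-disk data while the lock is held;
        // ShardLock#close() releases it when the try block exits
    }
}
```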
+ private final class ShardLockKey { + final String indexName; + final int shardId; + + public ShardLockKey(final ShardId shardId) { + this.indexName = shardId.getIndexName(); + this.shardId = shardId.id(); + } + + @Override + public String toString() { + return "[" + indexName + "][" + shardId + "]"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ShardLockKey that = (ShardLockKey) o; + + if (shardId != that.shardId) { + return false; + } + return indexName.equals(that.indexName); + + } + + @Override + public int hashCode() { + int result = indexName.hashCode(); + result = 31 * result + shardId; + return result; } } @@ -546,10 +594,10 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { */ private final Semaphore mutex = new Semaphore(1); private int waitCount = 1; // guarded by shardLocks - private ShardId shardId; + private final ShardLockKey lockKey; - InternalShardLock(ShardId id) { - shardId = id; + InternalShardLock(ShardLockKey id) { + lockKey = id; mutex.acquireUninterruptibly(); } @@ -569,10 +617,10 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { synchronized (shardLocks) { assert waitCount > 0 : "waitCount is " + waitCount + " but should be > 0"; --waitCount; - logger.trace("shard lock wait count for [{}] is now [{}]", shardId, waitCount); + logger.trace("shard lock wait count for [{}] is now [{}]", lockKey, waitCount); if (waitCount == 0) { - logger.trace("last shard lock wait decremented, removing lock for [{}]", shardId); - InternalShardLock remove = shardLocks.remove(shardId); + logger.trace("last shard lock wait decremented, removing lock for [{}]", lockKey); + InternalShardLock remove = shardLocks.remove(lockKey); assert remove != null : "Removed lock was null"; } } @@ -581,11 +629,11 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { void acquire(long timeoutInMillis) throws LockObtainFailedException{ try { if (mutex.tryAcquire(timeoutInMillis, TimeUnit.MILLISECONDS) == false) { - throw new LockObtainFailedException("Can't lock shard " + shardId + ", timed out after " + timeoutInMillis + "ms"); + throw new LockObtainFailedException("Can't lock shard " + lockKey + ", timed out after " + timeoutInMillis + "ms"); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); - throw new LockObtainFailedException("Can't lock shard " + shardId + ", interrupted", e); + throw new LockObtainFailedException("Can't lock shard " + lockKey + ", interrupted", e); } } } @@ -628,11 +676,11 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { /** * Returns all index paths. 
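Looping back to the ShardLockKey introduced in the hunk above: keying on index name plus shard id means two ShardIds that differ only in index UUID still contend for the same on-disk directory. A hedged illustration (ShardLockKey is private to NodeEnvironment, so the comparison is shown as comments; the names and UUIDs are made up):

```java
ShardId oldGeneration = new ShardId(new Index("logs", "uuid-aaa"), 0);
ShardId newGeneration = new ShardId(new Index("logs", "uuid-bbb"), 0);
// both map to the lock key [logs][0], so they share a single shard lock,
// even though the two Index instances are not equal now that
// Index#equals compares the UUID as well
```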
*/ - public Path[] indexPaths(Index index) { + public Path[] indexPaths(String indexName) { assert assertEnvIsLocked(); Path[] indexPaths = new Path[nodePaths.length]; for (int i = 0; i < nodePaths.length; i++) { - indexPaths[i] = nodePaths[i].indicesPath.resolve(index.name()); + indexPaths[i] = nodePaths[i].indicesPath.resolve(indexName); } return indexPaths; } @@ -691,14 +739,14 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { } assert assertEnvIsLocked(); final Set shardIds = new HashSet<>(); - String indexName = index.name(); + String indexName = index.getName(); for (final NodePath nodePath : nodePaths) { Path location = nodePath.indicesPath; if (Files.isDirectory(location)) { try (DirectoryStream indexStream = Files.newDirectoryStream(location)) { for (Path indexPath : indexStream) { if (indexName.equals(indexPath.getFileName().toString())) { - shardIds.addAll(findAllShardsForIndex(indexPath)); + shardIds.addAll(findAllShardsForIndex(indexPath, index)); } } } @@ -707,16 +755,16 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { return shardIds; } - private static Set findAllShardsForIndex(Path indexPath) throws IOException { + private static Set findAllShardsForIndex(Path indexPath, Index index) throws IOException { + assert indexPath.getFileName().toString().equals(index.getName()); Set shardIds = new HashSet<>(); if (Files.isDirectory(indexPath)) { try (DirectoryStream stream = Files.newDirectoryStream(indexPath)) { - String currentIndex = indexPath.getFileName().toString(); for (Path shardPath : stream) { String fileName = shardPath.getFileName().toString(); if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) { int shardId = Integer.parseInt(fileName); - ShardId id = new ShardId(currentIndex, shardId); + ShardId id = new ShardId(index, shardId); shardIds.add(id); } } @@ -827,7 +875,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { * @param shardId shard to resolve the path to */ public Path resolveCustomLocation(IndexSettings indexSettings, final ShardId shardId) { - return resolveCustomLocation(indexSettings, shardId.index().name()).resolve(Integer.toString(shardId.id())); + return resolveCustomLocation(indexSettings, shardId.getIndexName()).resolve(Integer.toString(shardId.id())); } /** diff --git a/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java b/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java index d2bbeafed4d..e2fcb56b1e1 100644 --- a/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java +++ b/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java @@ -120,7 +120,7 @@ public class DanglingIndicesState extends AbstractComponent { IndexMetaData indexMetaData = metaStateService.loadIndexState(indexName); if (indexMetaData != null) { logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, auto import to cluster state", indexName); - if (!indexMetaData.getIndex().equals(indexName)) { + if (!indexMetaData.getIndex().getName().equals(indexName)) { logger.info("dangled index directory name is [{}], state name is [{}], renaming to directory name", indexName, indexMetaData.getIndex()); indexMetaData = IndexMetaData.builder(indexMetaData).index(indexName).build(); } diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java index bbb2670e194..c42cc43dac4 100644 --- 
a/core/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -99,7 +99,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener { electedGlobalState = nodeState.metaData(); } for (ObjectCursor cursor : nodeState.metaData().indices().values()) { - indices.addTo(cursor.value.getIndex(), 1); + indices.addTo(cursor.value.getIndex().getName(), 1); } } if (found < requiredAllocation) { diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index c6a65ff082c..e90cb750cf5 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -130,10 +130,10 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL for (IndexMetaData indexMetaData : newMetaData) { IndexMetaData indexMetaDataOnDisk = null; if (indexMetaData.getState().equals(IndexMetaData.State.CLOSE)) { - indexMetaDataOnDisk = metaStateService.loadIndexState(indexMetaData.getIndex()); + indexMetaDataOnDisk = metaStateService.loadIndexState(indexMetaData.getIndex().getName()); } if (indexMetaDataOnDisk != null) { - newPreviouslyWrittenIndices.add(indexMetaDataOnDisk.getIndex()); + newPreviouslyWrittenIndices.add(indexMetaDataOnDisk.getIndex().getName()); } } newPreviouslyWrittenIndices.addAll(previouslyWrittenIndices); @@ -289,19 +289,19 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL } Set indices = new HashSet<>(); for (ShardRouting routing : newRoutingNode) { - indices.add(routing.index()); + indices.add(routing.index().getName()); } // we have to check the meta data also: closed indices will not appear in the routing table, but we must still write the state if we have it written on disk previously for (IndexMetaData indexMetaData : state.metaData()) { boolean isOrWasClosed = indexMetaData.getState().equals(IndexMetaData.State.CLOSE); // if the index is open we might still have to write the state if it just transitioned from closed to open // so we have to check for that as well. 
- IndexMetaData previousMetaData = previousState.metaData().getIndices().get(indexMetaData.getIndex()); + IndexMetaData previousMetaData = previousState.metaData().index(indexMetaData.getIndex()); if (previousMetaData != null) { isOrWasClosed = isOrWasClosed || previousMetaData.getState().equals(IndexMetaData.State.CLOSE); } - if (previouslyWrittenIndices.contains(indexMetaData.getIndex()) && isOrWasClosed) { - indices.add(indexMetaData.getIndex()); + if (previouslyWrittenIndices.contains(indexMetaData.getIndex().getName()) && isOrWasClosed) { + indices.add(indexMetaData.getIndex().getName()); } } return indices; @@ -312,7 +312,7 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL relevantIndices = new HashSet<>(); // we have to iterate over the metadata to make sure we also capture closed indices for (IndexMetaData indexMetaData : state.metaData()) { - relevantIndices.add(indexMetaData.getIndex()); + relevantIndices.add(indexMetaData.getIndex().getName()); } return relevantIndices; } diff --git a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java index 0e151cec5e5..041b8cafecc 100644 --- a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java +++ b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java @@ -116,7 +116,7 @@ public class LocalAllocateDangledIndices extends AbstractComponent { public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel) throws Exception { String[] indexNames = new String[request.indices.length]; for (int i = 0; i < request.indices.length; i++) { - indexNames[i] = request.indices[i].getIndex(); + indexNames[i] = request.indices[i].getIndex().getName(); } clusterService.submitStateUpdateTask("allocation dangled indices " + Arrays.toString(indexNames), new ClusterStateUpdateTask() { @Override @@ -131,10 +131,10 @@ public class LocalAllocateDangledIndices extends AbstractComponent { boolean importNeeded = false; StringBuilder sb = new StringBuilder(); for (IndexMetaData indexMetaData : request.indices) { - if (currentState.metaData().hasIndex(indexMetaData.getIndex())) { + if (currentState.metaData().hasIndex(indexMetaData.getIndex().getName())) { continue; } - if (currentState.metaData().hasAlias(indexMetaData.getIndex())) { + if (currentState.metaData().hasAlias(indexMetaData.getIndex().getName())) { logger.warn("ignoring dangled index [{}] on node [{}] due to an existing alias with the same name", indexMetaData.getIndex(), request.fromNode); continue; diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java index 43f9ffbe26f..6f38b039fec 100644 --- a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java +++ b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java @@ -109,7 +109,7 @@ public class MetaStateService extends AbstractComponent { */ @Nullable IndexMetaData loadIndexState(String index) throws IOException { - return indexStateFormat.loadLatestState(logger, nodeEnv.indexPaths(new Index(index))); + return indexStateFormat.loadLatestState(logger, nodeEnv.indexPaths(index)); } /** @@ -132,8 +132,7 @@ public class MetaStateService extends AbstractComponent { void writeIndex(String reason, IndexMetaData indexMetaData, @Nullable IndexMetaData previousIndexMetaData) throws Exception { logger.trace("[{}] writing state, reason 
[{}]", indexMetaData.getIndex(), reason); try { - indexStateFormat.write(indexMetaData, indexMetaData.getVersion(), - nodeEnv.indexPaths(new Index(indexMetaData.getIndex()))); + indexStateFormat.write(indexMetaData, indexMetaData.getVersion(), nodeEnv.indexPaths(indexMetaData.getIndex().getName())); } catch (Throwable ex) { logger.warn("[{}]: failed to write index state", ex, indexMetaData.getIndex()); throw new IOException("failed to write state for [" + indexMetaData.getIndex() + "]", ex); diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 3d3a0e3b59c..4ac45a3e2fa 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -32,7 +32,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import java.util.ArrayList; import java.util.Collections; @@ -90,7 +89,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { continue; } - final IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + final IndexMetaData indexMetaData = metaData.index(shard.getIndexName()); // don't go wild here and create a new IndexSetting object for every shard this could cause a lot of garbage // on cluster restart if we allocate a boat load of shards if (shard.allocatedPostIndexCreate(indexMetaData) == false) { diff --git a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java index c5c5794a788..c491b804069 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java @@ -38,8 +38,8 @@ public abstract class PriorityComparator implements Comparator { @Override public final int compare(ShardRouting o1, ShardRouting o2) { - final String o1Index = o1.index(); - final String o2Index = o2.index(); + final String o1Index = o1.getIndexName(); + final String o2Index = o2.getIndexName(); int cmp = 0; if (o1Index.equals(o2Index) == false) { final Settings settingsO1 = getIndexSettings(o1Index); diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index 0b5f2bc58d9..a171cafd226 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -74,7 +74,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { } // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... - IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + IndexMetaData indexMetaData = metaData.index(shard.getIndexName()); if (shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } @@ -127,7 +127,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { } // if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one... 
- IndexMetaData indexMetaData = metaData.index(shard.getIndex()); + IndexMetaData indexMetaData = metaData.index(shard.getIndexName()); if (shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index a117eb709af..fb174f4bd45 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -183,7 +183,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesAction { +public class HttpServer extends AbstractLifecycleComponent implements HttpServerAdapter { private final Environment environment; @@ -73,22 +61,9 @@ public class HttpServer extends AbstractLifecycleComponent { this.restController = restController; this.nodeService = nodeService; nodeService.setHttpServer(this); - transport.httpServerAdapter(new Dispatcher(this)); + transport.httpServerAdapter(this); } - static class Dispatcher implements HttpServerAdapter { - - private final HttpServer server; - - Dispatcher(HttpServer server) { - this.server = server; - } - - @Override - public void dispatchRequest(HttpRequest request, HttpChannel channel) { - server.internalDispatchRequest(request, channel); - } - } @Override protected void doStart() { @@ -118,12 +93,12 @@ public class HttpServer extends AbstractLifecycleComponent { return transport.stats(); } - public void internalDispatchRequest(final HttpRequest request, final HttpChannel channel) { + public void dispatchRequest(HttpRequest request, HttpChannel channel, ThreadContext threadContext) { if (request.rawPath().equals("/favicon.ico")) { handleFavicon(request, channel); return; } - restController.dispatchRequest(request, channel); + restController.dispatchRequest(request, channel, threadContext); } void handleFavicon(HttpRequest request, HttpChannel channel) { diff --git a/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java b/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java index a73456f6b52..c49265cae8c 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java +++ b/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java @@ -19,10 +19,12 @@ package org.elasticsearch.http; +import org.elasticsearch.common.util.concurrent.ThreadContext; + /** * */ public interface HttpServerAdapter { - void dispatchRequest(HttpRequest request, HttpChannel channel); + void dispatchRequest(HttpRequest request, HttpChannel channel, ThreadContext context); } diff --git a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java b/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java index 5c05efcd170..71d63d8d1dc 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java +++ b/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java @@ -19,6 +19,7 @@ package org.elasticsearch.http.netty; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent; import org.elasticsearch.rest.support.RestUtils; import org.jboss.netty.channel.ChannelHandler; @@ -41,12 +42,14 @@ public class HttpRequestHandler extends SimpleChannelUpstreamHandler { private final Pattern corsPattern; private final boolean httpPipeliningEnabled; private final boolean detailedErrorsEnabled; + 
private final ThreadContext threadContext; - public HttpRequestHandler(NettyHttpServerTransport serverTransport, boolean detailedErrorsEnabled) { + public HttpRequestHandler(NettyHttpServerTransport serverTransport, boolean detailedErrorsEnabled, ThreadContext threadContext) { this.serverTransport = serverTransport; this.corsPattern = RestUtils.checkCorsSettingForRegex(serverTransport.settings().get(NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN)); this.httpPipeliningEnabled = serverTransport.pipelining; this.detailedErrorsEnabled = detailedErrorsEnabled; + this.threadContext = threadContext; } @Override @@ -60,6 +63,7 @@ public class HttpRequestHandler extends SimpleChannelUpstreamHandler { request = (HttpRequest) e.getMessage(); } + threadContext.copyHeaders(request.headers()); // the netty HTTP handling always copy over the buffer to its own buffer, either in NioWorker internally // when reading, or using a cumalation buffer NettyHttpRequest httpRequest = new NettyHttpRequest(request, e.getChannel()); diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index 0cd0cef336c..00b3c0f8afa 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.BindHttpException; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpInfo; @@ -47,6 +48,7 @@ import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.netty.pipelining.HttpPipeliningHandler; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BindTransportException; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory; @@ -136,6 +138,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent { - private String name; + private final static Index PROTO = new Index("", ""); - private Index() { + private final String name; + private final String uuid; - } - - public Index(String name) { + public Index(String name, String uuid) { this.name = name.intern(); - } - - public String name() { - return this.name; + this.uuid = uuid.intern(); } public String getName() { - return name(); + return this.name; + } + + public String getUUID() { + return uuid; } @Override @@ -55,30 +55,35 @@ public class Index implements Streamable { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null) return false; + if (this == o) { + return true; + } + if (o == null) { + return false; + } Index index1 = (Index) o; - return name.equals(index1.name); + return uuid.equals(index1.uuid) && name.equals(index1.name); // allow for _na_ uuid } @Override public int hashCode() { - return name.hashCode(); + int result = name.hashCode(); + result = 31 * result + uuid.hashCode(); + return result; } - public static Index readIndexName(StreamInput in) throws IOException { - Index index = new Index(); - index.readFrom(in); - return index; + public static 
Index readIndex(StreamInput in) throws IOException { + return PROTO.readFrom(in); } @Override - public void readFrom(StreamInput in) throws IOException { - name = in.readString().intern(); + public Index readFrom(StreamInput in) throws IOException { + return new Index(in.readString(), in.readString()); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); + out.writeString(uuid); } } diff --git a/core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java b/core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java index bc7e55d5a01..035b90dd25e 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java +++ b/core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java @@ -34,6 +34,15 @@ public final class IndexNotFoundException extends ResourceNotFoundException { setIndex(index); } + public IndexNotFoundException(Index index) { + this(index, null); + } + + public IndexNotFoundException(Index index, Throwable cause) { + super("no such index", cause); + setIndex(index); + } + public IndexNotFoundException(StreamInput in) throws IOException { super(in); } diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 4029cea479b..3919584b780 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -92,7 +92,6 @@ public final class IndexSettings { public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60); public static final Setting INDEX_GC_DELETES_SETTING = Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX); - private final String uuid; private final Index index; private final Version version; private final ESLogger logger; @@ -165,7 +164,7 @@ public final class IndexSettings { * @param nodeSettings the nodes settings this index is allocated on. 
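A short sketch of the identity change to Index that the hunks above introduce: the UUID is now part of equality, so an index that was deleted and recreated under the same name compares as a different Index. The names and UUIDs below are made up:

```java
Index firstGeneration  = new Index("logs", "A1b2C3d4");
Index secondGeneration = new Index("logs", "Z9y8X7w6");

assert firstGeneration.getName().equals(secondGeneration.getName());
assert firstGeneration.equals(secondGeneration) == false;         // same name, different UUID
assert firstGeneration.equals(new Index("logs", "A1b2C3d4"));     // equal only when name and UUID both match
```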
*/ public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings) { - this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex().getName()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); } /** @@ -180,9 +179,8 @@ public final class IndexSettings { scopedSettings = indexScopedSettings.copy(nodeSettings, indexMetaData); this.nodeSettings = nodeSettings; this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build(); - this.index = new Index(indexMetaData.getIndex()); + this.index = indexMetaData.getIndex(); version = Version.indexCreated(settings); - uuid = settings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); logger = Loggers.getLogger(getClass(), settings, index); nodeName = settings.get("name", ""); this.indexMetaData = indexMetaData; @@ -206,7 +204,7 @@ public final class IndexSettings { maxResultWindow = scopedSettings.get(MAX_RESULT_WINDOW_SETTING); TTLPurgeDisabled = scopedSettings.get(INDEX_TTL_DISABLE_PURGE_SETTING); this.mergePolicyConfig = new MergePolicyConfig(logger, this); - assert indexNameMatcher.test(indexMetaData.getIndex()); + assert indexNameMatcher.test(indexMetaData.getIndex().getName()); scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, mergePolicyConfig::setNoCFSRatio); scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, mergePolicyConfig::setExpungeDeletesAllowed); @@ -257,7 +255,7 @@ public final class IndexSettings { * Returns the indexes UUID */ public String getUUID() { - return uuid; + return getIndex().getUUID(); } /** @@ -362,7 +360,7 @@ public final class IndexSettings { } final String newUUID = newSettings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); if (newUUID.equals(getUUID()) == false) { - throw new IllegalArgumentException("uuid mismatch on settings update expected: " + uuid + " but was: " + newUUID); + throw new IllegalArgumentException("uuid mismatch on settings update expected: " + getUUID() + " but was: " + newUUID); } this.indexMetaData = indexMetaData; final Settings existingSettings = this.settings; diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index f18cc631aea..52858416ee0 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; -import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; @@ -120,10 +119,10 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L final Object coreCacheReader = context.reader().getCoreCacheKey(); final ShardId shardId = ShardUtils.extractShardId(context.reader()); if (shardId != null // can't require it because of the percolator - && indexSettings.getIndex().getName().equals(shardId.getIndex()) == false) { + && 
indexSettings.getIndex().equals(shardId.getIndex()) == false) { // insanity - throw new IllegalStateException("Trying to load bit set for index [" + shardId.getIndex() - + "] with cache of index [" + indexSettings.getIndex().getName() + "]"); + throw new IllegalStateException("Trying to load bit set for index " + shardId.getIndex() + + " with cache of index " + indexSettings.getIndex()); } Cache filterToFbs = loadedFilters.computeIfAbsent(coreCacheReader, key -> { context.reader().addCoreClosedListener(BitsetFilterCache.this); diff --git a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java index 38410ab0cb6..6dd710e4e89 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java +++ b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java @@ -162,7 +162,7 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler { @Override protected MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException { MergeThread thread = super.getMergeThread(writer, merge); - thread.setName(EsExecutors.threadName(indexSettings, "[" + shardId.index().name() + "][" + shardId.id() + "]: " + thread.getName())); + thread.setName(EsExecutors.threadName(indexSettings, "[" + shardId.getIndexName() + "][" + shardId.id() + "]: " + thread.getName())); return thread; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java index f2c4fa826f1..7fdacbf53ce 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java @@ -102,7 +102,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { private IndexIndexFieldData(IndexSettings indexSettings, String name) { super(indexSettings, name, new FieldDataType("string"), null, null); - atomicFieldData = new IndexAtomicFieldData(index().name()); + atomicFieldData = new IndexAtomicFieldData(index().getName()); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java index ef901263d0a..51391061a6e 100644 --- a/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -19,9 +19,7 @@ package org.elasticsearch.index.get; -import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.Term; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; @@ -52,10 +50,8 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase; import org.elasticsearch.search.fetch.source.FetchSourceContext; -import org.elasticsearch.search.internal.InternalSearchHitField; import org.elasticsearch.search.lookup.LeafSearchLookup; import 
org.elasticsearch.search.lookup.SearchLookup; @@ -116,7 +112,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { */ public GetResult get(Engine.GetResult engineGetResult, String id, String type, String[] fields, FetchSourceContext fetchSourceContext, boolean ignoreErrorsOnGeneratedFields) { if (!engineGetResult.exists()) { - return new GetResult(shardId.index().name(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); } currentMetric.inc(); @@ -125,7 +121,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { DocumentMapper docMapper = mapperService.documentMapper(type); if (docMapper == null) { missingMetric.inc(System.nanoTime() - now); - return new GetResult(shardId.index().name(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); } fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, fields); GetResult getResult = innerGetLoadFromStoredFields(type, id, fields, fetchSourceContext, engineGetResult, docMapper, ignoreErrorsOnGeneratedFields); @@ -174,25 +170,25 @@ public final class ShardGetService extends AbstractIndexShardComponent { } } if (get == null) { - return new GetResult(shardId.index().name(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); } if (!get.exists()) { // no need to release here as well..., we release in the for loop for non exists - return new GetResult(shardId.index().name(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); } } else { get = indexShard.get(new Engine.Get(realtime, new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(type, id))) .version(version).versionType(versionType)); if (!get.exists()) { get.release(); - return new GetResult(shardId.index().name(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); } } DocumentMapper docMapper = mapperService.documentMapper(type); if (docMapper == null) { get.release(); - return new GetResult(shardId.index().name(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); } try { @@ -306,7 +302,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { } } - return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), sourceToBeReturned, fields); + return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), sourceToBeReturned, fields); } } finally { get.release(); @@ -422,7 +418,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { } } - return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), source, fields); + return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), source, fields); } private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 67ab567126e..8b754b8bc29 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -23,27 +23,14 @@ import com.carrotsearch.hppc.ObjectHashSet; import org.apache.lucene.analysis.Analyzer; 
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.Term; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchGenerationException; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.Mapper.BuilderContext; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityService; @@ -63,7 +50,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -410,7 +396,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } if (allowedNestedFields >= 0 && actualNestedFields > allowedNestedFields) { - throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().name() + "] has been exceeded"); + throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().getName() + "] has been exceeded"); } } diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index f55a739caf3..67ba0aaf1d2 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -127,7 +127,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent } } } catch (Exception e) { - throw new PercolatorException(shardId().index(), "failed to parse query [" + id + "]", e); + throw new PercolatorException(shardId().getIndex(), "failed to parse query [" + id + "]", e); } return null; } @@ -174,7 +174,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent } loadedQueries = queries.size(); } catch (Exception e) { - throw new PercolatorException(shardId.index(), "failed to load queries from percolator index", e); + throw new PercolatorException(shardId.getIndex(), "failed to load queries from percolator index", e); } logger.debug("done loading [{}] percolator queries", loadedQueries); } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 454465727b7..f7d8b22d785 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -240,7 +240,6 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder moreLikeFields, boolean useDefaultField) { if (item.index() == null) { - item.index(context.index().name()); + item.index(context.index().getName()); } if (item.type() == null) { if (context.queryTypes().size() > 1) { @@ -917,7 +917,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder { if (this.termsLookup != null) { termsLookup = new TermsLookup(this.termsLookup); if (termsLookup.index() == null) { - termsLookup.index(context.index().name()); + termsLookup.index(context.index().getName()); } Client client = context.getClient(); terms = fetch(termsLookup, client); @@ -249,7 +249,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { List terms = new ArrayList<>(); GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id()) .preference("_local").routing(termsLookup.routing()); - getRequest.copyContextAndHeadersFrom(SearchContext.current()); final GetResponse getResponse = client.get(getRequest).actionGet(); if (getResponse.isExists()) { List extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap()); diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java index 2a7169d9501..f67e60f33fa 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java @@ -125,7 +125,7 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder uidFieldData = context.getForField(fieldType); return new RandomScoreFunction(this.seed == null ? 
hash(context.nowInMillis()) : seed, salt, uidFieldData); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardId.java b/core/src/main/java/org/elasticsearch/index/shard/ShardId.java index 2c0eb7c9e38..f021cb4c162 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShardId.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShardId.java @@ -40,22 +40,22 @@ public class ShardId implements Streamable, Comparable { private ShardId() { } - public ShardId(String index, int shardId) { - this(new Index(index), shardId); - } - public ShardId(Index index, int shardId) { this.index = index; this.shardId = shardId; this.hashCode = computeHashCode(); } - public Index index() { - return this.index; + public ShardId(String index, String indexUUID, int shardId) { + this(new Index(index, indexUUID), shardId); } - public String getIndex() { - return index().name(); + public Index getIndex() { + return index; + } + + public String getIndexName() { + return index.getName(); } public int id() { @@ -68,7 +68,7 @@ public class ShardId implements Streamable, Comparable { @Override public String toString() { - return "[" + index.name() + "][" + shardId + "]"; + return "[" + index.getName() + "][" + shardId + "]"; } @Override @@ -76,7 +76,7 @@ public class ShardId implements Streamable, Comparable { if (this == o) return true; if (o == null) return false; ShardId shardId1 = (ShardId) o; - return shardId == shardId1.shardId && index.name().equals(shardId1.index.name()); + return shardId == shardId1.shardId && index.getName().equals(shardId1.index.getName()); } @Override @@ -98,7 +98,7 @@ public class ShardId implements Streamable, Comparable { @Override public void readFrom(StreamInput in) throws IOException { - index = Index.readIndexName(in); + index = Index.readIndex(in); shardId = in.readVInt(); hashCode = computeHashCode(); } @@ -112,7 +112,7 @@ public class ShardId implements Streamable, Comparable { @Override public int compareTo(ShardId o) { if (o.getId() == shardId) { - return index.name().compareTo(o.getIndex()); + return index.getName().compareTo(o.getIndex().getName()); } return Integer.compare(shardId, o.getId()); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java index d940d1a93cd..e870057a149 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java @@ -45,8 +45,8 @@ public final class ShardPath { public ShardPath(boolean isCustomDataPath, Path dataPath, Path shardStatePath, String indexUUID, ShardId shardId) { assert dataPath.getFileName().toString().equals(Integer.toString(shardId.id())) : "dataPath must end with the shard ID but didn't: " + dataPath.toString(); assert shardStatePath.getFileName().toString().equals(Integer.toString(shardId.id())) : "shardStatePath must end with the shard ID but didn't: " + dataPath.toString(); - assert dataPath.getParent().getFileName().toString().equals(shardId.getIndex()) : "dataPath must end with index/shardID but didn't: " + dataPath.toString(); - assert shardStatePath.getParent().getFileName().toString().equals(shardId.getIndex()) : "shardStatePath must end with index/shardID but didn't: " + dataPath.toString(); + assert dataPath.getParent().getFileName().toString().equals(shardId.getIndexName()) : "dataPath must end with index/shardID but didn't: " + dataPath.toString(); + assert 
shardStatePath.getParent().getFileName().toString().equals(shardId.getIndexName()) : "shardStatePath must end with index/shardID but didn't: " + dataPath.toString(); if (isCustomDataPath && dataPath.equals(shardStatePath)) { throw new IllegalArgumentException("shard state path must be different to the data path when using custom data paths"); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index 0fc166cacc5..5f5aa95a994 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.Directory; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RestoreSource; import org.elasticsearch.cluster.routing.ShardRouting; @@ -254,8 +255,8 @@ final class StoreRecovery { translogState.totalOperationsOnStart(0); indexShard.prepareForIndexRecovery(); ShardId snapshotShardId = shardId; - if (!shardId.getIndex().equals(restoreSource.index())) { - snapshotShardId = new ShardId(restoreSource.index(), shardId.id()); + if (!shardId.getIndexName().equals(restoreSource.index())) { + snapshotShardId = new ShardId(restoreSource.index(), IndexMetaData.INDEX_UUID_NA_VALUE, shardId.id()); } indexShardRepository.restore(restoreSource.snapshotId(), restoreSource.version(), shardId, snapshotShardId, indexShard.recoveryState()); indexShard.skipTranslogRecovery(); diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index f05f64fa19c..c2a134b08e8 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -299,7 +299,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements this.snapshotId = snapshotId; this.version = version; this.shardId = shardId; - blobContainer = blobStore.blobContainer(basePath.add("indices").add(snapshotShardId.getIndex()).add(Integer.toString(snapshotShardId.getId()))); + blobContainer = blobStore.blobContainer(basePath.add("indices").add(snapshotShardId.getIndexName()).add(Integer.toString(snapshotShardId.getId()))); } /** diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index f6e48e718b9..ec643154fed 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -70,6 +70,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.RefCounted; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.env.ShardLock; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.AbstractIndexShardComponent; @@ -408,9 +409,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * * @throws IOException if the 
index we try to read is corrupted */ - public static MetadataSnapshot readMetadataSnapshot(Path indexLocation, ESLogger logger) throws IOException { + public static MetadataSnapshot readMetadataSnapshot(Path indexLocation, ShardId shardId, ESLogger logger) throws IOException { try (Directory dir = new SimpleFSDirectory(indexLocation)) { - failIfCorrupted(dir, new ShardId("", 1)); + failIfCorrupted(dir, shardId); return new MetadataSnapshot(null, dir, logger); } catch (IndexNotFoundException ex) { // that's fine - happens all the time no need to log @@ -425,9 +426,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * can be successfully opened. This includes reading the segment infos and possible * corruption markers. */ - public static boolean canOpenIndex(ESLogger logger, Path indexLocation) throws IOException { + public static boolean canOpenIndex(ESLogger logger, Path indexLocation, ShardId shardId) throws IOException { try { - tryOpenIndex(indexLocation); + tryOpenIndex(indexLocation, shardId); } catch (Exception ex) { logger.trace("Can't open index for path [{}]", ex, indexLocation); return false; @@ -440,9 +441,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * segment infos and possible corruption markers. If the index can not * be opened, an exception is thrown */ - public static void tryOpenIndex(Path indexLocation) throws IOException { + public static void tryOpenIndex(Path indexLocation, ShardId shardId) throws IOException { try (Directory dir = new SimpleFSDirectory(indexLocation)) { - failIfCorrupted(dir, new ShardId("", 1)); + failIfCorrupted(dir, shardId); Lucene.readSegmentInfos(dir); } } diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 17777756056..fbc18fd578a 100644 --- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -82,7 +82,7 @@ public class TermVectorsService { public TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequest request) { - final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(indexShard.shardId().index().name(), request.type(), request.id()); + final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(indexShard.shardId().getIndex().getName(), request.type(), request.id()); final Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id())); Engine.GetResult get = indexShard.get(new Engine.Get(request.realtime(), uidTerm).version(request.version()).versionType(request.versionType())); @@ -262,7 +262,7 @@ public class TermVectorsService { private Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request, boolean doAllFields) throws Throwable { // parse the document, at the moment we do update the mapping, just like percolate - ParsedDocument parsedDocument = parseDocument(indexShard, indexShard.shardId().getIndex(), request.type(), request.doc()); + ParsedDocument parsedDocument = parseDocument(indexShard, indexShard.shardId().getIndexName(), request.type(), request.doc()); // select the right fields and generate term vectors ParseContext.Document doc = parsedDocument.rootDoc(); diff --git a/core/src/main/java/org/elasticsearch/indices/IndexCreationException.java 
b/core/src/main/java/org/elasticsearch/indices/IndexCreationException.java index 09b6696e112..1248d73b81e 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndexCreationException.java +++ b/core/src/main/java/org/elasticsearch/indices/IndexCreationException.java @@ -30,8 +30,8 @@ import java.io.IOException; */ public class IndexCreationException extends ElasticsearchException implements ElasticsearchWrapperException { - public IndexCreationException(Index index, Throwable cause) { - super("failed to create index", cause); + public IndexCreationException(String index, Throwable cause) { + super("failed to create index [{}]", cause, index); setIndex(index); } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index fdc448989d5..bb61fed4362 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -53,7 +53,6 @@ import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; -import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; @@ -61,6 +60,7 @@ import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.mapper.MapperRegistry; @@ -257,6 +257,15 @@ public class IndicesService extends AbstractLifecycleComponent i return indices.get(index); } + /** + * Returns an IndexService for the specified index if exists otherwise returns null. + * + */ + @Nullable + public IndexService indexService(Index index) { + return indexService(index.getName()); + } + /** * Returns an IndexService for the specified index if exists otherwise a {@link IndexNotFoundException} is thrown. */ @@ -268,6 +277,17 @@ public class IndicesService extends AbstractLifecycleComponent i return indexService; } + /** + * Returns an IndexService for the specified index if exists otherwise a {@link IndexNotFoundException} is thrown. 
+ */ + public IndexService indexServiceSafe(Index index) { + IndexService indexService = indexServiceSafe(index.getName()); + if (indexService.indexUUID().equals(index.getUUID()) == false) { + throw new IndexNotFoundException(index); + } + return indexService; + } + /** @@ -280,11 +300,10 @@ public class IndicesService extends AbstractLifecycleComponent i if (!lifecycle.started()) { throw new IllegalStateException("Can't create an index [" + indexMetaData.getIndex() + "], node is closed"); } - final String indexName = indexMetaData.getIndex(); - final Predicate indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(indexName, indexExpression, clusterService.state()); + final Index index = indexMetaData.getIndex(); + final Predicate indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(index.getName(), indexExpression, clusterService.state()); final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting); - Index index = new Index(indexMetaData.getIndex()); - if (indices.containsKey(index.name())) { + if (indices.containsKey(index.getName())) { throw new IndexAlreadyExistsException(index); } logger.debug("creating Index [{}], shards [{}]/[{}{}]", @@ -306,7 +325,7 @@ public class IndicesService extends AbstractLifecycleComponent i try { assert indexService.getIndexEventListener() == listener; listener.afterIndexCreated(indexService); - indices = newMapBuilder(indices).put(index.name(), indexService).immutableMap(); + indices = newMapBuilder(indices).put(index.getName(), indexService).immutableMap(); success = true; return indexService; } finally { @@ -401,7 +420,7 @@ public class IndicesService extends AbstractLifecycleComponent i public void deleteClosedIndex(String reason, IndexMetaData metaData, ClusterState clusterState) { if (nodeEnv.hasNodeFile()) { - String indexName = metaData.getIndex(); + String indexName = metaData.getIndex().getName(); try { if (clusterState.metaData().hasIndex(indexName)) { final IndexMetaData index = clusterState.metaData().index(indexName); @@ -421,7 +440,7 @@ public class IndicesService extends AbstractLifecycleComponent i public void deleteIndexStore(String reason, IndexMetaData metaData, ClusterState clusterState, boolean closed) throws IOException { if (nodeEnv.hasNodeFile()) { synchronized (this) { - String indexName = metaData.getIndex(); + String indexName = metaData.getIndex().getName(); if (indices.containsKey(indexName)) { String localUUid = indices.get(indexName).indexUUID(); throw new IllegalStateException("Can't delete index store for [" + indexName + "] - it's still part of the indices service [" + localUUid + "] [" + metaData.getIndexUUID() + "]"); @@ -433,9 +452,8 @@ public class IndicesService extends AbstractLifecycleComponent i throw new IllegalStateException("Can't delete closed index store for [" + indexName + "] - it's still part of the cluster state [" + index.getIndexUUID() + "] [" + metaData.getIndexUUID() + "]"); } } - Index index = new Index(metaData.getIndex()); final IndexSettings indexSettings = buildIndexSettings(metaData); - deleteIndexStore(reason, index, indexSettings, closed); + deleteIndexStore(reason, indexSettings.getIndex(), indexSettings, closed); } } @@ -460,7 +478,7 @@ public class IndicesService extends AbstractLifecycleComponent i } // this is a pure protection to make sure this index doesn't get re-imported as a dangeling index. 
// we should in the future rather write a tombstone rather than wiping the metadata. - MetaDataStateFormat.deleteMetaState(nodeEnv.indexPaths(index)); + MetaDataStateFormat.deleteMetaState(nodeEnv.indexPaths(index.getName())); } } @@ -490,7 +508,7 @@ public class IndicesService extends AbstractLifecycleComponent i * @throws IOException if an IOException occurs */ public void deleteShardStore(String reason, ShardId shardId, ClusterState clusterState) throws IOException { - final IndexMetaData metaData = clusterState.getMetaData().indices().get(shardId.getIndex()); + final IndexMetaData metaData = clusterState.getMetaData().indices().get(shardId.getIndexName()); final IndexSettings indexSettings = buildIndexSettings(metaData); if (canDeleteShardContent(shardId, indexSettings) == false) { @@ -500,8 +518,8 @@ public class IndicesService extends AbstractLifecycleComponent i logger.debug("{} deleted shard reason [{}]", shardId, reason); if (clusterState.nodes().localNode().isMasterNode() == false && // master nodes keep the index meta data, even if having no shards.. - canDeleteIndexContents(shardId.index(), indexSettings, false)) { - if (nodeEnv.findAllShardIds(shardId.index()).isEmpty()) { + canDeleteIndexContents(shardId.getIndex(), indexSettings, false)) { + if (nodeEnv.findAllShardIds(shardId.getIndex()).isEmpty()) { try { // note that deleteIndexStore have more safety checks and may throw an exception if index was concurrently created. deleteIndexStore("no longer used", metaData, clusterState, false); @@ -510,7 +528,7 @@ public class IndicesService extends AbstractLifecycleComponent i throw new ElasticsearchException("failed to delete unused index after deleting its last shard (" + shardId + ")", e); } } else { - logger.trace("[{}] still has shard stores, leaving as is", shardId.index()); + logger.trace("[{}] still has shard stores, leaving as is", shardId.getIndex()); } } } @@ -524,7 +542,7 @@ public class IndicesService extends AbstractLifecycleComponent i * @return true if the index can be deleted on this node */ public boolean canDeleteIndexContents(Index index, IndexSettings indexSettings, boolean closed) { - final IndexService indexService = this.indices.get(index.name()); + final IndexService indexService = this.indices.get(index.getName()); // Closed indices may be deleted, even if they are on a shared // filesystem. Since it is closed we aren't deleting it for relocation if (indexSettings.isOnSharedFilesystem() == false || closed) { @@ -550,8 +568,8 @@ public class IndicesService extends AbstractLifecycleComponent i * @param indexSettings the shards's relevant {@link IndexSettings}. This is required to access the indexes settings etc. 
*/ public boolean canDeleteShardContent(ShardId shardId, IndexSettings indexSettings) { - assert shardId.getIndex().equals(indexSettings.getIndex().name()); - final IndexService indexService = this.indices.get(shardId.getIndex()); + assert shardId.getIndex().equals(indexSettings.getIndex()); + final IndexService indexService = this.indices.get(shardId.getIndexName()); if (indexSettings.isOnSharedFilesystem() == false) { if (indexService != null && nodeEnv.hasNodeFile()) { return indexService.hasShard(shardId.id()) == false; @@ -586,7 +604,7 @@ public class IndicesService extends AbstractLifecycleComponent i throw new IllegalArgumentException("settings must not be null"); } PendingDelete pendingDelete = new PendingDelete(shardId, settings); - addPendingDelete(shardId.index(), pendingDelete); + addPendingDelete(shardId.getIndex(), pendingDelete); } /** @@ -609,7 +627,7 @@ } private static final class PendingDelete implements Comparable<PendingDelete> { - final String index; + final Index index; final int shardId; final IndexSettings settings; final boolean deleteIndex; @@ -628,7 +646,7 @@ * Creates a new pending delete of a shard */ public PendingDelete(Index index, IndexSettings settings) { - this.index = index.getName(); + this.index = index; this.shardId = -1; this.settings = settings; this.deleteIndex = true; @@ -742,5 +760,4 @@ public AnalysisRegistry getAnalysis() { return analysisRegistry; } - } diff --git a/core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java b/core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java index 163f4df26a2..34dd327c91a 100644 --- a/core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java +++ b/core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java @@ -31,6 +31,10 @@ import java.io.IOException; */ public class InvalidIndexNameException extends ElasticsearchException { + public InvalidIndexNameException(String name, String desc) { + super("Invalid index name [" + name + "], " + desc); + setIndex(name); + } public InvalidIndexNameException(Index index, String name, String desc) { super("Invalid index name [" + name + "], " + desc); setIndex(index); diff --git a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java index 0a036cbd801..7d24d4fa897 100644 --- a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java +++ b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java @@ -172,7 +172,7 @@ public class NodeIndicesStats implements Streamable, ToXContent { int entries = in.readVInt(); statsByShard = new HashMap<>(); for (int i = 0; i < entries; i++) { - Index index = Index.readIndexName(in); + Index index = Index.readIndex(in); int indexShardListSize = in.readVInt(); List<IndexShardStats> indexShardStats = new ArrayList<>(indexShardListSize); for (int j = 0; j < indexShardListSize; j++) { @@ -215,7 +215,7 @@ Map<Index, CommonStats> indexStats = createStatsByIndex(); builder.startObject(Fields.INDICES); for (Map.Entry<Index, CommonStats> entry : indexStats.entrySet()) { - builder.startObject(entry.getKey().name()); + builder.startObject(entry.getKey().getName()); entry.getValue().toXContent(builder, params); builder.endObject(); } @@ -223,7 +223,7 @@ public class NodeIndicesStats implements
Streamable, ToXContent { } else if ("shards".equals(level)) { builder.startObject("shards"); for (Map.Entry> entry : statsByShard.entrySet()) { - builder.startArray(entry.getKey().name()); + builder.startArray(entry.getKey().getName()); for (IndexShardStats indexShardStats : entry.getValue()) { builder.startObject().startObject(String.valueOf(indexShardStats.getShardId().getId())); for (ShardStats shardStats : indexShardStats.getShards()) { diff --git a/core/src/main/java/org/elasticsearch/indices/TypeMissingException.java b/core/src/main/java/org/elasticsearch/indices/TypeMissingException.java index 0a332dbaf18..6c952fb2416 100644 --- a/core/src/main/java/org/elasticsearch/indices/TypeMissingException.java +++ b/core/src/main/java/org/elasticsearch/indices/TypeMissingException.java @@ -37,6 +37,11 @@ public class TypeMissingException extends ElasticsearchException { setIndex(index); } + public TypeMissingException(String index, String... types) { + super("type[" + Arrays.toString(types) + "] missing"); + setIndex(index); + } + public TypeMissingException(StreamInput in) throws IOException{ super(in); } diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 34da596646d..8c2f23f7081 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -234,11 +234,11 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent { @Override public void handle(final IndexShard.ShardFailure shardFailure) { - final IndexService indexService = indicesService.indexService(shardFailure.routing.shardId().index().name()); + final IndexService indexService = indicesService.indexService(shardFailure.routing.shardId().getIndex().getName()); final ShardRouting shardRouting = shardFailure.routing; threadPool.generic().execute(() -> { synchronized (mutex) { diff --git a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 0918ad2afee..90f2cb5073b 100644 --- a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -119,7 +119,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL final IndexMetaData indexMetaData = state.metaData().index(index); totalNumberOfShards += indexMetaData.getTotalNumberOfShards(); numberOfShards += indexMetaData.getNumberOfShards(); - results.put(index, Collections.synchronizedList(new ArrayList())); + results.put(index, Collections.synchronizedList(new ArrayList<>())); } if (numberOfShards == 0) { @@ -130,9 +130,10 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL final CountDown countDown = new CountDown(numberOfShards); for (final String index : concreteIndices) { - final int indexNumberOfShards = state.metaData().index(index).getNumberOfShards(); + final IndexMetaData indexMetaData = state.metaData().index(index); + final int indexNumberOfShards = indexMetaData.getNumberOfShards(); for (int shard = 0; shard < indexNumberOfShards; shard++) { - final ShardId shardId = new ShardId(index, shard); + final ShardId shardId = new ShardId(indexMetaData.getIndex(), shard); attemptSyncedFlush(shardId, new ActionListener() { @Override public void 
onResponse(ShardsSyncedFlushResult syncedFlushResult) { @@ -237,13 +238,13 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL } final IndexShardRoutingTable getShardRoutingTable(ShardId shardId, ClusterState state) { - final IndexRoutingTable indexRoutingTable = state.routingTable().index(shardId.index().name()); + final IndexRoutingTable indexRoutingTable = state.routingTable().index(shardId.getIndexName()); if (indexRoutingTable == null) { - IndexMetaData index = state.getMetaData().index(shardId.index().getName()); + IndexMetaData index = state.getMetaData().index(shardId.getIndexName()); if (index != null && index.getState() == IndexMetaData.State.CLOSE) { - throw new IndexClosedException(shardId.index()); + throw new IndexClosedException(shardId.getIndex()); } - throw new IndexNotFoundException(shardId.index().getName()); + throw new IndexNotFoundException(shardId.getIndexName()); } final IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId.id()); if (shardRoutingTable == null) { diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index c86309db136..8d610dce05b 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -83,7 +83,6 @@ public class RecoverySettings extends AbstractComponent { this.internalActionLongTimeout = INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.get(settings); this.activityTimeout = INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.get(settings); - this.maxBytesPerSec = INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.get(settings); if (maxBytesPerSec.bytes() <= 0) { rateLimiter = null; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java index 1ef9215b7b4..4c2d3d7f60b 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java @@ -84,7 +84,7 @@ public class RecoverySource extends AbstractComponent implements IndexEventListe } private RecoveryResponse recover(final StartRecoveryRequest request) throws IOException { - final IndexService indexService = indicesService.indexServiceSafe(request.shardId().index().name()); + final IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex().getName()); final IndexShard shard = indexService.getShard(request.shardId().id()); // starting recovery from that our (the source) shard state is marking the shard to be in recovery mode as well, otherwise @@ -111,7 +111,7 @@ public class RecoverySource extends AbstractComponent implements IndexEventListe throw new DelayRecoveryException("source node has the state of the target shard to be [" + targetShardRouting.state() + "], expecting to be [initializing]"); } - logger.trace("[{}][{}] starting recovery to {}, mark_as_relocated {}", request.shardId().index().name(), request.shardId().id(), request.targetNode(), request.markAsRelocated()); + logger.trace("[{}][{}] starting recovery to {}, mark_as_relocated {}", request.shardId().getIndex().getName(), request.shardId().id(), request.targetNode(), request.markAsRelocated()); final RecoverySourceHandler handler; if (shard.indexSettings().isOnSharedFilesystem()) { handler = new SharedFSRecoverySourceHandler(shard, 
request, recoverySettings, transportService, logger); diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 4699e8d5ace..ec390d3b23e 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -111,7 +111,7 @@ public class RecoverySourceHandler { this.recoverySettings = recoverySettings; this.logger = logger; this.transportService = transportService; - this.indexName = this.request.shardId().index().name(); + this.indexName = this.request.shardId().getIndex().getName(); this.shardId = this.request.shardId().id(); this.chunkSizeInBytes = recoverySettings.getChunkSize().bytesAsInt(); this.response = new RecoveryResponse(); diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index f7e683b8f14..0912a22a0f5 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -182,7 +182,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe final AtomicReference responseHolder = new AtomicReference<>(); try { - logger.trace("[{}][{}] starting recovery from {}", request.shardId().index().name(), request.shardId().id(), request.sourceNode()); + logger.trace("[{}][{}] starting recovery from {}", request.shardId().getIndex().getName(), request.shardId().id(), request.sourceNode()); recoveryStatus.indexShard().prepareForIndexRecovery(); recoveryStatus.CancellableThreads().execute(new CancellableThreads.Interruptable() { @Override @@ -202,7 +202,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe onGoingRecoveries.markRecoveryAsDone(recoveryStatus.recoveryId()); if (logger.isTraceEnabled()) { StringBuilder sb = new StringBuilder(); - sb.append('[').append(request.shardId().index().name()).append(']').append('[').append(request.shardId().id()).append("] "); + sb.append('[').append(request.shardId().getIndex().getName()).append(']').append('[').append(request.shardId().id()).append("] "); sb.append("recovery completed from ").append(request.sourceNode()).append(", took[").append(recoveryTime).append("]\n"); sb.append(" phase1: recovered_files [").append(recoveryResponse.phase1FileNames.size()).append("]").append(" with total_size of [").append(new ByteSizeValue(recoveryResponse.phase1TotalSize)).append("]") .append(", took [").append(timeValueMillis(recoveryResponse.phase1Time)).append("], throttling_wait [").append(timeValueMillis(recoveryResponse.phase1ThrottlingWaitTime)).append(']') @@ -220,7 +220,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe logger.trace("recovery cancelled", e); } catch (Throwable e) { if (logger.isTraceEnabled()) { - logger.trace("[{}][{}] Got exception on recovery", e, request.shardId().index().name(), request.shardId().id()); + logger.trace("[{}][{}] Got exception on recovery", e, request.shardId().getIndex().getName(), request.shardId().id()); } Throwable cause = ExceptionsHelper.unwrapCause(e); if (cause instanceof CancellableThreads.ExecutionCancelledException) { @@ -308,7 +308,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe @Override public void messageReceived(final 
RecoveryTranslogOperationsRequest request, final TransportChannel channel) throws Exception { try (RecoveriesCollection.StatusRef statusRef = onGoingRecoveries.getStatusSafe(request.recoveryId(), request.shardId())) { - final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger); + final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); final RecoveryStatus recoveryStatus = statusRef.status(); final RecoveryState.Translog translog = recoveryStatus.state().getTranslog(); translog.totalOperations(request.totalTranslogOps()); diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 49851180ec7..6d225af43df 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -75,27 +75,23 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe private final IndicesService indicesService; private final ClusterService clusterService; private final TransportService transportService; + private final ThreadPool threadPool; private TimeValue deleteShardTimeout; @Inject public IndicesStore(Settings settings, IndicesService indicesService, - ClusterService clusterService, TransportService transportService) { + ClusterService clusterService, TransportService transportService, ThreadPool threadPool) { super(settings); this.indicesService = indicesService; this.clusterService = clusterService; this.transportService = transportService; + this.threadPool = threadPool; transportService.registerRequestHandler(ACTION_SHARD_EXISTS, ShardActiveRequest::new, ThreadPool.Names.SAME, new ShardActiveRequestHandler()); this.deleteShardTimeout = INDICES_STORE_DELETE_SHARD_TIMEOUT.get(settings); clusterService.addLast(this); } - IndicesStore() { - super(Settings.EMPTY); - indicesService = null; - this.clusterService = null; - this.transportService = null; - } @Override public void close() { clusterService.remove(this); @@ -117,7 +113,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe if (shardCanBeDeleted(event.state(), indexShardRoutingTable)) { ShardId shardId = indexShardRoutingTable.shardId(); IndexService indexService = indicesService.indexService(indexRoutingTable.getIndex()); - IndexSettings indexSettings = indexService != null ? indexService.getIndexSettings() : new IndexSettings(event.state().getMetaData().index(indexRoutingTable.index()), settings); + IndexSettings indexSettings = indexService != null ? 
indexService.getIndexSettings() : new IndexSettings(event.state().getMetaData().index(indexRoutingTable.getIndex()), settings); if (indicesService.canDeleteShardContent(shardId, indexSettings)) { deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable); } @@ -280,6 +276,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe @Override public void messageReceived(final ShardActiveRequest request, final TransportChannel channel) throws Exception { IndexShard indexShard = getShard(request); + // make sure shard is really there before register cluster state observer if (indexShard == null) { channel.sendResponse(new ShardActiveResponse(false, clusterService.localNode())); @@ -290,7 +287,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe // in general, using a cluster state observer here is a workaround for the fact that we cannot listen on shard state changes explicitly. // instead we wait for the cluster state changes because we know any shard state change will trigger or be // triggered by a cluster state change. - ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout, logger); + ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout, logger, threadPool.getThreadContext()); // check if shard is active. if so, all is good boolean shardActive = shardActive(indexShard); if (shardActive) { @@ -350,14 +347,14 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe logger.trace("shard exists request meant for cluster[{}], but this is cluster[{}], ignoring request", request.clusterName, thisClusterName); return null; } - ShardId shardId = request.shardId; - IndexService indexService = indicesService.indexService(shardId.index().getName()); + IndexService indexService = indicesService.indexService(shardId.getIndexName()); if (indexService != null && indexService.indexUUID().equals(request.indexUUID)) { return indexService.getShardOrNull(shardId.id()); } return null; } + } private static class ShardActiveRequest extends TransportRequest { diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 6a6b05c4ad1..bcc2d7f74c4 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -43,7 +42,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.AsyncShardFetch; -import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; @@ -57,7 +55,6 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; -import 
java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -129,7 +126,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction> classpathPlugins) { Settings tmpSettings = settingsBuilder().put(tmpEnv.settings()) - .put(Client.CLIENT_TYPE_SETTING, CLIENT_TYPE).build(); + .put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build(); tmpSettings = TribeService.processSettings(tmpSettings); ESLogger logger = Loggers.getLogger(Node.class, tmpSettings.get("name")); diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index df4e09d28e8..c5dd64a67bb 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.cli.Terminal; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -56,7 +57,7 @@ public class InternalSettingsPreparer { public static final String SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String TEXT_PROMPT_VALUE = "${prompt.text}"; - public static final String IGNORE_SYSTEM_PROPERTIES_SETTING = "config.ignore_system_properties"; + public static final Setting<Boolean> IGNORE_SYSTEM_PROPERTIES_SETTING = Setting.boolSetting("config.ignore_system_properties", false, false, Setting.Scope.CLUSTER); /** * Prepares the settings by gathering all elasticsearch system properties and setting defaults.
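
The hunk above converts IGNORE_SYSTEM_PROPERTIES_SETTING from a bare String key into a typed Setting<Boolean>, and the hunks below switch its readers from input.getAsBoolean(IGNORE_SYSTEM_PROPERTIES_SETTING, false) to the Setting API, so the default value and scope are declared once on the setting itself. A minimal usage sketch of that pattern follows; it reuses only calls visible in this patch (Setting.boolSetting, Setting.Scope.CLUSTER, get, getKey, getDefault), while the class and method names are illustrative and not part of the change:

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    public class TypedSettingSketch {
        // key, default value, dynamic flag, scope, mirroring the new declaration above
        static final Setting<Boolean> IGNORE_SYS_PROPS =
                Setting.boolSetting("config.ignore_system_properties", false, false, Setting.Scope.CLUSTER);

        static boolean useSystemProperties(Settings input) {
            // Setting.get(Settings) falls back to the declared default (false) when the key is absent
            return IGNORE_SYS_PROPS.get(input) == false;
        }
    }

Callers that only need the key or the default, like the cluster-name handling in the hunk below, can use getKey() and getDefault(Settings.EMPTY) instead of repeating the literal key and default at each call site.
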
@@ -112,7 +113,7 @@ public class InternalSettingsPreparer { } private static boolean useSystemProperties(Settings input) { - return !input.getAsBoolean(IGNORE_SYSTEM_PROPERTIES_SETTING, false); + return !IGNORE_SYSTEM_PROPERTIES_SETTING.get(input); } /** @@ -162,8 +163,8 @@ public class InternalSettingsPreparer { } // put the cluster name - if (output.get(ClusterName.SETTING) == null) { - output.put(ClusterName.SETTING, ClusterName.DEFAULT.value()); + if (output.get(ClusterName.CLUSTER_NAME_SETTING.getKey()) == null) { + output.put(ClusterName.CLUSTER_NAME_SETTING.getKey(), ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)); } replacePromptPlaceholders(output, terminal); diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java index bece4fd4418..622656396bc 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java @@ -31,9 +31,6 @@ import org.apache.lucene.util.Counter; import org.elasticsearch.action.percolate.PercolateShardRequest; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.common.HasContext; -import org.elasticsearch.common.HasContextAndHeaders; -import org.elasticsearch.common.HasHeaders; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lease.Releasables; @@ -123,7 +120,7 @@ public class PercolateContext extends SearchContext { public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard, IndexService indexService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ScriptService scriptService, Query aliasFilter, ParseFieldMatcher parseFieldMatcher) { - super(parseFieldMatcher, request); + super(parseFieldMatcher); this.indexShard = indexShard; this.indexService = indexService; this.fieldDataService = indexService.fieldData(); @@ -144,7 +141,7 @@ public class PercolateContext extends SearchContext { // for testing: PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, MapperService mapperService) { - super(null, request); + super(null); this.searchShardTarget = searchShardTarget; this.mapperService = mapperService; this.indexService = null; @@ -687,82 +684,6 @@ public class PercolateContext extends SearchContext { throw new UnsupportedOperationException(); } - @Override - public V putInContext(Object key, Object value) { - assert false : "percolatecontext does not support contexts & headers"; - return null; - } - - @Override - public void putAllInContext(ObjectObjectAssociativeContainer map) { - assert false : "percolatocontext does not support contexts & headers"; - } - - @Override - public V getFromContext(Object key) { - return null; - } - - @Override - public V getFromContext(Object key, V defaultValue) { - return defaultValue; - } - - @Override - public boolean hasInContext(Object key) { - return false; - } - - @Override - public int contextSize() { - return 0; - } - - @Override - public boolean isContextEmpty() { - return true; - } - - @Override - public ImmutableOpenMap getContext() { - return ImmutableOpenMap.of(); - } - - @Override - public void copyContextFrom(HasContext other) { - assert false : "percolatecontext does not support contexts & headers"; - } - - @Override - public void putHeader(String 
key, V value) { - assert false : "percolatecontext does not support contexts & headers"; - } - - @Override - public V getHeader(String key) { - return null; - } - - @Override - public boolean hasHeader(String key) { - return false; - } - - @Override - public Set getHeaders() { - return Collections.emptySet(); - } - - @Override - public void copyHeadersFrom(HasHeaders from) { - assert false : "percolatecontext does not support contexts & headers"; - } - - @Override - public void copyContextAndHeadersFrom(HasContextAndHeaders other) { - assert false : "percolatecontext does not support contexts & headers"; - } - @Override public Map, Collector> queryCollectors() { return queryCollectors; diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java b/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java index 8edc5212523..973aa18b8fc 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java @@ -62,7 +62,7 @@ public class PercolateDocumentParser { BytesReference source = request.source(); if (source == null || source.length() == 0) { if (request.docSource() != null && request.docSource().length() != 0) { - return parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndex(), request.documentType()); + return parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndexName(), request.documentType()); } else { return null; } @@ -182,7 +182,7 @@ public class PercolateDocumentParser { throw new IllegalArgumentException("Can't specify the document to percolate in the source of the request and as document id"); } - doc = parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndex(), request.documentType()); + doc = parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndexName(), request.documentType()); } if (doc == null) { diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index 6ac0ca680c1..a75bf41c52d 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -39,7 +39,6 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.component.AbstractComponent; @@ -136,14 +135,14 @@ public class PercolatorService extends AbstractComponent { multi = new MultiDocumentPercolatorIndex(cache); } - public ReduceResult reduce(boolean onlyCount, List shardResponses, HasContextAndHeaders headersContext) throws IOException { + public ReduceResult reduce(boolean onlyCount, List shardResponses) throws IOException { if (onlyCount) { long finalCount = 0; for (PercolateShardResponse shardResponse : shardResponses) { finalCount += shardResponse.topDocs().totalHits; } - InternalAggregations reducedAggregations = reduceAggregations(shardResponses, headersContext); + InternalAggregations reducedAggregations = reduceAggregations(shardResponses); return new 
PercolatorService.ReduceResult(finalCount, reducedAggregations); } else { int requestedSize = shardResponses.get(0).requestedSize(); @@ -163,7 +162,7 @@ public class PercolatorService extends AbstractComponent { Map hl = shardResponse.hls().get(doc.doc); matches[i] = new PercolateResponse.Match(new Text(shardResponse.getIndex()), new Text(id), doc.score, hl); } - InternalAggregations reducedAggregations = reduceAggregations(shardResponses, headersContext); + InternalAggregations reducedAggregations = reduceAggregations(shardResponses); return new PercolatorService.ReduceResult(foundMatches, matches, reducedAggregations); } } @@ -180,7 +179,7 @@ public class PercolatorService extends AbstractComponent { // just like is done in other apis. String[] filteringAliases = indexNameExpressionResolver.filteringAliases( clusterService.state(), - indexShard.shardId().index().name(), + indexShard.shardId().getIndex().getName(), request.indices() ); Query aliasFilter = percolateIndexService.aliasFilter(indexShard.getQueryShardContext(), filteringAliases); @@ -309,7 +308,7 @@ public class PercolatorService extends AbstractComponent { cache.close(); } - private InternalAggregations reduceAggregations(List shardResults, HasContextAndHeaders headersContext) { + private InternalAggregations reduceAggregations(List shardResults) { if (shardResults.get(0).aggregations() == null) { return null; } @@ -318,7 +317,7 @@ public class PercolatorService extends AbstractComponent { for (PercolateShardResponse shardResult : shardResults) { aggregationsList.add(shardResult.aggregations()); } - InternalAggregations aggregations = InternalAggregations.reduce(aggregationsList, new InternalAggregation.ReduceContext(bigArrays, scriptService, headersContext)); + InternalAggregations aggregations = InternalAggregations.reduce(aggregationsList, new InternalAggregation.ReduceContext(bigArrays, scriptService)); if (aggregations != null) { List pipelineAggregators = shardResults.get(0).pipelineAggregators(); if (pipelineAggregators != null) { @@ -326,7 +325,7 @@ public class PercolatorService extends AbstractComponent { return (InternalAggregation) p; }).collect(Collectors.toList()); for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) { - InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new InternalAggregation.ReduceContext(bigArrays, scriptService, headersContext)); + InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new InternalAggregation.ReduceContext(bigArrays, scriptService)); newAggs.add(newAgg); } aggregations = new InternalAggregations(newAggs); diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index 29da911f07f..a107c957bd4 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -103,7 +103,7 @@ public class PluginManager { "discovery-multicast", "ingest-geoip", "lang-javascript", - "lang-plan-a", + "lang-painless", "lang-python", "mapper-attachments", "mapper-murmur3", diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index e941492e7f0..59812f908f5 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ 
b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -348,12 +348,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent @@ -41,51 +34,20 @@ import java.util.Set; * {@link org.elasticsearch.rest.RestController#registerRelevantHeaders(String...)} */ public abstract class BaseRestHandler extends AbstractComponent implements RestHandler { - - private final RestController controller; + public static final Setting MULTI_ALLOW_EXPLICIT_INDEX = Setting.boolSetting("rest.action.multi.allow_explicit_index", true, false, Setting.Scope.CLUSTER); private final Client client; protected final ParseFieldMatcher parseFieldMatcher; - protected BaseRestHandler(Settings settings, RestController controller, Client client) { + protected BaseRestHandler(Settings settings, Client client) { super(settings); - this.controller = controller; this.client = client; this.parseFieldMatcher = new ParseFieldMatcher(settings); } @Override public final void handleRequest(RestRequest request, RestChannel channel) throws Exception { - handleRequest(request, channel, new HeadersAndContextCopyClient(client, request, controller.relevantHeaders())); + handleRequest(request, channel, client); } protected abstract void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception; - - static final class HeadersAndContextCopyClient extends FilterClient { - - private final RestRequest restRequest; - private final Set headers; - - HeadersAndContextCopyClient(Client in, RestRequest restRequest, Set headers) { - super(in); - this.restRequest = restRequest; - this.headers = headers; - } - - private static void copyHeadersAndContext(ActionRequest actionRequest, RestRequest restRequest, Set headers) { - for (String usefulHeader : headers) { - String headerValue = restRequest.header(usefulHeader); - if (headerValue != null) { - actionRequest.putHeader(usefulHeader, headerValue); - } - } - actionRequest.copyContextFrom(restRequest); - } - - @Override - protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( - Action action, Request request, ActionListener listener) { - copyHeadersAndContext(request, restRequest, headers); - super.doExecute(action, request, listener); - } - } } diff --git a/core/src/main/java/org/elasticsearch/rest/RestController.java b/core/src/main/java/org/elasticsearch/rest/RestController.java index d0a46d29f65..64e21002d8c 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestController.java +++ b/core/src/main/java/org/elasticsearch/rest/RestController.java @@ -24,13 +24,13 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.path.PathTrie; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.support.RestUtils; import java.io.IOException; import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.HashSet; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -107,12 +107,7 @@ public class RestController extends AbstractLifecycleComponent { RestFilter[] copy = new RestFilter[filters.length + 1]; System.arraycopy(filters, 0, copy, 0, filters.length); copy[filters.length] = preProcessor; - Arrays.sort(copy, new Comparator() { - @Override - public int compare(RestFilter o1, 
RestFilter o2) { - return Integer.compare(o1.order(), o2.order()); - } - }); + Arrays.sort(copy, (o1, o2) -> Integer.compare(o1.order(), o2.order())); filters = copy; } @@ -163,24 +158,31 @@ public class RestController extends AbstractLifecycleComponent { return new ControllerFilterChain(executionFilter); } - public void dispatchRequest(final RestRequest request, final RestChannel channel) { + public void dispatchRequest(final RestRequest request, final RestChannel channel, ThreadContext threadContext) { if (!checkRequestParameters(request, channel)) { return; } - - if (filters.length == 0) { - try { - executeHandler(request, channel); - } catch (Throwable e) { - try { - channel.sendResponse(new BytesRestResponse(channel, e)); - } catch (Throwable e1) { - logger.error("failed to send failure response for uri [" + request.uri() + "]", e1); + try (ThreadContext.StoredContext t = threadContext.stashContext()){ + for (String key : relevantHeaders) { + String httpHeader = request.header(key); + if (httpHeader != null) { + threadContext.putHeader(key, httpHeader); } } - } else { - ControllerFilterChain filterChain = new ControllerFilterChain(handlerFilter); - filterChain.continueProcessing(request, channel); + if (filters.length == 0) { + try { + executeHandler(request, channel); + } catch (Throwable e) { + try { + channel.sendResponse(new BytesRestResponse(channel, e)); + } catch (Throwable e1) { + logger.error("failed to send failure response for uri [" + request.uri() + "]", e1); + } + } + } else { + ControllerFilterChain filterChain = new ControllerFilterChain(handlerFilter); + filterChain.continueProcessing(request, channel); + } } } diff --git a/core/src/main/java/org/elasticsearch/rest/RestRequest.java b/core/src/main/java/org/elasticsearch/rest/RestRequest.java index 81f6052db5c..8872484d589 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/core/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -20,7 +20,6 @@ package org.elasticsearch.rest; import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.ContextAndHeaderHolder; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -38,7 +37,7 @@ import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; /** * */ -public abstract class RestRequest extends ContextAndHeaderHolder implements ToXContent.Params { +public abstract class RestRequest implements ToXContent.Params { public enum Method { GET, POST, PUT, DELETE, OPTIONS, HEAD diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java index badf6f6de58..ccd0f982597 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java @@ -43,7 +43,7 @@ public class RestClusterHealthAction extends BaseRestHandler { @Inject public RestClusterHealthAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.GET, "/_cluster/health", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/health/{index}", this); diff --git 
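Note on the dispatchRequest hunk above: the per-request header copying that HeadersAndContextCopyClient used to do is replaced by stashing a ThreadContext for the duration of the request and putting the relevant HTTP headers on it. A minimal sketch of that pattern, with a hypothetical header name and value (only stashContext, putHeader and getHeader from ThreadContext are assumed):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    class ThreadContextSketch {                                        // hypothetical class
        static void dispatch() {
            ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
            try (ThreadContext.StoredContext ignored = threadContext.stashContext()) {
                // for each registered relevant header, copy the HTTP header value onto the thread context
                threadContext.putHeader("X-Opaque-Id", "request-42");  // hypothetical header and value
                // the handler executes here; code further down the same thread reads the header back
                assert "request-42".equals(threadContext.getHeader("X-Opaque-Id"));
            }
            // closing the StoredContext restores the previous context, so nothing leaks across requests
            assert threadContext.getHeader("X-Opaque-Id") == null;
        }
    }
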
a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java index 24c4c449410..53bec14f967 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java @@ -43,7 +43,7 @@ public class RestNodesHotThreadsAction extends BaseRestHandler { @Inject public RestNodesHotThreadsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/hotthreads", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/hot_threads", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/{nodeId}/hotthreads", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java index f2c51850000..ce1e7811da6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java @@ -52,7 +52,7 @@ public class RestNodesInfoAction extends BaseRestHandler { @Inject public RestNodesInfoAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_nodes", this); // this endpoint is used for metrics, not for nodeIds, like /_nodes/fs controller.registerHandler(GET, "/_nodes/{nodeId}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java index 786891d330d..2b3f0518c37 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java @@ -45,7 +45,7 @@ public class RestNodesStatsAction extends BaseRestHandler { @Inject public RestNodesStatsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_nodes/stats", this); controller.registerHandler(GET, "/_nodes/{nodeId}/stats", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java index 813c7822428..46fef04b857 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java @@ -37,7 +37,7 @@ public class RestListTasksAction extends BaseRestHandler { @Inject public RestListTasksAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_tasks", this); controller.registerHandler(GET, 
"/_tasks/{nodeId}", this); controller.registerHandler(GET, "/_tasks/{nodeId}/{actions}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java index 36e02ba4599..136c1cfae3f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java @@ -40,7 +40,7 @@ public class RestDeleteRepositoryAction extends BaseRestHandler { @Inject public RestDeleteRepositoryAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(DELETE, "/_snapshot/{repository}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java index fd347ccd332..09422481cf3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java @@ -50,7 +50,7 @@ public class RestGetRepositoriesAction extends BaseRestHandler { @Inject public RestGetRepositoriesAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_snapshot", this); controller.registerHandler(GET, "/_snapshot/{repository}", this); this.settingsFilter = settingsFilter; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java index feeeeb77aba..878eb2915bc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java @@ -41,7 +41,7 @@ public class RestPutRepositoryAction extends BaseRestHandler { @Inject public RestPutRepositoryAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(PUT, "/_snapshot/{repository}", this); controller.registerHandler(POST, "/_snapshot/{repository}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java index c0c7ad5b953..306dcbb21b9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java @@ -36,7 +36,7 @@ public class RestVerifyRepositoryAction extends BaseRestHandler { @Inject public RestVerifyRepositoryAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, 
client); controller.registerHandler(POST, "/_snapshot/{repository}/_verify", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java index 387728918a6..529d73d3e59 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java @@ -49,7 +49,7 @@ public class RestClusterRerouteAction extends BaseRestHandler { @Inject public RestClusterRerouteAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) { - super(settings, controller, client); + super(settings, client); this.settingsFilter = settingsFilter; controller.registerHandler(RestRequest.Method.POST, "/_cluster/reroute", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java index 5acbfc48d24..e7c97abbbd1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java @@ -48,7 +48,7 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { @Inject public RestClusterGetSettingsAction(Settings settings, RestController controller, Client client, ClusterSettings clusterSettings) { - super(settings, controller, client); + super(settings, client); this.clusterSettings = clusterSettings; controller.registerHandler(RestRequest.Method.GET, "/_cluster/settings", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java index aa84606b076..64083f1e806 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java @@ -43,7 +43,7 @@ public class RestClusterUpdateSettingsAction extends BaseRestHandler { @Inject public RestClusterUpdateSettingsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.PUT, "/_cluster/settings", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java index ee68c1bbb7a..860e110b2d6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java @@ -42,7 +42,7 @@ public class RestClusterSearchShardsAction extends BaseRestHandler { @Inject public RestClusterSearchShardsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_search_shards", this); controller.registerHandler(POST, 
"/_search_shards", this); controller.registerHandler(GET, "/{index}/_search_shards", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java index bf9dd4a0119..9d6be664d48 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java @@ -41,7 +41,7 @@ public class RestCreateSnapshotAction extends BaseRestHandler { @Inject public RestCreateSnapshotAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(PUT, "/_snapshot/{repository}/{snapshot}", this); controller.registerHandler(POST, "/_snapshot/{repository}/{snapshot}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java index 66b5a4188c0..38c78bd5d88 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java @@ -40,7 +40,7 @@ public class RestDeleteSnapshotAction extends BaseRestHandler { @Inject public RestDeleteSnapshotAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(DELETE, "/_snapshot/{repository}/{snapshot}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java index 123798cf995..1151fed8f23 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java @@ -41,7 +41,7 @@ public class RestGetSnapshotsAction extends BaseRestHandler { @Inject public RestGetSnapshotsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_snapshot/{repository}/{snapshot}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java index 028285d3064..e2a16bd4b46 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java @@ -40,7 +40,7 @@ public class RestRestoreSnapshotAction extends BaseRestHandler { @Inject public RestRestoreSnapshotAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_snapshot/{repository}/{snapshot}/_restore", this); } diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java index b60a740a15d..2e8810e2ba7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java @@ -41,7 +41,7 @@ public class RestSnapshotsStatusAction extends BaseRestHandler { @Inject public RestSnapshotsStatusAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_snapshot/{repository}/{snapshot}/_status", this); controller.registerHandler(GET, "/_snapshot/{repository}/_status", this); controller.registerHandler(GET, "/_snapshot/_status", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java index f28ecfe4888..720d19a7fe4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java @@ -52,7 +52,7 @@ public class RestClusterStateAction extends BaseRestHandler { @Inject public RestClusterStateAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.GET, "/_cluster/state", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/state/{metric}", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/state/{metric}/{indices}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java index b14293ba310..a09820e71b6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java @@ -38,7 +38,7 @@ public class RestClusterStatsAction extends BaseRestHandler { @Inject public RestClusterStatsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.GET, "/_cluster/stats", this); controller.registerHandler(RestRequest.Method.GET, "/_cluster/stats/nodes/{nodeId}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java index 5d9eac430b5..333b6d64491 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java @@ -36,7 +36,7 @@ public class RestPendingClusterTasksAction extends BaseRestHandler { @Inject public RestPendingClusterTasksAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + 
super(settings, client); controller.registerHandler(RestRequest.Method.GET, "/_cluster/pending_tasks", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java index f62d6febee5..c60671f8644 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java @@ -47,7 +47,7 @@ public class RestIndicesAliasesAction extends BaseRestHandler { @Inject public RestIndicesAliasesAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_aliases", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java index 6748cc2509d..7fcaadc3d8b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java @@ -38,7 +38,7 @@ public class RestIndexDeleteAliasesAction extends BaseRestHandler { @Inject public RestIndexDeleteAliasesAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(DELETE, "/{index}/_alias/{name}", this); controller.registerHandler(DELETE, "/{index}/_aliases/{name}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java index aa62ee471dc..da439c63d5e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java @@ -52,7 +52,7 @@ public class RestGetAliasesAction extends BaseRestHandler { @Inject public RestGetAliasesAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_alias/{name}", this); controller.registerHandler(GET, "/{index}/_alias/{name}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java index 4c774b58645..5a45a0a759e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java @@ -51,7 +51,7 @@ public class RestGetIndicesAliasesAction extends BaseRestHandler { @Inject public RestGetIndicesAliasesAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/{index}/_aliases/{name}", this); controller.registerHandler(GET, "/_aliases/{name}", this); } @@ -75,7 +75,7 @@ public class RestGetIndicesAliasesAction extends 
BaseRestHandler { final boolean isAllAliasesRequested = isAllOrWildcard(aliases); for (IndexMetaData indexMetaData : metaData) { - builder.startObject(indexMetaData.getIndex(), XContentBuilder.FieldCaseConversion.NONE); + builder.startObject(indexMetaData.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE); builder.startObject("aliases"); for (ObjectCursor cursor : indexMetaData.getAliases().values()) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java index fce40123b68..15ea664245d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java @@ -44,7 +44,7 @@ public class RestAliasesExistAction extends BaseRestHandler { @Inject public RestAliasesExistAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(HEAD, "/_alias/{name}", this); controller.registerHandler(HEAD, "/{index}/_alias/{name}", this); controller.registerHandler(HEAD, "/{index}/_alias", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java index 4965f6b218d..7a0c2ad466f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java @@ -45,7 +45,7 @@ public class RestIndexPutAliasAction extends BaseRestHandler { @Inject public RestIndexPutAliasAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(PUT, "/{index}/_alias/{name}", this); controller.registerHandler(PUT, "/_alias/{name}", this); controller.registerHandler(PUT, "/{index}/_aliases/{name}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java index 3a86911f464..e440e1b95c7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java @@ -61,7 +61,7 @@ public class RestAnalyzeAction extends BaseRestHandler { @Inject public RestAnalyzeAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_analyze", this); controller.registerHandler(GET, "/{index}/_analyze", this); controller.registerHandler(POST, "/_analyze", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java index cc06a14b8de..7adb6909532 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java +++ 
b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java @@ -51,7 +51,7 @@ public class RestClearIndicesCacheAction extends BaseRestHandler { @Inject public RestClearIndicesCacheAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_cache/clear", this); controller.registerHandler(POST, "/{index}/_cache/clear", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java index 091fbc1680d..5f211b88d11 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java @@ -39,7 +39,7 @@ public class RestCloseIndexAction extends BaseRestHandler { @Inject public RestCloseIndexAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.POST, "/_close", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_close", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java index 41a272cc8b7..46bc9388972 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java @@ -37,7 +37,7 @@ public class RestCreateIndexAction extends BaseRestHandler { @Inject public RestCreateIndexAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.PUT, "/{index}", this); controller.registerHandler(RestRequest.Method.POST, "/{index}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java index 0851fb867b7..4953842c54a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java @@ -39,7 +39,7 @@ public class RestDeleteIndexAction extends BaseRestHandler { @Inject public RestDeleteIndexAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.DELETE, "/", this); controller.registerHandler(RestRequest.Method.DELETE, "/{index}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java index 6843f5c5ce2..72dea18abd9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java @@ -45,7 +45,7 @@ public class RestIndicesExistsAction 
extends BaseRestHandler { @Inject public RestIndicesExistsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(HEAD, "/{index}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java index f1f227edfdd..dd206dcb63a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java @@ -44,7 +44,7 @@ public class RestTypesExistsAction extends BaseRestHandler { @Inject public RestTypesExistsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(HEAD, "/{index}/{type}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java index 47c0451adfc..f3b3304bcf9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java @@ -47,7 +47,7 @@ public class RestFlushAction extends BaseRestHandler { @Inject public RestFlushAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_flush", this); controller.registerHandler(POST, "/{index}/_flush", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java index 4fe893bd411..9bb36f03d65 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java @@ -45,7 +45,7 @@ public class RestSyncedFlushAction extends BaseRestHandler { @Inject public RestSyncedFlushAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_flush/synced", this); controller.registerHandler(POST, "/{index}/_flush/synced", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java index d8ef7bace3a..8aa2683be5e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java @@ -46,7 +46,7 @@ public class RestForceMergeAction extends BaseRestHandler { @Inject public RestForceMergeAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_forcemerge", this); controller.registerHandler(POST, "/{index}/_forcemerge", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java 
b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java index e23dec0f0bc..e54b3d92cbd 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java @@ -57,7 +57,7 @@ public class RestGetIndicesAction extends BaseRestHandler { @Inject public RestGetIndicesAction(Settings settings, RestController controller, Client client, IndexScopedSettings indexScopedSettings) { - super(settings, controller, client); + super(settings, client); this.indexScopedSettings = indexScopedSettings; controller.registerHandler(GET, "/{index}", this); controller.registerHandler(GET, "/{index}/{type}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java index 7594a097c94..0db931d0a7a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java @@ -51,7 +51,7 @@ public class RestGetFieldMappingAction extends BaseRestHandler { @Inject public RestGetFieldMappingAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_mapping/field/{fields}", this); controller.registerHandler(GET, "/_mapping/{type}/field/{fields}", this); controller.registerHandler(GET, "/{index}/_mapping/field/{fields}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java index 48fa60cb4b0..12c29bb781e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java @@ -52,7 +52,7 @@ public class RestGetMappingAction extends BaseRestHandler { @Inject public RestGetMappingAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/{index}/{type}/_mapping", this); controller.registerHandler(GET, "/{index}/_mappings/{type}", this); controller.registerHandler(GET, "/{index}/_mapping/{type}", this); @@ -78,7 +78,7 @@ public class RestGetMappingAction extends BaseRestHandler { } else if (indices.length != 0) { return new BytesRestResponse(channel, new IndexNotFoundException(indices[0])); } else if (types.length != 0) { - return new BytesRestResponse(channel, new TypeMissingException(new Index("_all"), types[0])); + return new BytesRestResponse(channel, new TypeMissingException("_all", types[0])); } else { return new BytesRestResponse(OK, builder.endObject()); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java index 3ceecbfd3a9..fdb16d2fb8f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java +++ 
b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java @@ -44,7 +44,7 @@ public class RestPutMappingAction extends BaseRestHandler { @Inject public RestPutMappingAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(PUT, "/{index}/_mapping/", this); controller.registerHandler(PUT, "/{index}/{type}/_mapping", this); controller.registerHandler(PUT, "/{index}/_mapping/{type}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java index cb22f81ba46..58bda9d3a3d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java @@ -39,7 +39,7 @@ public class RestOpenIndexAction extends BaseRestHandler { @Inject public RestOpenIndexAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.POST, "/_open", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_open", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java index e46831e81e8..88bc9fb8c9f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java @@ -45,7 +45,7 @@ public class RestRecoveryAction extends BaseRestHandler { @Inject public RestRecoveryAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_recovery", this); controller.registerHandler(GET, "/{index}/_recovery", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java index e552b13316a..fcc6d240b34 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java @@ -47,7 +47,7 @@ public class RestRefreshAction extends BaseRestHandler { @Inject public RestRefreshAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_refresh", this); controller.registerHandler(POST, "/{index}/_refresh", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java index a233c75da58..da76a769ce4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java @@ -45,7 +45,7 @@ public class RestIndicesSegmentsAction extends BaseRestHandler { @Inject public 
RestIndicesSegmentsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_segments", this); controller.registerHandler(GET, "/{index}/_segments", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java index b924acc5fb3..7d8748955fd 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java @@ -46,7 +46,7 @@ public class RestGetSettingsAction extends BaseRestHandler { @Inject public RestGetSettingsAction(Settings settings, RestController controller, Client client, IndexScopedSettings indexScopedSettings) { - super(settings, controller, client); + super(settings, client); this.indexScopedSettings = indexScopedSettings; controller.registerHandler(GET, "/{index}/_settings/{name}", this); controller.registerHandler(GET, "/_settings/{name}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java index 1a8ba58306d..bcf43a4baa6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java @@ -53,7 +53,7 @@ public class RestUpdateSettingsAction extends BaseRestHandler { @Inject public RestUpdateSettingsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.PUT, "/{index}/_settings", this); controller.registerHandler(RestRequest.Method.PUT, "/_settings", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java index e2dc64cc475..586599c1a1e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java @@ -46,7 +46,7 @@ public class RestIndicesShardStoresAction extends BaseRestHandler { @Inject public RestIndicesShardStoresAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_shard_stores", this); controller.registerHandler(GET, "/{index}/_shard_stores", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java index 891afd6b8cf..e75dfcc4dc7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java @@ -47,7 +47,7 @@ public class RestIndicesStatsAction extends BaseRestHandler { @Inject public RestIndicesStatsAction(Settings settings, RestController controller, 
Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_stats", this); controller.registerHandler(GET, "/_stats/{metric}", this); controller.registerHandler(GET, "/_stats/{metric}/{indexMetric}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java index a4c1869609b..a59ab9ac704 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java @@ -36,7 +36,7 @@ public class RestDeleteIndexTemplateAction extends BaseRestHandler { @Inject public RestDeleteIndexTemplateAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.DELETE, "/_template/{name}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java index d5bfa0db907..d62d97400c5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java @@ -50,7 +50,7 @@ public class RestGetIndexTemplateAction extends BaseRestHandler { @Inject public RestGetIndexTemplateAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_template", this); controller.registerHandler(GET, "/_template/{name}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java index 0838fa887e6..648d083e763 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java @@ -42,7 +42,7 @@ public class RestHeadIndexTemplateAction extends BaseRestHandler { @Inject public RestHeadIndexTemplateAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(HEAD, "/_template/{name}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java index 45f8a674dde..0b08b64e89b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java @@ -36,7 +36,7 @@ public class RestPutIndexTemplateAction extends BaseRestHandler { @Inject public RestPutIndexTemplateAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + 
super(settings, client); controller.registerHandler(RestRequest.Method.PUT, "/_template/{name}", this); controller.registerHandler(RestRequest.Method.POST, "/_template/{name}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java index 6a554db60fe..60a781f90bb 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java @@ -49,7 +49,7 @@ public class RestUpgradeAction extends BaseRestHandler { @Inject public RestUpgradeAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_upgrade", this); controller.registerHandler(POST, "/{index}/_upgrade", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java index 81bdaf7536b..86d6e9d6089 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java @@ -57,7 +57,7 @@ public class RestValidateQueryAction extends BaseRestHandler { @Inject public RestValidateQueryAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_validate/query", this); controller.registerHandler(POST, "/_validate/query", this); controller.registerHandler(GET, "/{index}/_validate/query", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java index 5ebec7130df..f1308657528 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java @@ -52,7 +52,7 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler { @Inject public RestRenderSearchTemplateAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_render/template", this); controller.registerHandler(POST, "/_render/template", this); controller.registerHandler(GET, "/_render/template/{id}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java index df20438fa97..dbc413fcfc0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java @@ -58,7 +58,7 @@ public class RestBulkAction extends BaseRestHandler { @Inject public RestBulkAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); 
controller.registerHandler(POST, "/_bulk", this); controller.registerHandler(PUT, "/_bulk", this); @@ -67,7 +67,7 @@ public class RestBulkAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/_bulk", this); controller.registerHandler(PUT, "/{index}/{type}/_bulk", this); - this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true); + this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java index 895211a0979..12393f58007 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java @@ -39,7 +39,7 @@ import static org.elasticsearch.rest.action.support.RestTable.pad; public abstract class AbstractCatAction extends BaseRestHandler { public AbstractCatAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); } protected abstract void doRequest(final RestRequest request, final RestChannel channel, final Client client); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java index 337684769f3..23229540b96 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java @@ -41,7 +41,7 @@ public class RestCatAction extends BaseRestHandler { @Inject public RestCatAction(Settings settings, RestController controller, Set catActions, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_cat", this); StringBuilder sb = new StringBuilder(); sb.append(CAT_NL); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java index 734fb340090..e5a1b4b49a3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java @@ -127,7 +127,7 @@ public class RestSegmentsAction extends AbstractCatAction { for (Segment segment : segments) { table.startRow(); - table.addCell(shardSegment.getShardRouting().getIndex()); + table.addCell(shardSegment.getShardRouting().getIndexName()); table.addCell(shardSegment.getShardRouting().getId()); table.addCell(shardSegment.getShardRouting().primary() ? 
"p" : "r"); table.addCell(nodes.get(shardSegment.getShardRouting().currentNodeId()).getHostAddress()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 692d5bebbc9..94a82e8e773 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -188,7 +188,7 @@ public class RestShardsAction extends AbstractCatAction { table.startRow(); - table.addCell(shard.index()); + table.addCell(shard.getIndexName()); table.addCell(shard.id()); IndexMetaData indexMeta = state.getState().getMetaData().index(shard.index()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java index 834b3d391b1..c423f7a8537 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java @@ -54,7 +54,7 @@ public class RestCountAction extends BaseRestHandler { @Inject public RestCountAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_count", this); controller.registerHandler(GET, "/_count", this); controller.registerHandler(POST, "/{index}/_count", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java index 4336c9db2d4..8e3449344c4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java @@ -41,7 +41,7 @@ public class RestDeleteAction extends BaseRestHandler { @Inject public RestDeleteAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(DELETE, "/{index}/{type}/{id}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java index 0e472bb0bf3..864cddc4ba0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java @@ -58,7 +58,7 @@ public class RestExplainAction extends BaseRestHandler { @Inject public RestExplainAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) { - super(settings, controller, client); + super(settings, client); this.indicesQueriesRegistry = indicesQueriesRegistry; controller.registerHandler(GET, "/{index}/{type}/{id}/_explain", this); controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java index c314c4325d6..17b406c71eb 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java @@ -50,7 +50,7 @@ public class 
RestFieldStatsAction extends BaseRestHandler { @Inject public RestFieldStatsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_field_stats", this); controller.registerHandler(POST, "/_field_stats", this); controller.registerHandler(GET, "/{index}/_field_stats", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java index e85eef48574..0f541bf7a97 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java @@ -48,7 +48,7 @@ public class RestGetAction extends BaseRestHandler { @Inject public RestGetAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/{index}/{type}/{id}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java index ff6c04a6d12..d38ad458c43 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java @@ -48,7 +48,7 @@ public class RestGetSourceAction extends BaseRestHandler { @Inject public RestGetSourceAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/{index}/{type}/{id}/_source", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java index f32c07f20f1..31fd0cc9eac 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java @@ -44,7 +44,7 @@ public class RestHeadAction extends BaseRestHandler { @Inject public RestHeadAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(HEAD, "/{index}/{type}/{id}", this); controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java index 440312b7cb9..5e3bb8eacf8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java @@ -42,7 +42,7 @@ public class RestMultiGetAction extends BaseRestHandler { @Inject public RestMultiGetAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_mget", this); controller.registerHandler(POST, "/_mget", this); controller.registerHandler(GET, "/{index}/_mget", this); @@ -50,7 +50,7 @@ public class RestMultiGetAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/{type}/_mget", this); controller.registerHandler(POST, "/{index}/{type}/_mget", this); - this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true); + this.allowExplicitIndex = 
MULTI_ALLOW_EXPLICIT_INDEX.get(settings); } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index 0fc15454ecb..26dd1eca78d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -47,7 +47,7 @@ public class RestIndexAction extends BaseRestHandler { @Inject public RestIndexAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/{index}/{type}", this); // auto id creation controller.registerHandler(PUT, "/{index}/{type}/{id}", this); controller.registerHandler(POST, "/{index}/{type}/{id}", this); @@ -58,7 +58,7 @@ public class RestIndexAction extends BaseRestHandler { final class CreateHandler extends BaseRestHandler { protected CreateHandler(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index 2978b894996..6559e32817f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -33,7 +33,7 @@ public class RestDeletePipelineAction extends BaseRestHandler { @Inject public RestDeletePipelineAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.DELETE, "/_ingest/pipeline/{id}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index e038202a325..c11290329b3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -34,7 +34,7 @@ public class RestGetPipelineAction extends BaseRestHandler { @Inject public RestGetPipelineAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index ef7853e3ed8..f0ddc83acaa 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -34,7 +34,7 @@ public class RestPutPipelineAction extends BaseRestHandler { @Inject public RestPutPipelineAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.PUT, "/_ingest/pipeline/{id}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index 
8e615505bb0..82b504b0ea7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -34,7 +34,7 @@ public class RestSimulatePipelineAction extends BaseRestHandler { @Inject public RestSimulatePipelineAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this); controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}/_simulate", this); controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java index 42de9b898ae..aaf0906b0f5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java @@ -48,7 +48,7 @@ public class RestMainAction extends BaseRestHandler { @Inject public RestMainAction(Settings settings, Version version, RestController controller, ClusterName clusterName, Client client, ClusterService clusterService) { - super(settings, controller, client); + super(settings, client); this.version = version; this.clusterName = clusterName; this.clusterService = clusterService; diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java index 879ec78d754..6a12ff1438b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java @@ -44,7 +44,7 @@ public class RestMultiPercolateAction extends BaseRestHandler { @Inject public RestMultiPercolateAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_mpercolate", this); controller.registerHandler(POST, "/{index}/_mpercolate", this); controller.registerHandler(POST, "/{index}/{type}/_mpercolate", this); @@ -53,7 +53,7 @@ public class RestMultiPercolateAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_mpercolate", this); controller.registerHandler(GET, "/{index}/{type}/_mpercolate", this); - this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true); + this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java index 052fa42104b..a7c66b245e1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java @@ -44,7 +44,7 @@ public class RestPercolateAction extends BaseRestHandler { @Inject public RestPercolateAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/{index}/{type}/_percolate", this); controller.registerHandler(POST, "/{index}/{type}/_percolate", this); 
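The multi-document handlers in this part of the diff (RestBulkAction, RestMultiGetAction, RestMultiPercolateAction, and RestMultiSearchAction further down) all make the same substitution for the allow_explicit_index flag. A minimal before/after sketch of that substitution, assuming MULTI_ALLOW_EXPLICIT_INDEX is a Setting of Boolean declared elsewhere (its declaration is not shown in this section) with the same key and default:

// Before: each handler restated the settings key and its default inline.
this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true);

// After: the typed setting owns the key and the default; handlers only read it.
this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);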
@@ -109,7 +109,7 @@ public class RestPercolateAction extends BaseRestHandler { final class RestCountPercolateDocHandler extends BaseRestHandler { private RestCountPercolateDocHandler(Settings settings, final RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); } @Override @@ -123,7 +123,7 @@ public class RestPercolateAction extends BaseRestHandler { final class RestPercolateExistingDocHandler extends BaseRestHandler { protected RestPercolateExistingDocHandler(Settings settings, final RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); } @Override @@ -136,7 +136,7 @@ public class RestPercolateAction extends BaseRestHandler { final class RestCountPercolateExistingDocHandler extends BaseRestHandler { protected RestCountPercolateExistingDocHandler(Settings settings, final RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java index b492e7c513f..9009025d3a9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java @@ -47,7 +47,7 @@ public class RestDeleteIndexedScriptAction extends BaseRestHandler { } protected RestDeleteIndexedScriptAction(Settings settings, RestController controller, boolean registerDefaultHandlers, Client client) { - super(settings, controller, client); + super(settings, client); if (registerDefaultHandlers) { controller.registerHandler(DELETE, "/_scripts/{lang}/{id}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java index a4c6784d415..e2c4ff6373d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java @@ -48,7 +48,7 @@ public class RestGetIndexedScriptAction extends BaseRestHandler { } protected RestGetIndexedScriptAction(Settings settings, RestController controller, boolean registerDefaultHandlers, Client client) { - super(settings, controller, client); + super(settings, client); if (registerDefaultHandlers) { controller.registerHandler(GET, "/_scripts/{lang}/{id}", this); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java index ed440c2b9fa..f5a6f67517e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java @@ -55,7 +55,7 @@ public class RestPutIndexedScriptAction extends BaseRestHandler { } protected RestPutIndexedScriptAction(Settings settings, RestController controller, boolean registerDefaultHandlers, Client client) { - super(settings, controller, client); + super(settings, client); if (registerDefaultHandlers) { controller.registerHandler(POST, "/_scripts/{lang}/{id}", this); controller.registerHandler(PUT, "/_scripts/{lang}/{id}", this); @@ -67,7 +67,7 @@ public class RestPutIndexedScriptAction 
extends BaseRestHandler { final class CreateHandler extends BaseRestHandler { protected CreateHandler(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java index b2a2905585c..0dce23bf3b1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java @@ -47,7 +47,7 @@ public class RestClearScrollAction extends BaseRestHandler { @Inject public RestClearScrollAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(DELETE, "/_search/scroll", this); controller.registerHandler(DELETE, "/_search/scroll/{scroll_id}", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 72ff389fa08..540dd260f8b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -62,7 +62,7 @@ public class RestMultiSearchAction extends BaseRestHandler { @Inject public RestMultiSearchAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_msearch", this); controller.registerHandler(POST, "/_msearch", this); @@ -78,7 +78,7 @@ public class RestMultiSearchAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/{type}/_msearch/template", this); controller.registerHandler(POST, "/{index}/{type}/_msearch/template", this); - this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true); + this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); this.indicesQueriesRegistry = indicesQueriesRegistry; } diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 6db9531af84..e58caea5320 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -65,7 +65,7 @@ public class RestSearchAction extends BaseRestHandler { @Inject public RestSearchAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry queryRegistry) { - super(settings, controller, client); + super(settings, client); this.queryRegistry = queryRegistry; controller.registerHandler(GET, "/_search", this); controller.registerHandler(POST, "/_search", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java index eb7e0465902..9e9964245e4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java @@ -51,7 +51,7 @@ public class RestSearchScrollAction extends BaseRestHandler { @Inject public 
RestSearchScrollAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_search/scroll", this); controller.registerHandler(POST, "/_search/scroll", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java index 2841bbe1fe3..4e6b88b68b8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java @@ -49,7 +49,7 @@ public class RestSuggestAction extends BaseRestHandler { @Inject public RestSuggestAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/_suggest", this); controller.registerHandler(GET, "/_suggest", this); controller.registerHandler(POST, "/{index}/_suggest", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java index 1523d299f03..4d0da8f0d14 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java @@ -50,7 +50,7 @@ public class RestPutSearchTemplateAction extends RestPutIndexedScriptAction { final class CreateHandler extends BaseRestHandler { protected CreateHandler(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java index fe897f9b09f..dfcbeef171c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java @@ -40,7 +40,7 @@ public class RestMultiTermVectorsAction extends BaseRestHandler { @Inject public RestMultiTermVectorsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/_mtermvectors", this); controller.registerHandler(POST, "/_mtermvectors", this); controller.registerHandler(GET, "/{index}/_mtermvectors", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java index af81dfcd0a9..dbbd885fe64 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java @@ -49,7 +49,7 @@ public class RestTermVectorsAction extends BaseRestHandler { @Inject public RestTermVectorsAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(GET, "/{index}/{type}/_termvectors", this); controller.registerHandler(POST, "/{index}/{type}/_termvectors", this); controller.registerHandler(GET, "/{index}/{type}/{id}/_termvectors", this); 
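Every REST handler touched so far follows the same mechanical migration: the RestController is dropped from the BaseRestHandler super() call, while route registration on the controller is left untouched. A short sketch of the pattern, using a hypothetical RestExampleAction (the class name and route are illustrative only, not part of the change):

@Inject
public RestExampleAction(Settings settings, RestController controller, Client client) {
    // The controller is no longer forwarded to BaseRestHandler...
    super(settings, client);
    // ...but it is still injected so the handler can register its own routes.
    controller.registerHandler(GET, "/{index}/_example", this);
}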
diff --git a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java index 24264ca292e..88f90374523 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java @@ -48,7 +48,7 @@ public class RestUpdateAction extends BaseRestHandler { @Inject public RestUpdateAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(POST, "/{index}/{type}/{id}/_update", this); } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index f16c45d630e..d0104656d5d 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -31,7 +31,6 @@ import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptRequest import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; @@ -229,7 +228,7 @@ public class ScriptService extends AbstractComponent implements Closeable { /** * Checks if a script can be executed and compiles it if needed, or returns the previously compiled and cached script. */ - public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map params) { + public CompiledScript compile(Script script, ScriptContext scriptContext, Map params) { if (script == null) { throw new IllegalArgumentException("The parameter script (Script) must not be null."); } @@ -257,14 +256,14 @@ public class ScriptService extends AbstractComponent implements Closeable { " operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported"); } - return compileInternal(script, headersContext, params); + return compileInternal(script, params); } /** * Compiles a script straight-away, or returns the previously compiled and cached script, * without checking if it can be executed based on settings. */ - public CompiledScript compileInternal(Script script, HasContextAndHeaders context, Map params) { + public CompiledScript compileInternal(Script script, Map params) { if (script == null) { throw new IllegalArgumentException("The parameter script (Script) must not be null."); } @@ -301,7 +300,7 @@ public class ScriptService extends AbstractComponent implements Closeable { //the script has been updated in the index since the last look up. final IndexedScript indexedScript = new IndexedScript(lang, name); name = indexedScript.id; - code = getScriptFromIndex(indexedScript.lang, indexedScript.id, context); + code = getScriptFromIndex(indexedScript.lang, indexedScript.id); } CacheKey cacheKey = new CacheKey(scriptEngineService, type == ScriptType.INLINE ? 
null : name, code, params); @@ -327,7 +326,7 @@ public class ScriptService extends AbstractComponent implements Closeable { public void queryScriptIndex(GetIndexedScriptRequest request, final ActionListener listener) { String scriptLang = validateScriptLanguage(request.scriptLang()); - GetRequest getRequest = new GetRequest(request, SCRIPT_INDEX).type(scriptLang).id(request.id()) + GetRequest getRequest = new GetRequest(SCRIPT_INDEX).type(scriptLang).id(request.id()) .version(request.version()).versionType(request.versionType()) .preference("_local"); //Set preference for no forking client.get(getRequest, listener); @@ -342,13 +341,12 @@ public class ScriptService extends AbstractComponent implements Closeable { return scriptLang; } - String getScriptFromIndex(String scriptLang, String id, HasContextAndHeaders context) { + String getScriptFromIndex(String scriptLang, String id) { if (client == null) { throw new IllegalArgumentException("Got an indexed script with no Client registered."); } scriptLang = validateScriptLanguage(scriptLang); GetRequest getRequest = new GetRequest(SCRIPT_INDEX, scriptLang, id); - getRequest.copyContextAndHeadersFrom(context); GetResponse responseFields = client.get(getRequest).actionGet(); if (responseFields.isExists()) { return getScriptFromResponse(responseFields); @@ -396,7 +394,7 @@ public class ScriptService extends AbstractComponent implements Closeable { //verify that the script compiles validate(request.source(), scriptLang); - IndexRequest indexRequest = new IndexRequest(request).index(SCRIPT_INDEX).type(scriptLang).id(request.id()) + IndexRequest indexRequest = new IndexRequest().index(SCRIPT_INDEX).type(scriptLang).id(request.id()) .version(request.version()).versionType(request.versionType()) .source(request.source()).opType(request.opType()).refresh(true); //Always refresh after indexing a template client.index(indexRequest, listener); @@ -404,7 +402,7 @@ public class ScriptService extends AbstractComponent implements Closeable { public void deleteScriptFromIndex(DeleteIndexedScriptRequest request, ActionListener listener) { String scriptLang = validateScriptLanguage(request.scriptLang()); - DeleteRequest deleteRequest = new DeleteRequest(request).index(SCRIPT_INDEX).type(scriptLang).id(request.id()) + DeleteRequest deleteRequest = new DeleteRequest().index(SCRIPT_INDEX).type(scriptLang).id(request.id()) .refresh(true).version(request.version()).versionType(request.versionType()); client.delete(deleteRequest, listener); } @@ -441,8 +439,8 @@ public class ScriptService extends AbstractComponent implements Closeable { /** * Compiles (or retrieves from cache) and executes the provided script */ - public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map params) { - return executable(compile(script, scriptContext, headersContext, params), script.getParams()); + public ExecutableScript executable(Script script, ScriptContext scriptContext, Map params) { + return executable(compile(script, scriptContext, params), script.getParams()); } /** @@ -456,7 +454,7 @@ public class ScriptService extends AbstractComponent implements Closeable { * Compiles (or retrieves from cache) and executes the provided search script */ public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext, Map params) { - CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current(), params); + CompiledScript compiledScript = compile(script, scriptContext, params); return 
getScriptEngineServiceForLang(compiledScript.lang()).search(compiledScript, lookup, script.getParams()); } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 5c74ccca2ba..dd377b9acd6 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -556,7 +556,7 @@ public class SearchService extends AbstractLifecycleComponent imp IndexService indexService = indicesService.indexServiceSafe(request.index()); IndexShard indexShard = indexService.getShard(request.shardId()); - SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId()); + SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), indexShard.shardId().getIndex(), request.shardId()); Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher; @@ -569,7 +569,7 @@ public class SearchService extends AbstractLifecycleComponent imp context.scrollContext().scroll = request.scroll(); } if (request.template() != null) { - ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context, Collections.emptyMap()); + ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, Collections.emptyMap()); BytesReference run = (BytesReference) executable.run(); try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) { QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry()); @@ -610,7 +610,7 @@ public class SearchService extends AbstractLifecycleComponent imp private void freeAllContextForIndex(Index index) { assert index != null; for (SearchContext ctx : activeContexts.values()) { - if (index.equals(ctx.indexShard().shardId().index())) { + if (index.equals(ctx.indexShard().shardId().getIndex())) { freeContext(ctx.id()); } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java index 87a2114a788..d3958505d70 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.Index; import java.io.IOException; @@ -40,9 +41,9 @@ public class SearchShardTarget implements Streamable, Comparable listener) { - transportService.sendRequest(node, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, new ClearScrollContextsRequest(request), new ActionListenerResponseHandler(listener) { + transportService.sendRequest(node, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, new ClearScrollContextsRequest(), new ActionListenerResponseHandler(listener) { @Override public TransportResponse newInstance() { return TransportResponse.Empty.INSTANCE; @@ -220,11 +220,10 @@ public class SearchServiceTransportAction extends AbstractComponent { } ScrollFreeContextRequest(ClearScrollRequest request, long id) { - this((TransportRequest) request, id); + this(id); } - private ScrollFreeContextRequest(TransportRequest request, long id) { - super(request); + private ScrollFreeContextRequest(long id) { 
this.id = id; } @@ -252,7 +251,7 @@ public class SearchServiceTransportAction extends AbstractComponent { } SearchFreeContextRequest(SearchRequest request, long id) { - super(request, id); + super(id); this.originalIndices = new OriginalIndices(request); } @@ -322,14 +321,6 @@ public class SearchServiceTransportAction extends AbstractComponent { } public static class ClearScrollContextsRequest extends TransportRequest { - - public ClearScrollContextsRequest() { - } - - ClearScrollContextsRequest(TransportRequest request) { - super(request); - } - } class ClearScrollContextsTransportHandler implements TransportRequestHandler { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index 1c67a941daf..04b1026446f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations; -import org.elasticsearch.common.DelegatingHasContextAndHeaders; -import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -92,13 +90,12 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St } } - public static class ReduceContext extends DelegatingHasContextAndHeaders { + public static class ReduceContext { private final BigArrays bigArrays; private ScriptService scriptService; - public ReduceContext(BigArrays bigArrays, ScriptService scriptService, HasContextAndHeaders headersContext) { - super(headersContext); + public ReduceContext(BigArrays bigArrays, ScriptService scriptService) { this.bigArrays = bigArrays; this.scriptService = scriptService; } @@ -106,7 +103,7 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St public BigArrays bigArrays() { return bigArrays; } - + public ScriptService scriptService() { return scriptService; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java index 9efea000512..a160451265f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java @@ -87,7 +87,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { @Override public void initialize(InternalAggregation.ReduceContext context) { - searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap()); + searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, Collections.emptyMap()); searchScript.setNextVar("_subset_freq", subsetDfHolder); searchScript.setNextVar("_subset_size", subsetSizeHolder); searchScript.setNextVar("_superset_freq", supersetDfHolder); @@ -175,7 +175,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { } ExecutableScript searchScript; try { - searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap()); + searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, 
Collections.emptyMap()); } catch (Exception e) { throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. the script [{}] could not be loaded", e, script, heuristicName); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java index 00c6b6b49bb..3a516c690e7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java @@ -92,7 +92,7 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement vars.putAll(firstAggregation.reduceScript.getParams()); } CompiledScript compiledScript = reduceContext.scriptService().compile(firstAggregation.reduceScript, - ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); + ScriptContext.Standard.AGGS, Collections.emptyMap()); ExecutableScript script = reduceContext.scriptService().executable(compiledScript, vars); aggregation = script.run(); } else { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index 6603c6289b2..68d886a7bfe 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -59,11 +59,11 @@ public class ScriptedMetricAggregator extends MetricsAggregator { this.params = params; ScriptService scriptService = context.searchContext().scriptService(); if (initScript != null) { - scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap()).run(); + scriptService.executable(initScript, ScriptContext.Standard.AGGS, Collections.emptyMap()).run(); } this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS, Collections.emptyMap()); if (combineScript != null) { - this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap()); + this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, Collections.emptyMap()); } else { this.combineScript = null; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java index 76cb15ed46d..4da355f4f8a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java @@ -94,7 +94,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { InternalMultiBucketAggregation originalAgg = (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); + CompiledScript compiledScript = reduceContext.scriptService().compile(script, 
ScriptContext.Standard.AGGS, Collections.emptyMap()); List newBuckets = new ArrayList<>(); for (Bucket bucket : buckets) { Map vars = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java index edc3b4e87ce..1032d0f0175 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java @@ -89,7 +89,7 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator { InternalMultiBucketAggregation originalAgg = (InternalMultiBucketAggregation) aggregation; List buckets = originalAgg.getBuckets(); - CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap()); + CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, Collections.emptyMap()); List newBuckets = new ArrayList<>(); for (Bucket bucket : buckets) { Map vars = new HashMap<>(); diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index ef16a03831d..d79b1f59a6a 100644 --- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -31,7 +31,6 @@ import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -299,7 +298,7 @@ public class SearchPhaseController extends AbstractComponent { } public InternalSearchResponse merge(ScoreDoc[] sortedDocs, AtomicArray queryResultsArr, - AtomicArray fetchResultsArr, HasContextAndHeaders headersContext) { + AtomicArray fetchResultsArr) { List> queryResults = queryResultsArr.asList(); List> fetchResults = fetchResultsArr.asList(); @@ -407,7 +406,7 @@ public class SearchPhaseController extends AbstractComponent { for (AtomicArray.Entry entry : queryResults) { aggregationsList.add((InternalAggregations) entry.value.queryResult().aggregations()); } - aggregations = InternalAggregations.reduce(aggregationsList, new ReduceContext(bigArrays, scriptService, headersContext)); + aggregations = InternalAggregations.reduce(aggregationsList, new ReduceContext(bigArrays, scriptService)); } } @@ -430,7 +429,7 @@ public class SearchPhaseController extends AbstractComponent { }).collect(Collectors.toList()); for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) { InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext( - bigArrays, scriptService, headersContext)); + bigArrays, scriptService)); newAggs.add(newAgg); } aggregations = new InternalAggregations(newAggs); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java index 0d524ed3e3e..4087eb9a01c 100644 
--- a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.fetch; import com.carrotsearch.hppc.IntArrayList; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; @@ -47,16 +46,7 @@ public class ShardFetchRequest extends TransportRequest { public ShardFetchRequest() { } - public ShardFetchRequest(SearchScrollRequest request, long id, IntArrayList list, ScoreDoc lastEmittedDoc) { - super(request); - this.id = id; - this.docIds = list.buffer; - this.size = list.size(); - this.lastEmittedDoc = lastEmittedDoc; - } - - protected ShardFetchRequest(TransportRequest originalRequest, long id, IntArrayList list, ScoreDoc lastEmittedDoc) { - super(originalRequest); + public ShardFetchRequest(long id, IntArrayList list, ScoreDoc lastEmittedDoc) { this.id = id; this.docIds = list.buffer; this.size = list.size(); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java index cc53b48f135..d908aca0fc8 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java @@ -46,7 +46,7 @@ public class ShardFetchSearchRequest extends ShardFetchRequest implements Indice } public ShardFetchSearchRequest(SearchRequest request, long id, IntArrayList list, ScoreDoc lastEmittedDoc) { - super(request, id, list, lastEmittedDoc); + super(id, list, lastEmittedDoc); this.originalIndices = new OriginalIndices(request); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 7486a451a7a..01431de406d 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -158,7 +158,7 @@ public class DefaultSearchContext extends SearchContext { BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher, TimeValue timeout ) { - super(parseFieldMatcher, request); + super(parseFieldMatcher); this.id = id; this.request = request; this.searchType = request.searchType(); diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 801b46f93b6..73e2dc98f65 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -63,7 +63,7 @@ public abstract class FilteredSearchContext extends SearchContext { public FilteredSearchContext(SearchContext in) { //inner_hits in percolator ends up with null inner search context - super(in == null ? ParseFieldMatcher.EMPTY : in.parseFieldMatcher(), in); + super(in == null ? 
ParseFieldMatcher.EMPTY : in.parseFieldMatcher()); this.in = in; } diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java index 77a490af385..7f918138045 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java @@ -42,7 +42,6 @@ public class InternalScrollSearchRequest extends TransportRequest { } public InternalScrollSearchRequest(SearchScrollRequest request, long id) { - super(request); this.id = id; this.scroll = request.scroll(); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 374826a9879..5880869541b 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -26,8 +26,6 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.common.DelegatingHasContextAndHeaders; -import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.lease.Releasable; @@ -67,7 +65,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; -public abstract class SearchContext extends DelegatingHasContextAndHeaders implements Releasable { +public abstract class SearchContext implements Releasable { private static ThreadLocal current = new ThreadLocal<>(); public final static int DEFAULT_TERMINATE_AFTER = 0; @@ -91,8 +89,7 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple protected final ParseFieldMatcher parseFieldMatcher; - protected SearchContext(ParseFieldMatcher parseFieldMatcher, HasContextAndHeaders contextHeaders) { - super(contextHeaders); + protected SearchContext(ParseFieldMatcher parseFieldMatcher) { this.parseFieldMatcher = parseFieldMatcher; } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java index 9d15dfd5790..0f46461f4a2 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.internal; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.common.ContextAndHeaderHolder; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -57,7 +56,7 @@ import static org.elasticsearch.search.Scroll.readScroll; * */ -public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements ShardSearchRequest { +public class ShardSearchLocalRequest implements ShardSearchRequest { private String index; private int shardId; @@ -84,7 +83,6 @@ public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements S this.scroll = 
searchRequest.scroll(); this.filteringAliases = filteringAliases; this.nowInMillis = nowInMillis; - copyContextAndHeadersFrom(searchRequest); } public ShardSearchLocalRequest(String[] types, long nowInMillis) { @@ -99,7 +97,7 @@ public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements S public ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType searchType, SearchSourceBuilder source, String[] types, Boolean requestCache) { - this.index = shardId.getIndex(); + this.index = shardId.getIndexName(); this.shardId = shardId.id(); this.numberOfShards = numberOfShards; this.searchType = searchType; diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index b1730b6a14e..1f0b3d1f188 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.internal; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; @@ -33,7 +32,7 @@ import java.io.IOException; * It provides all the methods that the {@link org.elasticsearch.search.internal.SearchContext} needs. * Provides a cache key based on its content that can be used to cache shard level response. */ -public interface ShardSearchRequest extends HasContextAndHeaders { +public interface ShardSearchRequest { String index(); diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java index 0f9c0ced411..48ea31c170a 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java @@ -51,7 +51,6 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha public ShardSearchTransportRequest(SearchRequest searchRequest, ShardRouting shardRouting, int numberOfShards, String[] filteringAliases, long nowInMillis) { - super(searchRequest); this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardRouting, numberOfShards, filteringAliases, nowInMillis); this.originalIndices = new OriginalIndices(searchRequest); } diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java index a1395bd73db..15593abf0da 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java +++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java @@ -47,7 +47,6 @@ public class QuerySearchRequest extends TransportRequest implements IndicesReque } public QuerySearchRequest(SearchRequest request, long id, AggregatedDfs dfs) { - super(request); this.id = id; this.dfs = dfs; this.originalIndices = new OriginalIndices(request); diff --git a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java index 7cfcee4de59..13fae70174d 100644 --- a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java @@ -82,7 +82,11 @@ public class SearchAfterBuilder implements ToXContent, FromXContentBuilder type = fieldValue.getClass(); + if (type == String.class) { + out.writeByte((byte) 1); + out.writeString((String) fieldValue); + } else if (type == Integer.class) { + out.writeByte((byte) 2); + out.writeInt((Integer) fieldValue); + } else if (type == Long.class) { + out.writeByte((byte) 3); + out.writeLong((Long) fieldValue); + } else if (type == Float.class) { + out.writeByte((byte) 4); + out.writeFloat((Float) fieldValue); + } else if (type == Double.class) { + out.writeByte((byte) 5); + out.writeDouble((Double) fieldValue); + } else if (type == Byte.class) { + out.writeByte((byte) 6); + out.writeByte((Byte) fieldValue); + } else if (type == Short.class) { + out.writeByte((byte) 7); + out.writeShort((Short) fieldValue); + } else if (type == Boolean.class) { + out.writeByte((byte) 8); + out.writeBoolean((Boolean) fieldValue); + } else if (fieldValue instanceof Text) { + out.writeByte((byte) 9); + out.writeText((Text) fieldValue); + } else { + throw new IOException("Can't handle " + SEARCH_AFTER.getPreferredName() + " field value of type [" + type + "]"); + } } } } @@ -250,7 +257,9 @@ public class SearchAfterBuilder implements ToXContent, FromXContentBuilder shardsToFail = null; for (ObjectObjectCursor shard : entry.shards()) { if (!shard.value.state().completed()) { - if (!event.state().metaData().hasIndex(shard.key.getIndex())) { + if (!event.state().metaData().hasIndex(shard.key.getIndex().getName())) { if (shardsToFail == null) { shardsToFail = new ArrayList<>(); } diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java index 60bd25542c8..7facd49088f 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java @@ -21,6 +21,7 @@ package org.elasticsearch.snapshots; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -37,9 +38,7 @@ import java.io.IOException; * Stores information about failures that occurred during shard snapshotting process */ public class SnapshotShardFailure implements ShardOperationFailedException { - private String index; - - private int shardId; + private ShardId shardId; private String reason; @@ -56,13 +55,11 @@ public class SnapshotShardFailure implements ShardOperationFailedException { * Constructs new snapshot shard failure object * * @param nodeId node where failure occurred - * @param index index which the shard belongs to * @param shardId shard id * @param reason failure reason */ - public SnapshotShardFailure(@Nullable String nodeId, String index, int shardId, String reason) { + public SnapshotShardFailure(@Nullable String nodeId, ShardId shardId, String reason) { this.nodeId = nodeId; - this.index = index; this.shardId = shardId; this.reason = reason; status = RestStatus.INTERNAL_SERVER_ERROR; @@ -75,7 +72,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException { */ @Override public String index() { - return this.index; + return this.shardId.getIndexName(); } /** @@ -85,7 +82,7 @@ 
public class SnapshotShardFailure implements ShardOperationFailedException { */ @Override public int shardId() { - return this.shardId; + return this.shardId.id(); } /** @@ -110,7 +107,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException { @Override public Throwable getCause() { - return new IndexShardSnapshotFailedException(new ShardId(index, shardId), reason); + return new IndexShardSnapshotFailedException(shardId, reason); } /** @@ -138,8 +135,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException { @Override public void readFrom(StreamInput in) throws IOException { nodeId = in.readOptionalString(); - index = in.readString(); - shardId = in.readVInt(); + shardId = ShardId.readShardId(in); reason = in.readString(); status = RestStatus.readFrom(in); } @@ -147,15 +143,14 @@ public class SnapshotShardFailure implements ShardOperationFailedException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(nodeId); - out.writeString(index); - out.writeVInt(shardId); + shardId.writeTo(out); out.writeString(reason); RestStatus.writeTo(out, status); } @Override public String toString() { - return "[" + index + "][" + shardId + "] failed, reason [" + reason + "]"; + return shardId + " failed, reason [" + reason + "]"; } /** @@ -181,6 +176,9 @@ public class SnapshotShardFailure implements ShardOperationFailedException { SnapshotShardFailure snapshotShardFailure = new SnapshotShardFailure(); XContentParser.Token token = parser.currentToken(); + String index = null; + String index_uuid = IndexMetaData.INDEX_UUID_NA_VALUE; + int shardId = -1; if (token == XContentParser.Token.START_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -188,13 +186,15 @@ public class SnapshotShardFailure implements ShardOperationFailedException { token = parser.nextToken(); if (token.isValue()) { if ("index".equals(currentFieldName)) { - snapshotShardFailure.index = parser.text(); + index = parser.text(); + } else if ("index_uuid".equals(currentFieldName)) { + index_uuid = parser.text(); } else if ("node_id".equals(currentFieldName)) { snapshotShardFailure.nodeId = parser.text(); } else if ("reason".equals(currentFieldName)) { snapshotShardFailure.reason = parser.text(); } else if ("shard_id".equals(currentFieldName)) { - snapshotShardFailure.shardId = parser.intValue(); + shardId = parser.intValue(); } else if ("status".equals(currentFieldName)) { snapshotShardFailure.status = RestStatus.valueOf(parser.text()); } else { @@ -208,13 +208,21 @@ public class SnapshotShardFailure implements ShardOperationFailedException { } else { throw new ElasticsearchParseException("unexpected token [{}]", token); } + if (index == null) { + throw new ElasticsearchParseException("index name was not set"); + } + if (shardId == -1) { + throw new ElasticsearchParseException("index shard was not set"); + } + snapshotShardFailure.shardId = new ShardId(index, index_uuid, shardId); return snapshotShardFailure; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("index", index); - builder.field("shard_id", shardId); + builder.field("index", shardId.getIndexName()); + builder.field("index_uuid", shardId.getIndex().getUUID()); + builder.field("shard_id", shardId.id()); builder.field("reason", reason); if (nodeId != null) { builder.field("node_id", nodeId); diff --git 
a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index b1d16a04ead..909fb4a1569 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -488,7 +488,7 @@ public class SnapshotsService extends AbstractLifecycleComponent shardFailures, ShardId shardId) { for (SnapshotShardFailure shardFailure : shardFailures) { - if (shardId.getIndex().equals(shardFailure.index()) && shardId.getId() == shardFailure.shardId()) { + if (shardId.getIndexName().equals(shardFailure.index()) && shardId.getId() == shardFailure.shardId()) { return shardFailure; } } @@ -751,10 +751,10 @@ public class SnapshotsService extends AbstractLifecycleComponent closed = new HashSet<>(); for (ObjectObjectCursor entry : shards) { if (entry.value.state() == State.MISSING) { - if (metaData.hasIndex(entry.key.getIndex()) && metaData.index(entry.key.getIndex()).getState() == IndexMetaData.State.CLOSE) { - closed.add(entry.key.getIndex()); + if (metaData.hasIndex(entry.key.getIndex().getName()) && metaData.index(entry.key.getIndex()).getState() == IndexMetaData.State.CLOSE) { + closed.add(entry.key.getIndex().getName()); } else { - missing.add(entry.key.getIndex()); + missing.add(entry.key.getIndex().getName()); } } } @@ -796,7 +796,7 @@ public class SnapshotsService extends AbstractLifecycleComponent command.run(); + private final ThreadContext threadContext; + public ThreadPool(String name) { this(Settings.builder().put("name", name).build()); } @@ -208,7 +213,7 @@ public class ThreadPool extends AbstractComponent { super(settings); assert settings.get("name") != null : "ThreadPool's settings should contain a name"; - + threadContext = new ThreadContext(settings); Map groupSettings = THREADPOOL_GROUP_SETTING.get(settings).getAsGroups(); validate(groupSettings); @@ -448,7 +453,7 @@ public class ThreadPool extends AbstractComponent { } else { logger.debug("creating thread_pool [{}], type [{}], keep_alive [{}]", name, type, keepAlive); } - Executor executor = EsExecutors.newCached(name, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory); + Executor executor = EsExecutors.newCached(name, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory, threadContext); return new ExecutorHolder(executor, new Info(name, threadPoolType, -1, -1, keepAlive, null)); } else if (ThreadPoolType.FIXED == threadPoolType) { int defaultSize = defaultSettings.getAsInt("size", EsExecutors.boundedNumberOfProcessors(settings)); @@ -483,7 +488,7 @@ public class ThreadPool extends AbstractComponent { int size = applyHardSizeLimit(name, settings.getAsInt("size", defaultSize)); SizeValue queueSize = getAsSizeOrUnbounded(settings, "capacity", getAsSizeOrUnbounded(settings, "queue", getAsSizeOrUnbounded(settings, "queue_size", defaultQueueSize))); logger.debug("creating thread_pool [{}], type [{}], size [{}], queue_size [{}]", name, type, size, queueSize); - Executor executor = EsExecutors.newFixed(name, size, queueSize == null ? -1 : (int) queueSize.singles(), threadFactory); + Executor executor = EsExecutors.newFixed(name, size, queueSize == null ? 
-1 : (int) queueSize.singles(), threadFactory, threadContext); return new ExecutorHolder(executor, new Info(name, threadPoolType, size, size, null, queueSize)); } else if (ThreadPoolType.SCALING == threadPoolType) { TimeValue defaultKeepAlive = defaultSettings.getAsTime("keep_alive", timeValueMinutes(5)); @@ -527,7 +532,7 @@ public class ThreadPool extends AbstractComponent { } else { logger.debug("creating thread_pool [{}], type [{}], min [{}], size [{}], keep_alive [{}]", name, type, min, size, keepAlive); } - Executor executor = EsExecutors.newScaling(name, min, size, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory); + Executor executor = EsExecutors.newScaling(name, min, size, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory, threadContext); return new ExecutorHolder(executor, new Info(name, threadPoolType, min, size, keepAlive, null)); } throw new IllegalArgumentException("No type found [" + type + "], for [" + name + "]"); @@ -914,17 +919,30 @@ public class ThreadPool extends AbstractComponent { */ public static boolean terminate(ThreadPool pool, long timeout, TimeUnit timeUnit) { if (pool != null) { - pool.shutdown(); try { - if (pool.awaitTermination(timeout, timeUnit)) { - return true; + pool.shutdown(); + try { + if (pool.awaitTermination(timeout, timeUnit)) { + return true; + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); + // last resort + pool.shutdownNow(); + } finally { + IOUtils.closeWhileHandlingException(pool); } - // last resort - pool.shutdownNow(); } return false; } + + @Override + public void close() throws IOException { + threadContext.close(); + } + + public ThreadContext getThreadContext() { + return threadContext; + } } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportMessage.java b/core/src/main/java/org/elasticsearch/transport/TransportMessage.java index f52f9179dd6..1434a6eaa73 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportMessage.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportMessage.java @@ -19,29 +19,20 @@ package org.elasticsearch.transport; -import org.elasticsearch.common.ContextAndHeaderHolder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.transport.TransportAddress; import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; -/** - * The transport message is also a {@link ContextAndHeaderHolder context holder} that holds transient context, that is, - * the context is not serialized with message. - */ -public abstract class TransportMessage> extends ContextAndHeaderHolder implements Streamable { +public abstract class TransportMessage> implements Streamable { private TransportAddress remoteAddress; - protected TransportMessage() { - } - - protected TransportMessage(TM message) { - copyContextAndHeadersFrom(message); - } - public void remoteAddress(TransportAddress remoteAddress) { this.remoteAddress = remoteAddress; } @@ -52,16 +43,11 @@ public abstract class TransportMessage> extends @Override public void readFrom(StreamInput in) throws IOException { - headers = in.readBoolean() ? 
in.readMap() : null; + } @Override public void writeTo(StreamOutput out) throws IOException { - if (headers == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - out.writeMap(headers); - } + } } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java index d5c1491f1a6..7db7f0726d0 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java @@ -26,24 +26,12 @@ import org.elasticsearch.tasks.Task; public abstract class TransportRequest extends TransportMessage { public static class Empty extends TransportRequest { - public static final Empty INSTANCE = new Empty(); - - public Empty() { - super(); - } - - public Empty(TransportRequest request) { - super(request); - } } public TransportRequest() { } - protected TransportRequest(TransportRequest request) { - super(request); - } public Task createTask(long id, String type, String action) { return new Task(id, type, action, this::getDescription); diff --git a/core/src/main/java/org/elasticsearch/transport/TransportResponse.java b/core/src/main/java/org/elasticsearch/transport/TransportResponse.java index 8ea7cd60d2d..28dcd12ca53 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportResponse.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportResponse.java @@ -24,23 +24,6 @@ package org.elasticsearch.transport; public abstract class TransportResponse extends TransportMessage { public static class Empty extends TransportResponse { - public static final Empty INSTANCE = new Empty(); - - public Empty() { - super(); - } - - public Empty(TransportResponse request) { - super(request); - } } - - protected TransportResponse() { - } - - protected TransportResponse(TransportResponse response) { - super(response); - } - } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 5d74c4a408f..8cff05a4d6a 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -288,7 +289,7 @@ public class TransportService extends AbstractLifecycleComponent(handler, node, action, timeoutHandler)); + clientHandlers.put(requestId, new RequestHolder<>(new ContextRestoreResponseHandler(threadPool.getThreadContext().newStoredContext(), handler), node, action, timeoutHandler)); if (started.get() == false) { // if we are not started the exception handling will remove the RequestHolder again and calls the handler to notify the caller. // it will only notify if the toStop code hasn't done the work yet. 
@@ -494,6 +495,7 @@ public class TransportService extends AbstractLifecycleComponent implements TransportResponseHandler { + private final TransportResponseHandler delegate; + private final ThreadContext.StoredContext threadContext; + private ContextRestoreResponseHandler(ThreadContext.StoredContext threadContext, TransportResponseHandler delegate) { + this.delegate = delegate; + this.threadContext = threadContext; + } + + @Override + public T newInstance() { + return delegate.newInstance(); + } + + @Override + public void handleResponse(T response) { + threadContext.restore(); + delegate.handleResponse(response); + } + + @Override + public void handleException(TransportException exp) { + threadContext.restore(); + delegate.handleException(exp); + } + + @Override + public String executor() { + return delegate.executor(); + } + } + static class DirectResponseChannel implements TransportChannel { final ESLogger logger; final DiscoveryNode localNode; diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java index ba067fdabdf..7a41bf626c6 100644 --- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.ConnectTransportException; @@ -72,7 +73,7 @@ import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.new public class LocalTransport extends AbstractLifecycleComponent implements Transport { public static final String LOCAL_TRANSPORT_THREAD_NAME_PREFIX = "local_transport"; - private final ThreadPool threadPool; + final ThreadPool threadPool; private final ThreadPoolExecutor workers; private final Version version; private volatile TransportServiceAdapter transportServiceAdapter; @@ -96,7 +97,7 @@ public class LocalTransport extends AbstractLifecycleComponent implem int queueSize = this.settings.getAsInt(TRANSPORT_LOCAL_QUEUE, -1); logger.debug("creating [{}] workers, queue_size [{}]", workerCount, queueSize); final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(this.settings, LOCAL_TRANSPORT_THREAD_NAME_PREFIX); - this.workers = EsExecutors.newFixed(LOCAL_TRANSPORT_THREAD_NAME_PREFIX, workerCount, queueSize, threadFactory); + this.workers = EsExecutors.newFixed(LOCAL_TRANSPORT_THREAD_NAME_PREFIX, workerCount, queueSize, threadFactory, threadPool.getThreadContext()); this.namedWriteableRegistry = namedWriteableRegistry; } @@ -209,6 +210,7 @@ public class LocalTransport extends AbstractLifecycleComponent implem status = TransportStatus.setRequest(status); stream.writeByte(status); // 0 for request, 1 for response. 
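+ // write the sender's thread context headers to the stream ahead of the action name, so the receiving transport can restore them when it calls readHeaders on the incoming stream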
+ threadPool.getThreadContext().writeTo(stream); stream.writeString(action); request.writeTo(stream); @@ -220,12 +222,11 @@ public class LocalTransport extends AbstractLifecycleComponent implem } final byte[] data = stream.bytes().toBytes(); - transportServiceAdapter.sent(data.length); transportServiceAdapter.onRequestSent(node, requestId, action, request, options); - targetTransport.workers().execute(new Runnable() { - @Override - public void run() { + targetTransport.workers().execute(() -> { + ThreadContext threadContext = targetTransport.threadPool.getThreadContext(); + try (ThreadContext.StoredContext context = threadContext.stashContext()) { targetTransport.messageReceived(data, action, LocalTransport.this, version, requestId); } }); @@ -246,8 +247,9 @@ public class LocalTransport extends AbstractLifecycleComponent implem long requestId = stream.readLong(); byte status = stream.readByte(); boolean isRequest = TransportStatus.isRequest(status); - if (isRequest) { + ThreadContext threadContext = threadPool.getThreadContext(); + threadContext.readHeaders(stream); handleRequest(stream, requestId, sourceTransport, version); } else { final TransportResponseHandler handler = transportServiceAdapter.onResponseReceived(requestId); @@ -322,6 +324,7 @@ public class LocalTransport extends AbstractLifecycleComponent implem logger.warn("Failed to send error message back to client for action [" + action + "]", e); logger.warn("Actual Exception", e1); } + } } @@ -338,15 +341,11 @@ public class LocalTransport extends AbstractLifecycleComponent implem } protected void handleParsedResponse(final TransportResponse response, final TransportResponseHandler handler) { - threadPool.executor(handler.executor()).execute(new Runnable() { - @SuppressWarnings({"unchecked"}) - @Override - public void run() { - try { - handler.handleResponse(response); - } catch (Throwable e) { - handleException(handler, new ResponseHandlerFailureTransportException(e)); - } + threadPool.executor(handler.executor()).execute(() -> { + try { + handler.handleResponse(response); + } catch (Throwable e) { + handleException(handler, new ResponseHandlerFailureTransportException(e)); } }); } diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java index e1e85e9a12f..aad31fd8ccd 100644 --- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java +++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport.local; import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportResponse; @@ -79,9 +80,9 @@ public class LocalTransportChannel implements TransportChannel { stream.writeByte(status); // 0 for request, 1 for response. 
response.writeTo(stream); final byte[] data = stream.bytes().toBytes(); - targetTransport.workers().execute(new Runnable() { - @Override - public void run() { + targetTransport.workers().execute(() -> { + ThreadContext threadContext = targetTransport.threadPool.getThreadContext(); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()){ targetTransport.messageReceived(data, action, sourceTransport, version, null); } }); @@ -97,9 +98,9 @@ public class LocalTransportChannel implements TransportChannel { stream.writeThrowable(tx); final byte[] data = stream.bytes().toBytes(); - targetTransport.workers().execute(new Runnable() { - @Override - public void run() { + targetTransport.workers().execute(() -> { + ThreadContext threadContext = targetTransport.threadPool.getThreadContext(); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()){ targetTransport.messageReceived(data, action, sourceTransport, version, null); } }); diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index 8df17f73233..6732b26ddbb 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.RemoteTransportException; @@ -64,9 +65,11 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { protected final TransportServiceAdapter transportServiceAdapter; protected final NettyTransport transport; protected final String profileName; + private final ThreadContext threadContext; public MessageChannelHandler(NettyTransport transport, ESLogger logger, String profileName) { this.threadPool = transport.threadPool(); + this.threadContext = threadPool.getThreadContext(); this.transportServiceAdapter = transport.transportServiceAdapter(); this.transport = transport; this.logger = logger; @@ -101,7 +104,7 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { // buffer, or in the cumlation buffer, which is cleaned each time StreamInput streamIn = ChannelBufferStreamInputFactory.create(buffer, size); boolean success = false; - try { + try (ThreadContext.StoredContext tCtx = threadContext.stashContext()) { long requestId = streamIn.readLong(); byte status = streamIn.readByte(); Version version = Version.fromId(streamIn.readInt()); @@ -123,8 +126,8 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { streamIn = compressor.streamInput(streamIn); } streamIn.setVersion(version); - if (TransportStatus.isRequest(status)) { + threadContext.readHeaders(streamIn); String action = handleRequest(ctx.getChannel(), streamIn, requestId, version); // Chek the entire message has been read diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index 8ea170993e5..f131ce1aed9 100644 --- 
a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -833,6 +833,7 @@ public class NettyTransport extends AbstractLifecycleComponent implem Version version = Version.smallest(this.version, node.version()); stream.setVersion(version); + threadPool.getThreadContext().writeTo(stream); stream.writeString(action); ReleasablePagedBytesReference bytes; diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 8ec6bb2733e..e576f26eb4c 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -344,7 +344,7 @@ public class TribeService extends AbstractLifecycleComponent { clusterStateChanged = true; logger.info("[{}] dropping index [{}] due to conflict with [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe); removeIndex(blocks, metaData, routingTable, tribeIndex); - droppedIndices.add(tribeIndex.getIndex()); + droppedIndices.add(tribeIndex.getIndex().getName()); } else if (onConflict.startsWith(ON_CONFLICT_PREFER)) { // on conflict, prefer a tribe... String preferredTribeName = onConflict.substring(ON_CONFLICT_PREFER.length()); @@ -368,23 +368,23 @@ public class TribeService extends AbstractLifecycleComponent { } private void removeIndex(ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData index) { - metaData.remove(index.getIndex()); - routingTable.remove(index.getIndex()); - blocks.removeIndexBlocks(index.getIndex()); + metaData.remove(index.getIndex().getName()); + routingTable.remove(index.getIndex().getName()); + blocks.removeIndexBlocks(index.getIndex().getName()); } private void addNewIndex(ClusterState tribeState, ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData tribeIndex) { Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build(); metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings)); routingTable.add(tribeState.routingTable().index(tribeIndex.getIndex())); - if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); + if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex().getName())) { + blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_METADATA_BLOCK); } - if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_READ_BLOCK); + if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex().getName())) { + blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_READ_BLOCK); } - if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex())) { - blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); + if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex().getName())) { + blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_WRITE_BLOCK); } } } diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help index d46f7dca29a..9d5b8b3d68d 100644 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help +++ 
b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help @@ -45,7 +45,7 @@ OFFICIAL PLUGINS - discovery-multicast - ingest-geoip - lang-javascript - - lang-plan-a + - lang-painless - lang-python - mapper-attachments - mapper-murmur3 diff --git a/core/src/test/java/org/elasticsearch/ESExceptionTests.java b/core/src/test/java/org/elasticsearch/ESExceptionTests.java index a547e18de52..aef38850efc 100644 --- a/core/src/test/java/org/elasticsearch/ESExceptionTests.java +++ b/core/src/test/java/org/elasticsearch/ESExceptionTests.java @@ -85,9 +85,9 @@ public class ESExceptionTests extends ESTestCase { assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "index_not_found_exception"); assertEquals(rootCauses[0].getMessage(), "no such index"); ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 1)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 1)); ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 2)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1}); if (randomBoolean()) { rootCauses = (randomBoolean() ? new RemoteTransportException("remoteboom", ex) : ex).guessRootCauses(); @@ -105,11 +105,11 @@ public class ESExceptionTests extends ESTestCase { { ShardSearchFailure failure = new ShardSearchFailure( new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 1)); - ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null), - new SearchShardTarget("node_1", "foo1", 1)); - ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null), - new SearchShardTarget("node_1", "foo1", 2)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 1)); + ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1", "_na_"), "foobar", null), + new SearchShardTarget("node_1", new Index("foo1", "_na_"), 1)); + ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1", "_na_"), "foobar", null), + new SearchShardTarget("node_1", new Index("foo1", "_na_"), 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1, failure2}); final ElasticsearchException[] rootCauses = ex.guessRootCauses(); assertEquals(rootCauses.length, 2); @@ -118,7 +118,7 @@ public class ESExceptionTests extends ESTestCase { assertEquals(((ParsingException) rootCauses[0]).getLineNumber(), 1); assertEquals(((ParsingException) rootCauses[0]).getColumnNumber(), 2); assertEquals(ElasticsearchException.getExceptionName(rootCauses[1]), "query_shard_exception"); - assertEquals((rootCauses[1]).getIndex(), "foo1"); + assertEquals((rootCauses[1]).getIndex().getName(), "foo1"); assertEquals(rootCauses[1].getMessage(), "foobar"); } @@ -136,9 +136,9 @@ public class ESExceptionTests extends ESTestCase { public void testDeduplicate() throws IOException { { ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 1)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 1)); ShardSearchFailure failure1 = new 
ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 2)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", randomBoolean() ? failure1.getCause() : failure.getCause(), new ShardSearchFailure[]{failure, failure1}); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); @@ -149,24 +149,24 @@ public class ESExceptionTests extends ESTestCase { } { ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 1)); - ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null), - new SearchShardTarget("node_1", "foo1", 1)); - ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null), - new SearchShardTarget("node_1", "foo1", 2)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 1)); + ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1", "_na_"), "foobar", null), + new SearchShardTarget("node_1", new Index("foo1", "_na_"), 1)); + ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1", "_na_"), "foobar", null), + new SearchShardTarget("node_1", new Index("foo1", "_na_"), 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1, failure2}); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); ex.toXContent(builder, PARAMS); builder.endObject(); - String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}},{\"shard\":1,\"index\":\"foo1\",\"node\":\"node_1\",\"reason\":{\"type\":\"query_shard_exception\",\"reason\":\"foobar\",\"index\":\"foo1\"}}]}"; + String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}},{\"shard\":1,\"index\":\"foo1\",\"node\":\"node_1\",\"reason\":{\"type\":\"query_shard_exception\",\"reason\":\"foobar\",\"index_uuid\":\"_na_\",\"index\":\"foo1\"}}]}"; assertEquals(expected, builder.string()); } { ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 1)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 1)); ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 2)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 2)); NullPointerException nullPointerException = new NullPointerException(); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", nullPointerException, new ShardSearchFailure[]{failure, failure1}); assertEquals(nullPointerException, ex.getCause()); @@ -320,7 +320,7 @@ public class ESExceptionTests extends ESTestCase { new OutOfMemoryError("no memory left"), new AlreadyClosedException("closed!!", new 
NullPointerException()), new LockObtainFailedException("can't lock directory", new NullPointerException()), - new Throwable("this exception is unknown", new QueryShardException(new Index("foo"), "foobar", null) ), // somethin unknown + new Throwable("this exception is unknown", new QueryShardException(new Index("foo", "_na_"), "foobar", null) ), // somethin unknown }; for (Throwable t : causes) { BytesStreamOutput out = new BytesStreamOutput(); diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index db7b5df6662..9f8e861f9ce 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -245,8 +245,8 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testQueryShardException() throws IOException { - QueryShardException ex = serialize(new QueryShardException(new Index("foo"), "fobar", null)); - assertEquals(ex.getIndex(), "foo"); + QueryShardException ex = serialize(new QueryShardException(new Index("foo", "_na_"), "fobar", null)); + assertEquals(ex.getIndex().getName(), "foo"); assertEquals(ex.getMessage(), "fobar"); ex = serialize(new QueryShardException((Index) null, null, null)); @@ -255,7 +255,7 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testSearchException() throws IOException { - SearchShardTarget target = new SearchShardTarget("foo", "bar", 1); + SearchShardTarget target = new SearchShardTarget("foo", new Index("bar", "_na_"), 1); SearchException ex = serialize(new SearchException(target, "hello world")); assertEquals(target, ex.shard()); assertEquals(ex.getMessage(), "hello world"); @@ -268,7 +268,7 @@ public class ExceptionSerializationTests extends ESTestCase { public void testAlreadyExpiredException() throws IOException { AlreadyExpiredException alreadyExpiredException = serialize(new AlreadyExpiredException("index", "type", "id", 1, 2, 3)); - assertEquals("index", alreadyExpiredException.getIndex()); + assertEquals("index", alreadyExpiredException.getIndex().getName()); assertEquals("type", alreadyExpiredException.type()); assertEquals("id", alreadyExpiredException.id()); assertEquals(2, alreadyExpiredException.ttl()); @@ -303,7 +303,7 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testRecoverFilesRecoveryException() throws IOException { - ShardId id = new ShardId("foo", 1); + ShardId id = new ShardId("foo", "_na_", 1); ByteSizeValue bytes = new ByteSizeValue(randomIntBetween(0, 10000)); RecoverFilesRecoveryException ex = serialize(new RecoverFilesRecoveryException(id, 10, bytes, null)); assertEquals(ex.getShardId(), id); @@ -331,7 +331,7 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testBatchOperationException() throws IOException { - ShardId id = new ShardId("foo", 1); + ShardId id = new ShardId("foo", "_na_", 1); TranslogRecoveryPerformer.BatchOperationException ex = serialize(new TranslogRecoveryPerformer.BatchOperationException(id, "batched the fucker", 666, null)); assertEquals(ex.getShardId(), id); assertEquals(666, ex.completedOperations()); @@ -368,7 +368,7 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testPercolateException() throws IOException { - ShardId id = new ShardId("foo", 1); + ShardId id = new ShardId("foo", "_na_", 1); PercolateException ex = serialize(new PercolateException(id, "percolate my ass", 
null)); assertEquals(id, ex.getShardId()); assertEquals("percolate my ass", ex.getMessage()); @@ -402,9 +402,9 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testIndexFailedEngineException() throws IOException { - ShardId id = new ShardId("foo", 1); + ShardId id = new ShardId("foo", "_na_", 1); IndexFailedEngineException ex = serialize(new IndexFailedEngineException(id, "type", "id", null)); - assertEquals(ex.getShardId(), new ShardId("foo", 1)); + assertEquals(ex.getShardId(), new ShardId("foo", "_na_", 1)); assertEquals("type", ex.type()); assertEquals("id", ex.id()); assertNull(ex.getCause()); @@ -433,7 +433,7 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testIllegalIndexShardStateException() throws IOException { - ShardId id = new ShardId("foo", 1); + ShardId id = new ShardId("foo", "_na_", 1); IndexShardState state = randomFrom(IndexShardState.values()); IllegalIndexShardStateException ex = serialize(new IllegalIndexShardStateException(id, state, "come back later buddy")); assertEquals(id, ex.getShardId()); @@ -477,7 +477,7 @@ public class ExceptionSerializationTests extends ESTestCase { public void testRoutingMissingException() throws IOException { RoutingMissingException ex = serialize(new RoutingMissingException("idx", "type", "id")); - assertEquals("idx", ex.getIndex()); + assertEquals("idx", ex.getIndex().getName()); assertEquals("type", ex.getType()); assertEquals("id", ex.getId()); assertEquals("routing is required for [idx]/[type]/[id]", ex.getMessage()); @@ -505,7 +505,7 @@ public class ExceptionSerializationTests extends ESTestCase { public void testRecoveryEngineException() throws IOException { - ShardId id = new ShardId("foo", 1); + ShardId id = new ShardId("foo", "_na_", 1); RecoveryEngineException ex = serialize(new RecoveryEngineException(id, 10, "total failure", new NullPointerException())); assertEquals(id, ex.getShardId()); assertEquals("Phase[10] total failure", ex.getMessage()); diff --git a/core/src/test/java/org/elasticsearch/action/ListenerActionIT.java b/core/src/test/java/org/elasticsearch/action/ListenerActionIT.java index f68cb76c955..a9bb96a0a9f 100644 --- a/core/src/test/java/org/elasticsearch/action/ListenerActionIT.java +++ b/core/src/test/java/org/elasticsearch/action/ListenerActionIT.java @@ -61,7 +61,7 @@ public class ListenerActionIT extends ESIntegTestCase { latch.await(); - boolean shouldBeThreaded = DiscoveryNode.clientNode(client.settings()) || TransportClient.CLIENT_TYPE.equals(client.settings().get(Client.CLIENT_TYPE_SETTING)); + boolean shouldBeThreaded = DiscoveryNode.clientNode(client.settings()) || TransportClient.CLIENT_TYPE.equals(Client.CLIENT_TYPE_SETTING_S.get(client.settings())); if (shouldBeThreaded) { assertTrue(threadName.get().contains("listener")); } else { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index d8d4f2656c8..3fbac003419 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -159,7 +159,7 @@ public class TransportTasksActionTests extends ESTestCase { } public NodeRequest(NodesRequest request, String nodeId) { - super(request, nodeId); + super(nodeId); requestName = request.requestName; enableTaskManager = 
request.enableTaskManager; } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 4d53d6cd1e5..ed4e8cd387e 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -57,7 +57,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter") .put("index.analysis.analyzer.custom_analyzer.tokenizer", "whitespace") .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter").build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); environment = new Environment(settings); registry = new AnalysisRegistry(null, environment); analysisService = registry.build(idxSettings); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java index 8439e98f0ee..18b22b95cb1 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java @@ -148,7 +148,7 @@ public class SyncedFlushUnitTests extends ESTestCase { int failures = 0; List shardsResults = new ArrayList<>(); for (int shard = 0; shard < shards; shard++) { - final ShardId shardId = new ShardId(index, shard); + final ShardId shardId = new ShardId(index, "_na_", shard); if (randomInt(5) < 2) { // total shard failure failed += replicas + 1; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java index aa30c89ef59..66c2a0183e8 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java @@ -71,7 +71,7 @@ public class BulkRequestModifierTests extends ESTestCase { BulkResponse bulkResponse = actionListener.getResponse(); for (int j = 0; j < bulkResponse.getItems().length; j++) { if (failedSlots.contains(j)) { - BulkItemResponse item = bulkResponse.getItems()[j]; + BulkItemResponse item = bulkResponse.getItems()[j]; assertThat(item.isFailed(), is(true)); assertThat(item.getFailure().getIndex(), equalTo("_index")); assertThat(item.getFailure().getType(), equalTo("_type")); @@ -116,7 +116,7 @@ public class BulkRequestModifierTests extends ESTestCase { List originalResponses = new ArrayList<>(); for (ActionRequest actionRequest : bulkRequest.requests()) { IndexRequest indexRequest = (IndexRequest) actionRequest; - IndexResponse indexResponse = new IndexResponse(new ShardId("index", 0), indexRequest.type(), indexRequest.id(), 1, true); + IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.type(), indexRequest.id(), 1, true); originalResponses.add(new BulkItemResponse(Integer.parseInt(indexRequest.id()), indexRequest.opType().lowercase(), indexResponse)); } bulkResponseListener.onResponse(new BulkResponse(originalResponses.toArray(new 
BulkItemResponse[originalResponses.size()]), 0)); @@ -151,7 +151,7 @@ public class BulkRequestModifierTests extends ESTestCase { @Override public void onResponse(BulkResponse bulkItemResponses) { - this.response = bulkItemResponses ; + this.response = bulkItemResponses; } @Override diff --git a/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java b/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java index 816e1a110e5..40359e1c749 100644 --- a/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java @@ -20,57 +20,124 @@ package org.elasticsearch.action.support; import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.test.ESTestCase; +import static org.hamcrest.CoreMatchers.equalTo; + public class AutoCreateIndexTests extends ESTestCase { - public void testBasic() { - { - AutoCreateIndex autoCreateIndex = new AutoCreateIndex(Settings.EMPTY, new IndexNameExpressionResolver(Settings.EMPTY)); - ClusterState cs = buildClusterState("foo"); - assertFalse("already exists", autoCreateIndex.shouldAutoCreate("foo", cs)); - assertTrue(autoCreateIndex.shouldAutoCreate("foobar", cs)); - } - { - AutoCreateIndex autoCreateIndex = new AutoCreateIndex(Settings.builder().put("action.auto_create_index", "-foo,+b*").build(), new IndexNameExpressionResolver(Settings.EMPTY)); - ClusterState cs = buildClusterState("foobar", "baz"); - assertFalse(autoCreateIndex.shouldAutoCreate("foo", cs)); - assertTrue(autoCreateIndex.shouldAutoCreate("bar", cs)); - assertFalse("already exists", autoCreateIndex.shouldAutoCreate("baz", cs)); - } - - { - AutoCreateIndex autoCreateIndex = new AutoCreateIndex(Settings.builder().put("action.auto_create_index", "-foo,+b*").put("index.mapper.dynamic", false).build(), new IndexNameExpressionResolver(Settings.EMPTY)); - ClusterState cs = buildClusterState("foobar", "baz"); - assertFalse(autoCreateIndex.shouldAutoCreate("foo", cs)); - assertFalse(autoCreateIndex.shouldAutoCreate("bar", cs)); - assertFalse("already exists", autoCreateIndex.shouldAutoCreate("baz", cs)); - } - - { - AutoCreateIndex autoCreateIndex = new AutoCreateIndex(Settings.builder().put("action.auto_create_index", false).put("index.mapper.dynamic", false).build(), new IndexNameExpressionResolver(Settings.EMPTY)); - ClusterState cs = buildClusterState("foobar", "baz"); - assertFalse(autoCreateIndex.shouldAutoCreate("foo", cs)); - assertFalse(autoCreateIndex.shouldAutoCreate("bar", cs)); - assertFalse("already exists", autoCreateIndex.shouldAutoCreate("baz", cs)); - } - } - public void testParseFailed() { try { new AutoCreateIndex(Settings.builder().put("action.auto_create_index", ",,,").build(), new IndexNameExpressionResolver(Settings.EMPTY)); - }catch (IllegalArgumentException ex) { - assertEquals("Can't parse [,,,] for setting [action.auto_create_index] must be either [true, false, or a comma seperated list of index patterns]", ex.getMessage()); + fail("initialization should have failed"); + } catch (IllegalArgumentException ex) { + assertEquals("Can't parse [,,,] for setting 
[action.auto_create_index] must be either [true, false, or a comma separated list of index patterns]", ex.getMessage()); } - } - public ClusterState buildClusterState(String... indices) { + public void testParseFailedMissingIndex() { + String prefix = randomFrom("+", "-"); + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), prefix).build(); + try { + new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings)); + fail("initialization should have failed"); + } catch(IllegalArgumentException ex) { + assertEquals("Can't parse [" + prefix + "] for setting [action.auto_create_index] must contain an index name after [" + prefix + "]", ex.getMessage()); + } + } + + public void testAutoCreationDisabled() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(Settings.EMPTY)); + assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(false)); + } + + public void testAutoCreationEnabled() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(Settings.EMPTY)); + assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(true)); + } + + public void testDefaultAutoCreation() { + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(Settings.EMPTY, new IndexNameExpressionResolver(Settings.EMPTY)); + assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(true)); + } + + public void testExistingIndex() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, false, randomAsciiOfLengthBetween(7, 10))).build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(Settings.EMPTY)); + assertThat(autoCreateIndex.shouldAutoCreate(randomFrom("index1", "index2", "index3"), buildClusterState("index1", "index2", "index3")), equalTo(false)); + } + + public void testDynamicMappingDisabled() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, randomAsciiOfLengthBetween(1, 10))) + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false).build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(Settings.EMPTY)); + assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(false)); + } + + public void testAutoCreationPatternEnabled() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom("+index*", "index*")).build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings)); + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); + assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(true)); + assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + } + + public void testAutoCreationPatternDisabled() { + Settings settings = 
Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "-index*").build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings)); + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); + assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + //default is false when patterns are specified + assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + } + + public void testAutoCreationMultiplePatternsWithWildcards() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom("+test*,-index*", "test*,-index*")).build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings)); + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); + assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + assertThat(autoCreateIndex.shouldAutoCreate("test" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(true)); + assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + } + + public void testAutoCreationMultiplePatternsNoWildcards() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "+test1,-index1").build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings)); + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); + assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true)); + assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + assertThat(autoCreateIndex.shouldAutoCreate("test" + randomAsciiOfLengthBetween(2, 5), clusterState), equalTo(false)); + assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + } + + public void testAutoCreationMultipleIndexNames() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "test1,test2").build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings)); + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); + assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true)); + assertThat(autoCreateIndex.shouldAutoCreate("test2", clusterState), equalTo(true)); + assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + } + + public void testAutoCreationConflictingPatternsFirstWins() { + Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "+test1,-test1,-test2,+test2").build(); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings)); + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); + assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true)); + 
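+ // patterns are evaluated left to right and the first match wins: +test1 precedes -test1 (creation allowed), while -test2 precedes +test2 (creation denied)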
assertThat(autoCreateIndex.shouldAutoCreate("test2", clusterState), equalTo(false)); + assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false)); + } + + private static ClusterState buildClusterState(String... indices) { MetaData.Builder metaData = MetaData.builder(); for (String index : indices) { metaData.put(IndexMetaData.builder(index).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)); diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index d94049c036f..76307ccd806 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; @@ -198,7 +199,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { void setClusterState(TestClusterService clusterService, String index) { int numberOfNodes = randomIntBetween(3, 5); DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index); + IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(new Index(index,"_na_")); int shardIndex = -1; for (int i = 0; i < numberOfNodes; i++) { @@ -206,7 +207,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { discoBuilder = discoBuilder.put(node); int numberOfShards = randomIntBetween(1, 10); for (int j = 0; j < numberOfShards; j++) { - final ShardId shardId = new ShardId(index, ++shardIndex); + final ShardId shardId = new ShardId(index, "_na_", ++shardIndex); ShardRouting shard = TestShardRouting.newShardRouting(index, shardId.getId(), node.id(), true, ShardRoutingState.STARTED, 1); IndexShardRoutingTable.Builder indexShard = new IndexShardRoutingTable.Builder(shardId); indexShard.addShard(shard); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index c3084b93eb8..fccdd494af7 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -147,7 +147,7 @@ public class BroadcastReplicationTests extends ESTestCase { if (shardsSucceeded == 1 && randomBoolean()) { //sometimes add failure (no failure means shard unavailable) failures = new ReplicationResponse.ShardInfo.Failure[1]; - failures[0] = new ReplicationResponse.ShardInfo.Failure(index, shardRequests.v1().id(), null, new Exception("pretend shard failed"), RestStatus.GATEWAY_TIMEOUT, false); + failures[0] = new ReplicationResponse.ShardInfo.Failure(shardRequests.v1(), null, new Exception("pretend shard failed"), RestStatus.GATEWAY_TIMEOUT, false); failed++; } 
replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(2, shardsSucceeded, failures)); @@ -171,11 +171,11 @@ public class BroadcastReplicationTests extends ESTestCase { public void testShardsList() throws InterruptedException, ExecutionException { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); ClusterState clusterState = state(index, randomBoolean(), randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - List shards = broadcastReplicationAction.shards(new DummyBroadcastRequest().indices(shardId.index().name()), clusterState); + List shards = broadcastReplicationAction.shards(new DummyBroadcastRequest().indices(shardId.getIndexName()), clusterState); assertThat(shards.size(), equalTo(1)); assertThat(shards.get(0), equalTo(shardId)); } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index 913d52d5b17..49a5e072e1f 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -74,7 +74,7 @@ public class ClusterStateCreationUtils { } } numberOfNodes = Math.max(2, numberOfNodes); // we need a non-local master to test shard failures - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); Set unassignedNodes = new HashSet<>(); for (int i = 0; i < numberOfNodes + 1; i++) { @@ -131,7 +131,7 @@ public class ClusterStateCreationUtils { ClusterState.Builder state = ClusterState.builder(new ClusterName("test")); state.nodes(discoBuilder); state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded()); - state.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(index).addIndexShard(indexShardRoutingBuilder.build())).build()); + state.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(indexMetaData.getIndex()).addIndexShard(indexShardRoutingBuilder.build())).build()); return state.build(); } @@ -155,11 +155,11 @@ public class ClusterStateCreationUtils { ClusterState.Builder state = ClusterState.builder(new ClusterName("test")); state.nodes(discoBuilder); state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded()); - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(indexMetaData.getIndex()); for (int i = 0; i < numberOfShards; i++) { RoutingTable.Builder routing = new RoutingTable.Builder(); routing.addAsNew(indexMetaData); - final ShardId shardId = new ShardId(index, i); + final ShardId shardId = new ShardId(index, "_na_", i); IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId); indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(0).id(), null, null, true, ShardRoutingState.STARTED, 0, null)); indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(1).id(), null, null, false, ShardRoutingState.STARTED, 0, null)); diff --git 
a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 9fdbdf1cb38..c868b0d036f 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -171,7 +171,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testNotStartedPrimary() throws InterruptedException, ExecutionException { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); // no replicas in oder to skip the replication part clusterService.setState(state(index, true, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED)); @@ -209,12 +209,12 @@ public class TransportReplicationActionTests extends ESTestCase { clusterService.setState(state(index, true, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - Request request = new Request(new ShardId("unknown_index", 0)).timeout("1ms"); + Request request = new Request(new ShardId("unknown_index", "_na_", 0)).timeout("1ms"); PlainActionFuture listener = new PlainActionFuture<>(); TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener); reroutePhase.run(); assertListenerThrows("must throw index not found exception", listener, IndexNotFoundException.class); - request = new Request(new ShardId(index, 10)).timeout("1ms"); + request = new Request(new ShardId(index, "_na_", 10)).timeout("1ms"); listener = new PlainActionFuture<>(); reroutePhase = action.new ReroutePhase(request, listener); reroutePhase.run(); @@ -223,7 +223,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testRoutePhaseExecutesRequest() { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); clusterService.setState(stateWithStartedPrimary(index, randomBoolean(), 3)); @@ -251,7 +251,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testPrimaryPhaseExecutesRequest() throws InterruptedException, ExecutionException { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); Request request = new Request(shardId).timeout("1ms"); PlainActionFuture listener = new PlainActionFuture<>(); @@ -267,7 +267,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testAddedReplicaAfterPrimaryOperation() { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); // start with no replicas clusterService.setState(stateWithStartedPrimary(index, true, 0)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); @@ -300,7 +300,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testRelocatingReplicaAfterPrimaryOperation() { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); // start 
with a replica clusterService.setState(state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); @@ -334,7 +334,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testIndexDeletedAfterPrimaryOperation() { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); final ClusterState stateWithDeletedIndex = state(index + "_new", true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING); @@ -361,7 +361,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testWriteConsistency() throws ExecutionException, InterruptedException { action = new ActionWithConsistency(Settings.EMPTY, "testActionWithConsistency", transportService, clusterService, threadPool); final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); final int assignedReplicas = randomInt(2); final int unassignedReplicas = randomInt(2); final int totalShards = 1 + assignedReplicas + unassignedReplicas; @@ -430,7 +430,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testReplication() throws ExecutionException, InterruptedException { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); @@ -453,7 +453,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testReplicationWithShadowIndex() throws ExecutionException, InterruptedException { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); ClusterState state = stateWithStartedPrimary(index, true, randomInt(5)); MetaData.Builder metaData = MetaData.builder(state.metaData()); @@ -511,7 +511,7 @@ public class TransportReplicationActionTests extends ESTestCase { assertThat(nodesSentTo.keySet(), not(hasItem(clusterService.state().getNodes().localNodeId()))); // requests were sent to the correct shard copies - for (ShardRouting shard : clusterService.state().getRoutingTable().shardRoutingTable(shardId.getIndex(), shardId.id())) { + for (ShardRouting shard : clusterService.state().getRoutingTable().shardRoutingTable(shardId)) { if (shard.primary() == false && executeOnReplica == false) { continue; } @@ -602,7 +602,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testCounterOnPrimary() throws Exception { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); // no replica, we only want to test on primary clusterService.setState(state(index, true, ShardRoutingState.STARTED)); @@ -640,7 +640,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testCounterIncrementedWhileReplicationOngoing() throws InterruptedException, ExecutionException, IOException { final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); // one replica to make sure 
replication is attempted clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); @@ -670,8 +670,8 @@ public class TransportReplicationActionTests extends ESTestCase { } public void testReplicasCounter() throws Exception { - final ShardId shardId = new ShardId("test", 0); - clusterService.setState(state(shardId.index().getName(), true, + final ShardId shardId = new ShardId("test", "_na_", 0); + clusterService.setState(state(shardId.getIndexName(), true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); action = new ActionWithDelay(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool); final Action.ReplicaOperationTransportHandler replicaOperationTransportHandler = action.new ReplicaOperationTransportHandler(); @@ -706,7 +706,7 @@ public class TransportReplicationActionTests extends ESTestCase { public void testCounterDecrementedIfShardOperationThrowsException() throws InterruptedException, ExecutionException, IOException { action = new ActionWithExceptions(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool); final String index = "test"; - final ShardId shardId = new ShardId(index, 0); + final ShardId shardId = new ShardId(index, "_na_", 0); clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); @@ -749,7 +749,7 @@ public class TransportReplicationActionTests extends ESTestCase { Request(ShardId shardId) { this(); this.shardId = shardId; - this.index = shardId.getIndex(); + this.index = shardId.getIndexName(); // keep things simple } @@ -773,7 +773,7 @@ public class TransportReplicationActionTests extends ESTestCase { ClusterService clusterService, ThreadPool threadPool) { super(settings, actionName, transportService, clusterService, null, threadPool, - new ShardStateAction(settings, clusterService, transportService, null, null), null, + new ShardStateAction(settings, clusterService, transportService, null, null, threadPool), null, new ActionFilters(new HashSet()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); } diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 55260957105..bcb26613388 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.test.ESTestCase; @@ -144,7 +145,7 @@ public class UpdateRequestTests extends ESTestCase { // We simulate that the document is not existing yet GetResult getResult = new GetResult("test", "type1", "1", 0, false, null, null); - UpdateHelper.Result result = updateHelper.prepare(updateRequest, getResult); + UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0),updateRequest, getResult); Streamable action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); IndexRequest indexAction = 
(IndexRequest) action; @@ -161,7 +162,7 @@ public class UpdateRequestTests extends ESTestCase { // We simulate that the document is not existing yet getResult = new GetResult("test", "type1", "2", 0, false, null, null); - result = updateHelper.prepare(updateRequest, getResult); + result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult); action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); indexAction = (IndexRequest) action; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java index 43633fe6f27..26dcb214bea 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java @@ -262,7 +262,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndex())) { + if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) { String name = clusterState.nodes().get(shardRouting.currentNodeId()).name(); assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true)); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java index 0e2f94e3cf1..b1fc1d45dca 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java @@ -39,7 +39,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class TransportClientBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testSniffMode() throws ExecutionException, InterruptedException { Settings settings = Settings.builder().put(requiredSettings()).put("client.transport.nodes_sampler_interval", "1s") - .put("name", "transport_client_sniff_mode").put(ClusterName.SETTING, cluster().getClusterName()) + .put("name", "transport_client_sniff_mode").put(ClusterName.CLUSTER_NAME_SETTING.getKey(), cluster().getClusterName()) .put("client.transport.sniff", true).build(); CompositeTestCluster compositeTestCluster = backwardsCluster(); diff --git a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index 3bdfc1fb7ee..25a1e7005a7 100644 --- a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -23,35 +23,22 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import 
org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.flush.FlushAction; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteAction; -import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetAction; -import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptAction; -import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptResponse; import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportMessage; import org.junit.After; import org.junit.Before; @@ -59,7 +46,6 @@ import java.util.HashMap; import java.util.Map; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; /** @@ -68,8 +54,8 @@ import static org.hamcrest.Matchers.notNullValue; public abstract class AbstractClientHeadersTestCase extends ESTestCase { protected static final Settings HEADER_SETTINGS = Settings.builder() - .put(Headers.PREFIX + ".key1", "val1") - .put(Headers.PREFIX + ".key2", "val 2") + .put(ThreadContext.PREFIX + ".key1", "val1") + .put(ThreadContext.PREFIX + ".key2", "val 2") .build(); private static final GenericAction[] ACTIONS = new GenericAction[] { @@ -91,9 +77,11 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { public void initClient() { Settings settings = Settings.builder() .put(HEADER_SETTINGS) + .put("path.home", createTempDir().toString()) + .put("name", "test-" + getTestName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - threadPool = new ThreadPool("test-" + getTestName()); + threadPool = new ThreadPool(settings); client = buildClient(settings, ACTIONS); } @@ -114,89 +102,75 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { // validation in the settings??? 
- ugly and conceptually wrong) // choosing arbitrary top level actions to test - client.prepareGet("idx", "type", "id").execute().addListener(new AssertingActionListener(GetAction.NAME)); - client.prepareSearch().execute().addListener(new AssertingActionListener(SearchAction.NAME)); - client.prepareDelete("idx", "type", "id").execute().addListener(new AssertingActionListener(DeleteAction.NAME)); - client.prepareDeleteIndexedScript("lang", "id").execute().addListener(new AssertingActionListener(DeleteIndexedScriptAction.NAME)); - client.prepareIndex("idx", "type", "id").setSource("source").execute().addListener(new AssertingActionListener(IndexAction.NAME)); + client.prepareGet("idx", "type", "id").execute().addListener(new AssertingActionListener<>(GetAction.NAME, client.threadPool())); + client.prepareSearch().execute().addListener(new AssertingActionListener<>(SearchAction.NAME, client.threadPool())); + client.prepareDelete("idx", "type", "id").execute().addListener(new AssertingActionListener<>(DeleteAction.NAME, client.threadPool())); + client.prepareDeleteIndexedScript("lang", "id").execute().addListener(new AssertingActionListener<>(DeleteIndexedScriptAction.NAME, client.threadPool())); + client.prepareIndex("idx", "type", "id").setSource("source").execute().addListener(new AssertingActionListener<>(IndexAction.NAME, client.threadPool())); // choosing arbitrary cluster admin actions to test - client.admin().cluster().prepareClusterStats().execute().addListener(new AssertingActionListener(ClusterStatsAction.NAME)); - client.admin().cluster().prepareCreateSnapshot("repo", "bck").execute().addListener(new AssertingActionListener(CreateSnapshotAction.NAME)); - client.admin().cluster().prepareReroute().execute().addListener(new AssertingActionListener(ClusterRerouteAction.NAME)); + client.admin().cluster().prepareClusterStats().execute().addListener(new AssertingActionListener<>(ClusterStatsAction.NAME, client.threadPool())); + client.admin().cluster().prepareCreateSnapshot("repo", "bck").execute().addListener(new AssertingActionListener<>(CreateSnapshotAction.NAME, client.threadPool())); + client.admin().cluster().prepareReroute().execute().addListener(new AssertingActionListener<>(ClusterRerouteAction.NAME, client.threadPool())); // choosing arbitrary indices admin actions to test - client.admin().indices().prepareCreate("idx").execute().addListener(new AssertingActionListener(CreateIndexAction.NAME)); - client.admin().indices().prepareStats().execute().addListener(new AssertingActionListener(IndicesStatsAction.NAME)); - client.admin().indices().prepareClearCache("idx1", "idx2").execute().addListener(new AssertingActionListener(ClearIndicesCacheAction.NAME)); - client.admin().indices().prepareFlush().execute().addListener(new AssertingActionListener(FlushAction.NAME)); + client.admin().indices().prepareCreate("idx").execute().addListener(new AssertingActionListener<>(CreateIndexAction.NAME, client.threadPool())); + client.admin().indices().prepareStats().execute().addListener(new AssertingActionListener<>(IndicesStatsAction.NAME, client.threadPool())); + client.admin().indices().prepareClearCache("idx1", "idx2").execute().addListener(new AssertingActionListener<>(ClearIndicesCacheAction.NAME, client.threadPool())); + client.admin().indices().prepareFlush().execute().addListener(new AssertingActionListener<>(FlushAction.NAME, client.threadPool())); } public void testOverideHeader() throws Exception { String key1Val = randomAsciiOfLength(5); - Map expected = new HashMap<>(); + Map 
expected = new HashMap<>(); expected.put("key1", key1Val); expected.put("key2", "val 2"); - + client.threadPool().getThreadContext().putHeader("key1", key1Val); client.prepareGet("idx", "type", "id") - .putHeader("key1", key1Val) - .execute().addListener(new AssertingActionListener(GetAction.NAME, expected)); + .execute().addListener(new AssertingActionListener<>(GetAction.NAME, expected, client.threadPool())); client.admin().cluster().prepareClusterStats() - .putHeader("key1", key1Val) - .execute().addListener(new AssertingActionListener(ClusterStatsAction.NAME, expected)); + .execute().addListener(new AssertingActionListener<>(ClusterStatsAction.NAME, expected, client.threadPool())); client.admin().indices().prepareCreate("idx") - .putHeader("key1", key1Val) - .execute().addListener(new AssertingActionListener(CreateIndexAction.NAME, expected)); + .execute().addListener(new AssertingActionListener<>(CreateIndexAction.NAME, expected, client.threadPool())); } - protected static void assertHeaders(Map headers, Map expected) { - assertThat(headers, notNullValue()); - assertThat(headers.size(), is(expected.size())); - for (Map.Entry expectedEntry : expected.entrySet()) { - assertThat(headers.get(expectedEntry.getKey()), equalTo(expectedEntry.getValue())); + protected static void assertHeaders(Map headers, Map expected) { + assertNotNull(headers); + assertEquals(expected.size(), headers.size()); + for (Map.Entry expectedEntry : expected.entrySet()) { + assertEquals(headers.get(expectedEntry.getKey()), expectedEntry.getValue()); } } - protected static void assertHeaders(TransportMessage message) { - assertHeaders(message, HEADER_SETTINGS.getAsSettings(Headers.PREFIX).getAsStructuredMap()); - } - - protected static void assertHeaders(TransportMessage message, Map expected) { - assertThat(message.getHeaders(), notNullValue()); - assertThat(message.getHeaders().size(), is(expected.size())); - for (Map.Entry expectedEntry : expected.entrySet()) { - assertThat(message.getHeader(expectedEntry.getKey()), equalTo(expectedEntry.getValue())); - } + protected static void assertHeaders(ThreadPool pool) { + assertHeaders(pool.getThreadContext().getHeaders(), (Map)HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsStructuredMap()); } public static class InternalException extends Exception { private final String action; - private final Map headers; - public InternalException(String action, TransportMessage message) { + public InternalException(String action) { this.action = action; - this.headers = new HashMap<>(); - for (String key : message.getHeaders()) { - headers.put(key, message.getHeader(key)); - } } } protected static class AssertingActionListener implements ActionListener { private final String action; - private final Map expectedHeaders; + private final Map expectedHeaders; + private final ThreadPool pool; - public AssertingActionListener(String action) { - this(action, HEADER_SETTINGS.getAsSettings(Headers.PREFIX).getAsStructuredMap()); + public AssertingActionListener(String action, ThreadPool pool) { + this(action, (Map)HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsStructuredMap(), pool); } - public AssertingActionListener(String action, Map expectedHeaders) { + public AssertingActionListener(String action, Map expectedHeaders, ThreadPool pool) { this.action = action; this.expectedHeaders = expectedHeaders; + this.pool = pool; } @Override @@ -209,7 +183,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { Throwable e = unwrap(t, InternalException.class); 
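Note: the client header tests above no longer attach headers to each request; they put them on the client's ThreadContext, and the asserting listener reads them back from there. A hedged sketch of the new flow, assuming the test's existing client and listener fixtures:

    // Headers live on the thread context shared by the client's thread pool,
    // not on individual TransportMessage instances.
    ThreadContext threadContext = client.threadPool().getThreadContext();
    threadContext.putHeader("key1", "override");              // takes precedence over the value from HEADER_SETTINGS
    client.prepareGet("idx", "type", "id").execute(listener);
    Map<String, String> observed = threadContext.getHeaders(); // what AssertingActionListener now inspects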
assertThat("expected action [" + action + "] to throw an internal exception", e, notNullValue()); assertThat(action, equalTo(((InternalException) e).action)); - Map headers = ((InternalException) e).headers; + Map headers = pool.getThreadContext().getHeaders(); assertHeaders(headers, expectedHeaders); } diff --git a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index e7ba8de0f97..f69c8f2da0b 100644 --- a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.AbstractClientHeadersTestCase; import org.elasticsearch.client.Client; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; @@ -46,9 +45,8 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase { @Override protected Client buildClient(Settings headersSettings, GenericAction[] testedActions) { Settings settings = HEADER_SETTINGS; - Headers headers = new Headers(settings); Actions actions = new Actions(settings, threadPool, testedActions); - return new NodeClient(settings, threadPool, headers, actions); + return new NodeClient(settings, threadPool, actions); } private static class Actions extends HashMap { @@ -68,7 +66,7 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase { @Override protected void doExecute(ActionRequest request, ActionListener listener) { - listener.onFailure(new InternalException(actionName, request)); + listener.onFailure(new InternalException(actionName)); } } diff --git a/core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java b/core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java index 966553b8f3d..ae839e8837e 100644 --- a/core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java @@ -34,12 +34,12 @@ import static org.hamcrest.Matchers.is; public class NodeClientIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Client.CLIENT_TYPE_SETTING, "anything").build(); + return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Client.CLIENT_TYPE_SETTING_S.getKey(), "anything").build(); } public void testThatClientTypeSettingCannotBeChanged() { for (Settings settings : internalCluster().getInstances(Settings.class)) { - assertThat(settings.get(Client.CLIENT_TYPE_SETTING), is("node")); + assertThat(Client.CLIENT_TYPE_SETTING_S.get(settings), is("node")); } } } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index e61dab2fc4b..221b425cae7 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -135,30 +135,30 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase { @Override @SuppressWarnings("unchecked") public void sendRequest(DiscoveryNode node, String action, 
TransportRequest request, TransportRequestOptions options, TransportResponseHandler handler) { if (TransportLivenessAction.NAME.equals(action)) { - assertHeaders(request); + assertHeaders(threadPool); ((TransportResponseHandler) handler).handleResponse(new LivenessResponse(ClusterName.DEFAULT, node)); return; } if (ClusterStateAction.NAME.equals(action)) { - assertHeaders(request); + assertHeaders(threadPool); ClusterName cluster1 = new ClusterName("cluster1"); ((TransportResponseHandler) handler).handleResponse(new ClusterStateResponse(cluster1, state(cluster1))); clusterStateLatch.countDown(); return; } - handler.handleException(new TransportException("", new InternalException(action, request))); + handler.handleException(new TransportException("", new InternalException(action))); } @Override public boolean nodeConnected(DiscoveryNode node) { - assertThat((LocalTransportAddress) node.getAddress(), equalTo(address)); + assertThat(node.getAddress(), equalTo(address)); return true; } @Override public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - assertThat((LocalTransportAddress) node.getAddress(), equalTo(address)); + assertThat(node.getAddress(), equalTo(address)); } } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index 3bf05f21c4c..a561b5bcf7b 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -58,7 +58,7 @@ public class TransportClientIT extends ESIntegTestCase { .put("http.enabled", false) .put(Node.NODE_DATA_SETTING.getKey(), false) .put("cluster.name", "foobar") - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) .build()); node.start(); try { @@ -86,17 +86,16 @@ public class TransportClientIT extends ESIntegTestCase { public void testThatTransportClientSettingIsSet() { TransportClient client = (TransportClient) internalCluster().client(); Settings settings = client.injector.getInstance(Settings.class); - assertThat(settings.get(Client.CLIENT_TYPE_SETTING), is("transport")); + assertThat(Client.CLIENT_TYPE_SETTING_S.get(settings), is("transport")); } public void testThatTransportClientSettingCannotBeChanged() { Settings baseSettings = settingsBuilder() - .put(Client.CLIENT_TYPE_SETTING, "anything") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); try (TransportClient client = TransportClient.builder().settings(baseSettings).build()) { Settings settings = client.injector.getInstance(Settings.class); - assertThat(settings.get(Client.CLIENT_TYPE_SETTING), is("transport")); + assertThat(Client.CLIENT_TYPE_SETTING_S.get(settings), is("transport")); } } } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 72ace64d9ee..e6ea0410a55 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.client.transport; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; 
-import org.elasticsearch.client.support.Headers; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; @@ -74,7 +73,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { }; transportService = new TransportService(Settings.EMPTY, transport, threadPool); transportService.start(); - transportClientNodesService = new TransportClientNodesService(Settings.EMPTY, ClusterName.DEFAULT, transportService, threadPool, Headers.EMPTY, Version.CURRENT); + transportClientNodesService = new TransportClientNodesService(Settings.EMPTY, ClusterName.DEFAULT, transportService, threadPool, Version.CURRENT); nodesCount = randomIntBetween(1, 10); for (int i = 0; i < nodesCount; i++) { diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index afb693db1e5..dcb5ac46948 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -55,8 +55,8 @@ public class TransportClientRetryIT extends ESIntegTestCase { Settings.Builder builder = settingsBuilder().put("client.transport.nodes_sampler_interval", "1s") .put("name", "transport_client_retry_test") .put(Node.NODE_MODE_SETTING.getKey(), internalCluster().getNodeMode()) - .put(ClusterName.SETTING, internalCluster().getClusterName()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), internalCluster().getClusterName()) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); try (TransportClient transportClient = TransportClient.builder().settings(builder.build()).build()) { diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 8b79b78397b..6f3fdee36df 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; @@ -223,11 +224,11 @@ public class ClusterStateDiffIT extends ESIntegTestCase { * Randomly updates index routing table in the cluster state */ private IndexRoutingTable randomIndexRoutingTable(String index, String[] nodeIds) { - IndexRoutingTable.Builder builder = IndexRoutingTable.builder(index); + IndexRoutingTable.Builder builder = IndexRoutingTable.builder(new Index(index, "_na_")); int shardCount = randomInt(10); for (int i = 0; i < shardCount; i++) { - IndexShardRoutingTable.Builder indexShard = new IndexShardRoutingTable.Builder(new ShardId(index, i)); + IndexShardRoutingTable.Builder indexShard = new IndexShardRoutingTable.Builder(new ShardId(index, "_na_", i)); int replicaCount = randomIntBetween(1, 10); for (int j = 0; j < replicaCount; j++) { UnassignedInfo unassignedInfo = null; diff --git 
a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java index f581e4c91f6..9ee423b91df 100644 --- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.monitor.fs.FsInfo; @@ -91,13 +92,14 @@ public class DiskUsageTests extends ESTestCase { } public void testFillShardLevelInfo() { - ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + final Index index = new Index("test", "_na_"); + ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_0, "node1"); ShardRoutingHelper.moveToStarted(test_0); Path test0Path = createTempDir().resolve("indices").resolve("test").resolve("0"); CommonStats commonStats0 = new CommonStats(); commonStats0.store = new StoreStats(100, 1); - ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_1, "node2"); ShardRoutingHelper.moveToStarted(test_1); Path test1Path = createTempDir().resolve("indices").resolve("test").resolve("1"); diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java index 9f646d0df58..2ec3b11a164 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java @@ -38,6 +38,8 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -60,7 +62,7 @@ public class AckIT extends ESIntegTestCase { //otherwise the operation is most likely acknowledged even if it doesn't support ack return Settings.builder().put(super.nodeSettings(nodeOrdinal)) .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), 0).build(); -} + } public void testUpdateSettingsAcknowledgement() { createIndex("test"); @@ -83,13 +85,15 @@ public class AckIT extends ESIntegTestCase { public void testClusterRerouteAcknowledgement() throws InterruptedException { assertAcked(prepareCreate("test").setSettings(Settings.builder() - .put(indexSettings()) - .put(SETTING_NUMBER_OF_SHARDS, between(cluster().numDataNodes(), DEFAULT_MAX_NUM_SHARDS)) - .put(SETTING_NUMBER_OF_REPLICAS, 0) + .put(indexSettings()) + .put(SETTING_NUMBER_OF_SHARDS, between(cluster().numDataNodes(), DEFAULT_MAX_NUM_SHARDS)) + .put(SETTING_NUMBER_OF_REPLICAS, 0) )); 
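Note: in the AckIT, ClusterRerouteIT and PrimaryAllocationIT hunks that follow, allocation commands take an index name and shard number rather than a ShardId, so the tests rebuild a ShardId from the cluster state when they still need one for comparisons. A hedged sketch, assuming an existing clusterState and illustrative node names:

    // MoveAllocationCommand no longer accepts a ShardId directly.
    MoveAllocationCommand command = new MoveAllocationCommand("test", 0, "node1", "node2");
    // Recover the full ShardId (with the index's real UUID) from the metadata for equality checks.
    Index index = clusterState.metaData().index("test").getIndex();
    ShardId commandShard = new ShardId(index, command.shardId());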
ensureGreen(); MoveAllocationCommand moveAllocationCommand = getAllocationCommand(); + final Index index = client().admin().cluster().prepareState().get().getState().metaData().index("test").getIndex(); + final ShardId commandShard = new ShardId(index, moveAllocationCommand.shardId()); assertAcked(client().admin().cluster().prepareReroute().add(moveAllocationCommand)); @@ -97,7 +101,7 @@ public class AckIT extends ESIntegTestCase { ClusterState clusterState = getLocalClusterState(client); for (ShardRouting shardRouting : clusterState.getRoutingNodes().routingNodeIter(moveAllocationCommand.fromNode())) { //if the shard that we wanted to move is still on the same node, it must be relocating - if (shardRouting.shardId().equals(moveAllocationCommand.shardId())) { + if (shardRouting.shardId().equals(commandShard)) { assertThat(shardRouting.relocating(), equalTo(true)); } @@ -105,7 +109,7 @@ public class AckIT extends ESIntegTestCase { boolean found = false; for (ShardRouting shardRouting : clusterState.getRoutingNodes().routingNodeIter(moveAllocationCommand.toNode())) { - if (shardRouting.shardId().equals(moveAllocationCommand.shardId())) { + if (shardRouting.shardId().equals(commandShard)) { assertThat(shardRouting.state(), anyOf(equalTo(ShardRoutingState.INITIALIZING), equalTo(ShardRoutingState.STARTED))); found = true; break; @@ -137,6 +141,9 @@ public class AckIT extends ESIntegTestCase { MoveAllocationCommand moveAllocationCommand = getAllocationCommand(); + final Index index = client().admin().cluster().prepareState().get().getState().metaData().index("test").getIndex(); + final ShardId commandShard = new ShardId(index, moveAllocationCommand.shardId()); + assertAcked(client().admin().cluster().prepareReroute().setDryRun(true).add(moveAllocationCommand)); //testing only on master with the latest cluster state as we didn't make any change thus we cannot guarantee that @@ -145,7 +152,7 @@ public class AckIT extends ESIntegTestCase { boolean found = false; for (ShardRouting shardRouting : clusterStateResponse.getState().getRoutingNodes().routingNodeIter(moveAllocationCommand.fromNode())) { //the shard that we wanted to move is still on the same node, as we had dryRun flag - if (shardRouting.shardId().equals(moveAllocationCommand.shardId())) { + if (shardRouting.shardId().equals(commandShard)) { assertThat(shardRouting.started(), equalTo(true)); found = true; break; @@ -154,7 +161,7 @@ public class AckIT extends ESIntegTestCase { assertThat(found, equalTo(true)); for (ShardRouting shardRouting : clusterStateResponse.getState().getRoutingNodes().routingNodeIter(moveAllocationCommand.toNode())) { - if (shardRouting.shardId().equals(moveAllocationCommand.shardId())) { + if (shardRouting.shardId().equals(commandShard)) { fail("shard [" + shardRouting + "] shouldn't be on node [" + moveAllocationCommand.toString() + "]"); } } @@ -199,7 +206,7 @@ public class AckIT extends ESIntegTestCase { assertNotNull(shardToBeMoved); logger.info("==> going to move shard [{}] from [{}] to [{}]", shardToBeMoved, fromNodeId, toNodeId); - return new MoveAllocationCommand(shardToBeMoved.shardId(), fromNodeId, toNodeId); + return new MoveAllocationCommand(shardToBeMoved.getIndexName(), shardToBeMoved.id(), fromNodeId, toNodeId); } public void testIndicesAliasesAcknowledgement() { diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index 30d4e48551f..8a13e6e6ddd 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -70,7 +70,7 @@ public class ShardStateActionTests extends ESTestCase { private static class TestShardStateAction extends ShardStateAction { public TestShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { - super(settings, clusterService, transportService, allocationService, routingService); + super(settings, clusterService, transportService, allocationService, routingService, THREAD_POOL); } private Runnable onBeforeWaitForNewMasterAndRetry; diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 793cb0ce421..cc5ce05aca6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -100,7 +100,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, *under dry_run*"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true)) .setDryRun(true) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); @@ -113,7 +113,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, actually allocating, no dry run"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); @@ -129,7 +129,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> move shard 1 primary from node1 to node2"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2)) + .add(new MoveAllocationCommand("test", 0, node_1, node_2)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.RELOCATING)); @@ -212,7 +212,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, actually allocating, no dry run"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); @@ -228,7 +228,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { client().prepareIndex("test", "type", 
"1").setSource("field", "value").setRefresh(true).execute().actionGet(); logger.info("--> closing all nodes"); - Path[] shardLocation = internalCluster().getInstance(NodeEnvironment.class, node_1).availableShardPaths(new ShardId("test", 0)); + Path[] shardLocation = internalCluster().getInstance(NodeEnvironment.class, node_1).availableShardPaths(new ShardId("test", "_na_", 0)); assertThat(FileSystemUtils.exists(shardLocation), equalTo(true)); // make sure the data is there! internalCluster().closeNonSharedNodes(false); // don't wipe data directories the index needs to be there! @@ -246,7 +246,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate primary"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); @@ -291,7 +291,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { assertThat(healthResponse.isTimedOut(), equalTo(false)); logger.info("--> try to move the shard from node1 to node2"); - MoveAllocationCommand cmd = new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2); + MoveAllocationCommand cmd = new MoveAllocationCommand("test", 0, node_1, node_2); ClusterRerouteResponse resp = client().admin().cluster().prepareReroute().add(cmd).setExplain(true).execute().actionGet(); RoutingExplanations e = resp.getExplanations(); assertThat(e.explanations().size(), equalTo(1)); @@ -331,7 +331,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { try { enableIndexBlock("test-blocks", blockSetting); assertAcked(client().admin().cluster().prepareReroute() - .add(new MoveAllocationCommand(new ShardId("test-blocks", 0), nodesIds.get(toggle % 2), nodesIds.get(++toggle % 2)))); + .add(new MoveAllocationCommand("test-blocks", 0, nodesIds.get(toggle % 2), nodesIds.get(++toggle % 2)))); ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth().setWaitForYellowStatus().setWaitForRelocatingShards(0).execute().actionGet(); assertThat(healthResponse.isTimedOut(), equalTo(false)); @@ -344,7 +344,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { try { setClusterReadOnly(true); assertBlocked(client().admin().cluster().prepareReroute() - .add(new MoveAllocationCommand(new ShardId("test-blocks", 1), nodesIds.get(toggle % 2), nodesIds.get(++toggle % 2)))); + .add(new MoveAllocationCommand("test-blocks", 1, nodesIds.get(toggle % 2), nodesIds.get(++toggle % 2)))); } finally { setClusterReadOnly(false); } diff --git a/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java b/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java index 5f48c5abde9..0303f7c8947 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java +++ b/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java @@ -58,7 +58,7 @@ class RoutingTableGenerator { } public IndexShardRoutingTable genShardRoutingTable(String index, int shardId, int replicas, ShardCounter counter) { - IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(new ShardId(index, shardId)); + IndexShardRoutingTable.Builder builder 
= new IndexShardRoutingTable.Builder(new ShardId(index, "_na_", shardId)); ShardRouting shardRouting = genShardRouting(index, shardId, true); counter.update(shardRouting); builder.addShard(shardRouting); @@ -74,7 +74,7 @@ class RoutingTableGenerator { public IndexRoutingTable genIndexRoutingTable(IndexMetaData indexMetaData, ShardCounter counter) { IndexRoutingTable.Builder builder = IndexRoutingTable.builder(indexMetaData.getIndex()); for (int shard = 0; shard < indexMetaData.getNumberOfShards(); shard++) { - builder.addIndexShard(genShardRoutingTable(indexMetaData.getIndex(), shard, indexMetaData.getNumberOfReplicas(), counter)); + builder.addIndexShard(genShardRoutingTable(indexMetaData.getIndex().getName(), shard, indexMetaData.getNumberOfReplicas(), counter)); } return builder.build(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index d3b31221b67..1f98275aee7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -69,7 +69,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "bar"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("bar")); + assertThat(e.getIndex().getName(), equalTo("bar")); } results = indexNameExpressionResolver.concreteIndices(context, "foofoo", "foobar"); @@ -84,14 +84,14 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "bar"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("bar")); + assertThat(e.getIndex().getName(), equalTo("bar")); } try { indexNameExpressionResolver.concreteIndices(context, "foo", "bar"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("bar")); + assertThat(e.getIndex().getName(), equalTo("bar")); } results = indexNameExpressionResolver.concreteIndices(context, "barbaz", "foobar"); @@ -102,7 +102,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "barbaz", "bar"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("bar")); + assertThat(e.getIndex().getName(), equalTo("bar")); } results = indexNameExpressionResolver.concreteIndices(context, "baz*"); @@ -227,21 +227,21 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "bar"); fail(); } catch(IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("bar")); + assertThat(e.getIndex().getName(), equalTo("bar")); } try { indexNameExpressionResolver.concreteIndices(context, "baz*"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("baz*")); + assertThat(e.getIndex().getName(), equalTo("baz*")); } try { indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("baz*")); + assertThat(e.getIndex().getName(), equalTo("baz*")); } } @@ -362,7 +362,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "baz*"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("baz*")); 
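Note: since exceptions now expose their index as an Index rather than a plain string, the resolver tests above compare on the name explicitly. A minimal sketch, assuming the test's resolver and context fixtures and an illustrative "bar" expression:

    try {
        indexNameExpressionResolver.concreteIndices(context, "bar");
        fail("expected an IndexNotFoundException");
    } catch (IndexNotFoundException e) {
        // Compare on the name; indices set from a bare string carry the "_na_"
        // placeholder UUID rather than a real one.
        assertEquals("bar", e.getIndex().getName());
    }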
+ assertThat(e.getIndex().getName(), equalTo("baz*")); } String[] results = indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); @@ -385,7 +385,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("baz*")); + assertThat(e.getIndex().getName(), equalTo("baz*")); } results = indexNameExpressionResolver.concreteIndices(context, "foofoobar"); @@ -401,14 +401,14 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "baz*"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("baz*")); + assertThat(e.getIndex().getName(), equalTo("baz*")); } try { indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("baz*")); + assertThat(e.getIndex().getName(), equalTo("baz*")); } String[] results = indexNameExpressionResolver.concreteIndices(context, "foofoobar"); @@ -432,7 +432,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "baz*"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("baz*")); + assertThat(e.getIndex().getName(), equalTo("baz*")); } try { @@ -440,7 +440,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "foo", "baz*"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("baz*")); + assertThat(e.getIndex().getName(), equalTo("baz*")); } try { @@ -465,7 +465,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { fail(); } catch(IndexClosedException e) { assertThat(e.getMessage(), equalTo("closed")); - assertEquals(e.getIndex(), "foofoo-closed"); + assertEquals(e.getIndex().getName(), "foofoo-closed"); } IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed()); @@ -485,7 +485,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "foo"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("foo")); + assertThat(e.getIndex().getName(), equalTo("foo")); } results = indexNameExpressionResolver.concreteIndices(context, "foo*"); assertThat(results, emptyArray()); @@ -493,7 +493,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { indexNameExpressionResolver.concreteIndices(context, "foo*", "bar"); fail(); } catch (IndexNotFoundException e) { - assertThat(e.getIndex(), equalTo("bar")); + assertThat(e.getIndex().getName(), equalTo("bar")); } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 4076286ce5d..a2fa89b503b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; @@ -42,7 +41,7 @@ public class MetaDataTests extends ESTestCase { .putAlias(AliasMetaData.builder("index").build()); try { MetaData.builder().put(builder).build(); - fail("expection should have been thrown"); + fail("exception should have been thrown"); } catch (IllegalStateException e) { assertThat(e.getMessage(), equalTo("index and alias names need to be unique, but alias [index] and index [index] have the same name")); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java index 8f7ae0c822b..00acf1ebabc 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -37,7 +38,7 @@ import static org.hamcrest.Matchers.nullValue; public class AllocationIdTests extends ESTestCase { public void testShardToStarted() { logger.info("-- create unassigned shard"); - ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); assertThat(shard.allocationId(), nullValue()); logger.info("-- initialize the shard"); @@ -57,7 +58,7 @@ public class AllocationIdTests extends ESTestCase { public void testSuccessfulRelocation() { logger.info("-- build started shard"); - ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); shard.initialize("node1", -1); shard.moveToStarted(); @@ -80,7 +81,7 @@ public class AllocationIdTests extends ESTestCase { public void testCancelRelocation() { logger.info("-- build started shard"); - ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); shard.initialize("node1", -1); shard.moveToStarted(); @@ -100,7 +101,7 @@ public class AllocationIdTests extends ESTestCase { public void testMoveToUnassigned() { logger.info("-- build started shard"); - ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); shard.initialize("node1", -1); shard.moveToStarted(); @@ -111,7 +112,7 @@ public class AllocationIdTests extends ESTestCase { public void testReinitializing() { logger.info("-- build started shard"); - ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new 
UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); shard.initialize("node1", -1); shard.moveToStarted(); AllocationId allocationId = shard.allocationId(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index 9203b270b2f..457e679ebd8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -141,7 +141,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase { assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT)); logger.info("--> force allocation of stale copy to node that does not have shard copy"); - client().admin().cluster().prepareReroute().add(new AllocateStalePrimaryAllocationCommand(new ShardId("test", 0), dataNodeWithNoShardCopy, true)).get(); + client().admin().cluster().prepareReroute().add(new AllocateStalePrimaryAllocationCommand("test", 0, dataNodeWithNoShardCopy, true)).get(); logger.info("--> wait until shard is failed and becomes unassigned again"); assertBusy(() -> assertTrue(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").allPrimaryShardsUnassigned())); @@ -161,9 +161,9 @@ public class PrimaryAllocationIT extends ESIntegTestCase { logger.info("--> adding allocation command for shard " + shardId); // force allocation based on node id if (useStaleReplica) { - rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand(new ShardId("test", shardId), storeStatus.getNode().getId(), true)); + rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand("test", shardId, storeStatus.getNode().getId(), true)); } else { - rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", shardId), storeStatus.getNode().getId(), true)); + rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand("test", shardId, storeStatus.getNode().getId(), true)); } } rerouteBuilder.get(); @@ -182,7 +182,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase { assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().shardRoutingTable("test", 0).assignedShards(), empty()); - client().admin().cluster().prepareReroute().add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node, true)).get(); + client().admin().cluster().prepareReroute().add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node, true)).get(); ensureGreen("test"); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java index e50272d2b08..5c922f07e46 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java @@ -198,7 +198,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { ShardRouting shortDelayUnassignedReplica = null; ShardRouting longDelayUnassignedReplica = null; for (ShardRouting shr : unassigned) { - if (shr.getIndex().equals("short_delay")) { + if (shr.getIndexName().equals("short_delay")) { 
shortDelayUnassignedReplica = shr; } else { longDelayUnassignedReplica = shr; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index 713bf0aa311..741d62d74e6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -71,8 +71,8 @@ public class RoutingTableTests extends ESAllocationTestCase { .build(); this.testRoutingTable = new RoutingTable.Builder() - .add(new IndexRoutingTable.Builder(TEST_INDEX_1).initializeAsNew(metaData.index(TEST_INDEX_1)).build()) - .add(new IndexRoutingTable.Builder(TEST_INDEX_2).initializeAsNew(metaData.index(TEST_INDEX_2)).build()) + .add(new IndexRoutingTable.Builder(metaData.index(TEST_INDEX_1).getIndex()).initializeAsNew(metaData.index(TEST_INDEX_1)).build()) + .add(new IndexRoutingTable.Builder(metaData.index(TEST_INDEX_2).getIndex()).initializeAsNew(metaData.index(TEST_INDEX_2)).build()) .build(); this.clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(testRoutingTable).build(); } @@ -127,7 +127,7 @@ public class RoutingTableTests extends ESAllocationTestCase { } public void testIndex() { - assertThat(this.testRoutingTable.index(TEST_INDEX_1).getIndex(), is(TEST_INDEX_1)); + assertThat(this.testRoutingTable.index(TEST_INDEX_1).getIndex().getName(), is(TEST_INDEX_1)); assertThat(this.testRoutingTable.index("foobar"), is(nullValue())); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java index 54e39cc227d..db94742b1e5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java @@ -161,35 +161,35 @@ public class ShardRoutingTests extends ESTestCase { switch (changeId) { case 0: // change index - otherRouting = TestShardRouting.newShardRouting(otherRouting.index() + "a", otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), + otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName() + "a", otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo()); break; case 1: // change shard id - otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id() + 1, otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), + otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id() + 1, otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo()); break; case 2: // change current node - otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId() == null ? "1" : otherRouting.currentNodeId() + "_1", otherRouting.relocatingNodeId(), + otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId() == null ? 
"1" : otherRouting.currentNodeId() + "_1", otherRouting.relocatingNodeId(), otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo()); break; case 3: // change relocating node - otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), + otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId() == null ? "1" : otherRouting.relocatingNodeId() + "_1", otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo()); break; case 4: // change restore source - otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), + otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), otherRouting.restoreSource() == null ? new RestoreSource(new SnapshotId("test", "s1"), Version.CURRENT, "test") : new RestoreSource(otherRouting.restoreSource().snapshotId(), Version.CURRENT, otherRouting.index() + "_1"), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo()); break; case 5: // change primary flag - otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), + otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), otherRouting.restoreSource(), otherRouting.primary() == false, otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo()); break; case 6: @@ -204,7 +204,7 @@ public class ShardRoutingTests extends ESTestCase { unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test"); } - otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), + otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), otherRouting.restoreSource(), otherRouting.primary(), newState, otherRouting.version(), unassignedInfo); break; } @@ -216,7 +216,7 @@ public class ShardRoutingTests extends ESTestCase { if (randomBoolean()) { // change unassigned info - otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), + otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(), otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo() == null ? 
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test") : new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, otherRouting.unassignedInfo().getMessage() + "_1")); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index e277080ebf7..37882dd6772 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESAllocationTestCase; import java.util.Collections; @@ -160,15 +161,16 @@ public class UnassignedInfoTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) .build(); + final Index index = metaData.index("test").getIndex(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) .metaData(metaData) - .routingTable(RoutingTable.builder().addAsNew(metaData.index("test")).build()).build(); + .routingTable(RoutingTable.builder().addAsNew(metaData.index(index)).build()).build(); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build(); clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build(); // starting primaries clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build(); - IndexRoutingTable.Builder builder = IndexRoutingTable.builder("test"); - for (IndexShardRoutingTable indexShardRoutingTable : clusterState.routingTable().index("test")) { + IndexRoutingTable.Builder builder = IndexRoutingTable.builder(index); + for (IndexShardRoutingTable indexShardRoutingTable : clusterState.routingTable().index(index)) { builder.addIndexShard(indexShardRoutingTable); } builder.addReplica(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 65c2e577015..16d9acb8de2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -88,7 +88,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { } else { toNodeId = "node1"; } - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(new ShardId("test", 0), existingNodeId, toNodeId))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(existingNodeId).get(0).state(), equalTo(ShardRoutingState.RELOCATING)); @@ -102,11 +102,11 @@ public class AllocationCommandsTests extends ESAllocationTestCase { 
assertThat(clusterState.getRoutingNodes().node(toNodeId).get(0).state(), equalTo(ShardRoutingState.STARTED)); } - private AbstractAllocateAllocationCommand randomAllocateCommand(ShardId shardId, String node) { + private AbstractAllocateAllocationCommand randomAllocateCommand(String index, int shardId, String node) { return randomFrom( - new AllocateReplicaAllocationCommand(shardId, node), - new AllocateEmptyPrimaryAllocationCommand(shardId, node, true), - new AllocateStalePrimaryAllocationCommand(shardId, node, true) + new AllocateReplicaAllocationCommand(index, shardId, node), + new AllocateEmptyPrimaryAllocationCommand(index, shardId, node, true), + new AllocateStalePrimaryAllocationCommand(index, shardId, node, true) ); } @@ -115,18 +115,19 @@ public class AllocationCommandsTests extends ESAllocationTestCase { .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") .build()); + final String index = "test"; logger.info("--> building initial routing table"); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetaData.builder(index).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); // shard routing is added as "from recovery" instead of "new index creation" so that we can test below that allocating an empty // primary with accept_data_loss flag set to false fails RoutingTable routingTable = RoutingTable.builder() - .addAsRecovery(metaData.index("test")) + .addAsRecovery(metaData.index(index)) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); - ShardId shardId = new ShardId("test", 0); + final ShardId shardId = new ShardId(metaData.index(index).getIndex(), 0); logger.info("--> adding 3 nodes on same rack and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() @@ -141,7 +142,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating to non-existent node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(shardId, "node42"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node42"))); fail("expected IllegalArgumentException when allocating to non-existing node"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("failed to resolve [node42], no matching nodes")); @@ -149,7 +150,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating to non-data node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(shardId, "node4"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node4"))); fail("expected IllegalArgumentException when allocating to non-data node"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocation can only be done on data nodes")); @@ -157,7 +158,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating non-existing shard, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test", 1), "node2"))); + 
allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 1, "node2"))); fail("expected ShardNotFoundException when allocating non-existing shard"); } catch (ShardNotFoundException e) { assertThat(e.getMessage(), containsString("no such shard")); @@ -165,7 +166,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating non-existing index, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test2", 0), "node2"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test2", 0, "node2"))); fail("expected ShardNotFoundException when allocating non-existing index"); } catch (IndexNotFoundException e) { assertThat(e.getMessage(), containsString("no such index")); @@ -173,7 +174,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating empty primary with acceptDataLoss flag set to false"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", false))); fail("expected IllegalArgumentException when allocating empty primary with acceptDataLoss flag set to false"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true")); @@ -181,14 +182,14 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating stale primary with acceptDataLoss flag set to false"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(shardId, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(index, shardId.id(), "node1", false))); fail("expected IllegalArgumentException when allocating stale primary with acceptDataLoss flag set to false"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. 
Please confirm by setting the accept_data_loss parameter to true")); } logger.info("--> allocating empty primary with acceptDataLoss flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -204,13 +205,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocate the replica shard on the primary shard node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node1"))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node1"))); fail("expected IllegalArgumentException when allocating replica shard on the primary shard node"); } catch (IllegalArgumentException e) { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -229,7 +230,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> verify that we fail when there are no unassigned shards"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test", 0), "node3"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 0, "node3"))); fail("expected IllegalArgumentException when allocating shard while no unassigned shard available"); } catch (IllegalArgumentException e) { } @@ -261,7 +262,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); logger.info("--> allocating empty primary shard with accept_data_loss flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -270,7 +271,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel primary allocation, make sure it fails..."); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new 
CancelAllocationCommand("test", 0, "node1", false))); fail(); } catch (IllegalArgumentException e) { } @@ -284,13 +285,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel primary allocation, make sure it fails..."); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false))); fail(); } catch (IllegalArgumentException e) { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -299,7 +300,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> cancel the relocation allocation"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -308,7 +309,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -318,7 +319,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel the primary being replicated, make sure it fails"); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false))); fail(); } catch (IllegalArgumentException e) { } @@ -332,7 +333,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1)); logger.info("--> cancel allocation of the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, 
new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -341,7 +342,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(rerouteResult.changed(), equalTo(true)); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -357,7 +358,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1)); logger.info("--> move the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(new ShardId("test", 0), "node2", "node3"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, "node2", "node3"))); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); @@ -367,7 +368,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> cancel the move of the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node3", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node3", false))); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); @@ -376,7 +377,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel the primary allocation (with allow_primary set to true)"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", true))); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(rerouteResult.changed(), equalTo(true)); assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).get(0).primary(), equalTo(true)); @@ -386,33 +387,38 @@ public class AllocationCommandsTests extends ESAllocationTestCase { public void testSerialization() throws Exception { AllocationCommands commands = new 
AllocationCommands( - new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 1), "node1", true), - new AllocateStalePrimaryAllocationCommand(new ShardId("test", 2), "node1", true), - new AllocateReplicaAllocationCommand(new ShardId("test", 2), "node1"), - new MoveAllocationCommand(new ShardId("test", 3), "node2", "node3"), - new CancelAllocationCommand(new ShardId("test", 4), "node5", true) + new AllocateEmptyPrimaryAllocationCommand("test", 1, "node1", true), + new AllocateStalePrimaryAllocationCommand("test", 2, "node1", true), + new AllocateReplicaAllocationCommand("test", 2, "node1"), + new MoveAllocationCommand("test", 3, "node2", "node3"), + new CancelAllocationCommand("test", 4, "node5", true) ); BytesStreamOutput bytes = new BytesStreamOutput(); AllocationCommands.writeTo(commands, bytes); AllocationCommands sCommands = AllocationCommands.readFrom(StreamInput.wrap(bytes.bytes())); assertThat(sCommands.commands().size(), equalTo(5)); - assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(1)); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).index(), equalTo("test")); assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).acceptDataLoss(), equalTo(true)); - assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(2)); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).index(), equalTo("test")); assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).node(), equalTo("node1")); assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).acceptDataLoss(), equalTo(true)); - assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(2)); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).index(), equalTo("test")); assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node1")); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(new ShardId("test", 3))); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(3)); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).index(), equalTo("test")); assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).fromNode(), equalTo("node2")); assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).toNode(), equalTo("node3")); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(new ShardId("test", 4))); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(4)); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).index(), equalTo("test")); assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).node(), equalTo("node5")); assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).allowPrimary(), 
equalTo(true)); } @@ -434,22 +440,27 @@ public class AllocationCommandsTests extends ESAllocationTestCase { AllocationCommands sCommands = AllocationCommands.fromXContent(parser); assertThat(sCommands.commands().size(), equalTo(5)); - assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(1)); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).index(), equalTo("test")); assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).acceptDataLoss(), equalTo(true)); - assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(2)); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).index(), equalTo("test")); assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).node(), equalTo("node1")); assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).acceptDataLoss(), equalTo(true)); - assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(2)); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).index(), equalTo("test")); assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node1")); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(new ShardId("test", 3))); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(3)); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).index(), equalTo("test")); assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).fromNode(), equalTo("node2")); assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).toNode(), equalTo("node3")); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(new ShardId("test", 4))); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(4)); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).index(), equalTo("test")); assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).node(), equalTo("node5")); assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).allowPrimary(), equalTo(true)); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java index 52aad66776e..925258636c6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java @@ -74,26 +74,26 @@ public class AllocationPriorityTests extends ESAllocationTestCase { routingTable = allocation.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertEquals(2, 
clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size()); - assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index()); - assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index()); + assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName()); + assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).getIndexName()); routingTable = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertEquals(2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size()); - assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index()); - assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index()); + assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName()); + assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).getIndexName()); routingTable = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertEquals(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).toString(),2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size()); - assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index()); - assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index()); + assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName()); + assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).getIndexName()); routingTable = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertEquals(2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size()); - assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index()); - assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index()); + assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName()); + assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).getIndexName()); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index eb94b6de109..8810fc47395 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -872,10 +872,10 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { if (routing.primary()) { primaryNode = routing.currentNodeId(); } else if (routing.initializing()) { - commands.add(new 
CancelAllocationCommand(routing.shardId(), routing.currentNodeId(), false)); + commands.add(new CancelAllocationCommand(routing.shardId().getIndexName(), routing.id(), routing.currentNodeId(), false)); } } - commands.add(new MoveAllocationCommand(new ShardId("test", 0), primaryNode, "A-4")); + commands.add(new MoveAllocationCommand("test", 0, primaryNode, "A-4")); routingTable = strategy.reroute(clusterState, commands).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java index 8ac6c4fcedc..422851a229f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java @@ -98,10 +98,10 @@ public abstract class CatAllocationTestCase extends ESAllocationTestCase { for(Idx idx : indices.values()) { IndexMetaData idxMeta = IndexMetaData.builder(idx.name).settings(settings(Version.CURRENT)).numberOfShards(idx.numShards()).numberOfReplicas(idx.numReplicas()).build(); builder.put(idxMeta, false); - IndexRoutingTable.Builder tableBuilder = new IndexRoutingTable.Builder(idx.name).initializeAsRecovery(idxMeta); + IndexRoutingTable.Builder tableBuilder = new IndexRoutingTable.Builder(idxMeta.getIndex()).initializeAsRecovery(idxMeta); Map shardIdToRouting = new HashMap<>(); for (ShardRouting r : idx.routing) { - IndexShardRoutingTable refData = new IndexShardRoutingTable.Builder(new ShardId(idx.name, r.id())).addShard(r).build(); + IndexShardRoutingTable refData = new IndexShardRoutingTable.Builder(r.shardId()).addShard(r).build(); if (shardIdToRouting.containsKey(r.getId())) { refData = new IndexShardRoutingTable.Builder(shardIdToRouting.get(r.getId())).addShard(r).build(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index 827da901dc9..beb7c134c16 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -130,7 +129,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { routingNodes = clusterState.getRoutingNodes(); assertThat(routingNodes.node("node3").size(), equalTo(1)); - assertThat(routingNodes.node("node3").get(0).shardId().index().name(), equalTo("test1")); + assertThat(routingNodes.node("node3").get(0).shardId().getIndex().getName(), equalTo("test1")); } @@ -235,7 +234,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { routingNodes = clusterState.getRoutingNodes(); assertThat(routingNodes.node("node3").size(), equalTo(1)); - 
assertThat(routingNodes.node("node3").get(0).shardId().index().name(), equalTo("test1")); + assertThat(routingNodes.node("node3").get(0).shardId().getIndex().getName(), equalTo("test1")); } public void testClusterPrimariesActive2() { @@ -442,7 +441,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { routingNodes = clusterState.getRoutingNodes(); assertThat(routingNodes.node("node3").size(), equalTo(1)); - assertThat(routingNodes.node("node3").get(0).shardId().index().name(), anyOf(equalTo("test1"), equalTo("test2"))); + assertThat(routingNodes.node("node3").get(0).shardId().getIndex().getName(), anyOf(equalTo("test1"), equalTo("test2"))); } public void testClusterAllActive2() { @@ -643,7 +642,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { RoutingNodes.UnassignedShards.UnassignedIterator iterator = unassigned.iterator(); while (iterator.hasNext()) { ShardRouting next = iterator.next(); - if ("test1".equals(next.index())) { + if ("test1".equals(next.index().getName())) { iterator.removeAndIgnore(); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java index cb09fb93b60..82496f31bd2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java @@ -149,7 +149,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand(clusterState.routingTable().index("test").shard(0).primaryShard().shardId(), clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) ); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); @@ -223,7 +223,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand(clusterState.routingTable().index("test").shard(0).primaryShard().shardId(), clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) + new MoveAllocationCommand("test",0 , clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) ); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java index 8807816d2e8..4b720036b9a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman import org.elasticsearch.common.logging.ESLogger; import 
org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESAllocationTestCase; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -54,7 +53,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { return new ClusterInfo() { @Override public Long getShardSize(ShardRouting shardRouting) { - if (shardRouting.index().equals("test") && shardRouting.shardId().getId() == 0) { + if (shardRouting.getIndexName().equals("test") && shardRouting.shardId().getId() == 0) { return byteSize; } return null; @@ -112,7 +111,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { return new ClusterInfo() { @Override public Long getShardSize(ShardRouting shardRouting) { - if (shardRouting.index().equals("test") && shardRouting.shardId().getId() == 0) { + if (shardRouting.getIndexName().equals("test") && shardRouting.shardId().getId() == 0) { return byteSize; } return null; @@ -150,7 +149,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { } else { toNodeId = "node1"; } - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(new ShardId("test", 0), existingNodeId, toNodeId))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertEquals(clusterState.getRoutingNodes().node(existingNodeId).get(0).state(), ShardRoutingState.RELOCATING); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index b8ab9c13590..0c687bc6658 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -109,7 +109,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand(clusterState.routingTable().index("test").shard(0).primaryShard().shardId(), clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) ); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); @@ -125,7 +125,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand(clusterState.routingTable().index("test").shard(0).primaryShard().shardId(), clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) ); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = 
ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 809b01cb619..062a95c8677 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -299,26 +299,26 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { } public void testRebalanceDoesNotAllocatePrimaryAndReplicasOnDifferentVersionNodes() { - ShardId shard1 = new ShardId("test1", 0); - ShardId shard2 = new ShardId("test2", 0); + ShardId shard1 = new ShardId("test1", "_na_", 0); + ShardId shard2 = new ShardId("test2", "_na_", 0); final DiscoveryNode newNode = new DiscoveryNode("newNode", DummyTransportAddress.INSTANCE, Version.CURRENT); final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", DummyTransportAddress.INSTANCE, VersionUtils.getPreviousVersion()); final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", DummyTransportAddress.INSTANCE, VersionUtils.getPreviousVersion()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shard1.getIndex()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1)) - .put(IndexMetaData.builder(shard2.getIndex()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetaData.builder(shard1.getIndexName()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetaData.builder(shard2.getIndexName()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1)) .build(); RoutingTable routingTable = RoutingTable.builder() .add(IndexRoutingTable.builder(shard1.getIndex()) .addIndexShard(new IndexShardRoutingTable.Builder(shard1) - .addShard(TestShardRouting.newShardRouting(shard1.getIndex(), shard1.getId(), newNode.id(), true, ShardRoutingState.STARTED, 10)) - .addShard(TestShardRouting.newShardRouting(shard1.getIndex(), shard1.getId(), oldNode1.id(), false, ShardRoutingState.STARTED, 10)) + .addShard(TestShardRouting.newShardRouting(shard1.getIndexName(), shard1.getId(), newNode.id(), true, ShardRoutingState.STARTED, 10)) + .addShard(TestShardRouting.newShardRouting(shard1.getIndexName(), shard1.getId(), oldNode1.id(), false, ShardRoutingState.STARTED, 10)) .build()) ) .add(IndexRoutingTable.builder(shard2.getIndex()) .addIndexShard(new IndexShardRoutingTable.Builder(shard2) - .addShard(TestShardRouting.newShardRouting(shard2.getIndex(), shard2.getId(), newNode.id(), true, ShardRoutingState.STARTED, 10)) - .addShard(TestShardRouting.newShardRouting(shard2.getIndex(), shard2.getId(), oldNode1.id(), false, ShardRoutingState.STARTED, 10)) + .addShard(TestShardRouting.newShardRouting(shard2.getIndexName(), shard2.getId(), newNode.id(), true, ShardRoutingState.STARTED, 10)) + .addShard(TestShardRouting.newShardRouting(shard2.getIndexName(), shard2.getId(), oldNode1.id(), false, ShardRoutingState.STARTED, 10)) .build()) ) .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index 
4672f339c70..df0c4a8fa3e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -66,7 +66,7 @@ public class RebalanceAfterActiveTests extends ESAllocationTestCase { return new ClusterInfo() { @Override public Long getShardSize(ShardRouting shardRouting) { - if (shardRouting.index().equals("test")) { + if (shardRouting.getIndexName().equals("test")) { return sizes[shardRouting.getId()]; } return null; } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index ac539c421b0..0830747a9dd 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -206,10 +206,10 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { assertThat(numberOfShardsOfType(clusterState.getRoutingNodes(), STARTED), equalTo(10)); for (ShardRouting shardRouting : clusterState.getRoutingNodes().node("node1")) { - assertThat(shardRouting.index(), equalTo("test")); + assertThat(shardRouting.getIndexName(), equalTo("test")); } for (ShardRouting shardRouting : clusterState.getRoutingNodes().node("node2")) { - assertThat(shardRouting.index(), equalTo("test1")); + assertThat(shardRouting.getIndexName(), equalTo("test1")); } logger.info("update " + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey() + " for test, see that things move"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index bf41ad8a053..90263acb13a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -274,8 +274,8 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { assertThat(nodeIndex, lessThan(25)); // check that we don't have a shard associated with a node with the same index name (we have a single shard) for (ShardRouting shardRoutingEntry : routingNode) { - assertThat(encounteredIndices, not(hasItem(shardRoutingEntry.index()))); - encounteredIndices.add(shardRoutingEntry.index()); + assertThat(encounteredIndices, not(hasItem(shardRoutingEntry.getIndexName()))); + encounteredIndices.add(shardRoutingEntry.getIndexName()); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java index 0712e9cd02a..bbf5396f393 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESAllocationTestCase; 
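// Editor's note: a hedged sketch (not part of the diff) of the pattern the StartedShardsRoutingTests
// hunk below adopts: rather than passing raw index names, the test resolves the
// org.elasticsearch.index.Index from the IndexMetaData once and feeds it to TestShardRouting and
// IndexRoutingTable.builder, so routing entries carry the index UUID as well as its name.
// Shard numbers and node ids mirror the test and are illustrative only.
final Index index = indexMetaData.getIndex();
ShardRouting initShard = TestShardRouting.newShardRouting(index, 0, "node1", true,
        ShardRoutingState.INITIALIZING, 1);
RoutingTable routingTable = RoutingTable.builder()
        .add(IndexRoutingTable.builder(index)
                .addIndexShard(new IndexShardRoutingTable.Builder(initShard.shardId())
                        .addShard(initShard).build()))
        .build();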
import java.util.Arrays; @@ -47,14 +48,15 @@ public class StartedShardsRoutingTests extends ESAllocationTestCase { .settings(settings(Version.CURRENT)) .numberOfShards(3).numberOfReplicas(0) .build(); + final Index index = indexMetaData.getIndex(); ClusterState.Builder stateBuilder = ClusterState.builder(ClusterName.DEFAULT) .nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))) .metaData(MetaData.builder().put(indexMetaData, false)); - final ShardRouting initShard = TestShardRouting.newShardRouting("test", 0, "node1", true, ShardRoutingState.INITIALIZING, 1); - final ShardRouting startedShard = TestShardRouting.newShardRouting("test", 1, "node2", true, ShardRoutingState.STARTED, 1); - final ShardRouting relocatingShard = TestShardRouting.newShardRouting("test", 2, "node1", "node2", true, ShardRoutingState.RELOCATING, 1); - stateBuilder.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder("test") + final ShardRouting initShard = TestShardRouting.newShardRouting(index, 0, "node1", true, ShardRoutingState.INITIALIZING, 1); + final ShardRouting startedShard = TestShardRouting.newShardRouting(index, 1, "node2", true, ShardRoutingState.STARTED, 1); + final ShardRouting relocatingShard = TestShardRouting.newShardRouting(index, 2, "node1", "node2", true, ShardRoutingState.RELOCATING, 1); + stateBuilder.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(index) .addIndexShard(new IndexShardRoutingTable.Builder(initShard.shardId()).addShard(initShard).build()) .addIndexShard(new IndexShardRoutingTable.Builder(startedShard.shardId()).addShard(startedShard).build()) .addIndexShard(new IndexShardRoutingTable.Builder(relocatingShard.shardId()).addShard(relocatingShard).build())).build()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index 1d60436d3c7..5573780e2e1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -284,7 +284,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); - RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node1").get(0).shardId(), "node1", "node2"))); + RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node1").get(0).shardId().id(), "node1", "node2"))); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.YES); routingTable = reroute.routingTable(); @@ -297,7 +297,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); // outgoing throttles - reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node3").get(0).shardId(), "node3", "node1")), true); + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 
clusterState.getRoutingNodes().node("node3").get(0).shardId().id(), "node3", "node1")), true); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); @@ -312,7 +312,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); // incoming throttles - reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node3").get(0).shardId(), "node3", "node2")), true); + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").get(0).shardId().id(), "node3", "node2")), true); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index fa52503eac5..6c5862682fe 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -46,7 +46,6 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.gateway.NoopGatewayAllocator; @@ -320,8 +319,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { logger.info("--> adding node1 and node2 node"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() - .put(newNode("node1")) - .put(newNode("node2")) + .put(newNode("node1")) + .put(newNode("node2")) ).build(); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); @@ -504,7 +503,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { logger.info("--> adding node5"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) - .put(newNode("node5")) + .put(newNode("node5")) ).build(); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -757,8 +756,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { logger.info("--> adding two nodes"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() - .put(newNode("node1")) - .put(newNode("node2")) + .put(newNode("node1")) + .put(newNode("node2")) ).build(); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -777,17 +776,17 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { logger.info("--> adding node3"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) - .put(newNode("node3")) 
+ .put(newNode("node3")) ).build(); - AllocationCommand relocate1 = new MoveAllocationCommand(new ShardId("test", 0), "node2", "node3"); + AllocationCommand relocate1 = new MoveAllocationCommand("test", 0, "node2", "node3"); AllocationCommands cmds = new AllocationCommands(relocate1); routingTable = strategy.reroute(clusterState, cmds).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); logShardStates(clusterState); - AllocationCommand relocate2 = new MoveAllocationCommand(new ShardId("test2", 0), "node2", "node3"); + AllocationCommand relocate2 = new MoveAllocationCommand("test2", 0, "node2", "node3"); cmds = new AllocationCommands(relocate2); try { @@ -848,14 +847,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ShardRouting secondRouting = TestShardRouting.newShardRouting("test", 1, "node1", null, null, true, ShardRoutingState.STARTED, 1); RoutingNode firstRoutingNode = new RoutingNode("node1", discoveryNode1, Arrays.asList(firstRouting, secondRouting)); RoutingTable.Builder builder = RoutingTable.builder().add( - IndexRoutingTable.builder("test") - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0)) - .addShard(firstRouting) - .build() + IndexRoutingTable.builder(firstRouting.index()) + .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId()) + .addShard(firstRouting) + .build() ) - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1)) - .addShard(secondRouting) - .build() + .addIndexShard(new IndexShardRoutingTable.Builder(secondRouting.shardId()) + .addShard(secondRouting) + .build() ) ); ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); @@ -868,14 +867,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { secondRouting = TestShardRouting.newShardRouting("test", 1, "node1", "node2", null, true, ShardRoutingState.RELOCATING, 1); firstRoutingNode = new RoutingNode("node1", discoveryNode1, Arrays.asList(firstRouting, secondRouting)); builder = RoutingTable.builder().add( - IndexRoutingTable.builder("test") - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0)) - .addShard(firstRouting) - .build() + IndexRoutingTable.builder(firstRouting.index()) + .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId()) + .addShard(firstRouting) + .build() ) - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1)) - .addShard(secondRouting) - .build() + .addIndexShard(new IndexShardRoutingTable.Builder(secondRouting.shardId()) + .addShard(secondRouting) + .build() ) ); clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); @@ -897,7 +896,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( - new SameShardAllocationDecider(Settings.EMPTY), diskThresholdDecider + new SameShardAllocationDecider(Settings.EMPTY), diskThresholdDecider ))); AllocationService strategy = new AllocationService(settingsBuilder() .put("cluster.routing.allocation.node_concurrent_recoveries", 10) @@ -967,14 +966,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { RoutingNode firstRoutingNode = new RoutingNode("node2", discoveryNode2, Arrays.asList(firstRouting, secondRouting)); RoutingTable.Builder builder = RoutingTable.builder().add( - IndexRoutingTable.builder("test") - 
.addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0)) - .addShard(firstRouting) - .build() + IndexRoutingTable.builder(firstRouting.index()) + .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId()) + .addShard(firstRouting) + .build() ) - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1)) - .addShard(secondRouting) - .build() + .addIndexShard(new IndexShardRoutingTable.Builder(secondRouting.shardId()) + .addShard(secondRouting) + .build() ) ); ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); @@ -1024,14 +1023,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { secondRouting = TestShardRouting.newShardRouting("test", 1, "node2", "node3", null, true, ShardRoutingState.RELOCATING, 1); firstRoutingNode = new RoutingNode("node2", discoveryNode2, Arrays.asList(firstRouting, secondRouting)); builder = RoutingTable.builder().add( - IndexRoutingTable.builder("test") - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0)) - .addShard(firstRouting) - .build() + IndexRoutingTable.builder(firstRouting.index()) + .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId()) + .addShard(firstRouting) + .build() ) - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1)) - .addShard(secondRouting) - .build() + .addIndexShard(new IndexShardRoutingTable.Builder(secondRouting.shardId()) + .addShard(secondRouting) + .build() ) ); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index 8551af718e2..ec076a54af7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -95,14 +96,16 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { ClusterInfoService cis = EmptyClusterInfoService.INSTANCE; DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); - ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); - DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT); - DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT); - MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); + final Index index = metaData.index("test").getIndex(); + + ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT); + DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT); + RoutingTable routingTable = 
RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); @@ -140,32 +143,33 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT); DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT); - ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) + .build(); + final IndexMetaData indexMetaData = metaData.index("test"); + + ShardRouting test_0 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_0, node_0.getId()); ShardRoutingHelper.moveToStarted(test_0); shardRoutingMap.put(test_0, "/node0/least"); - ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_1 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 1, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_1, node_1.getId()); ShardRoutingHelper.moveToStarted(test_1); shardRoutingMap.put(test_1, "/node1/least"); - ShardRouting test_2 = ShardRouting.newUnassigned("test", 2, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_2 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 2, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_2, node_1.getId()); ShardRoutingHelper.moveToStarted(test_2); shardRoutingMap.put(test_2, "/node1/most"); - ShardRouting test_3 = ShardRouting.newUnassigned("test", 3, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_3 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 3, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_3, node_1.getId()); ShardRoutingHelper.moveToStarted(test_3); // Intentionally not in the shardRoutingMap. We want to test what happens when we don't know where it is. 
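// Editor's note: a minimal sketch (not part of the diff) of the signature change this hunk exercises.
// ShardRouting.newUnassigned no longer takes a bare index name; callers first resolve an
// org.elasticsearch.index.Index (name plus UUID) from the IndexMetaData, as the replacement lines
// above do. The index name and node id below are illustrative only.
Index index = metaData.index("test").getIndex();
ShardRouting shard = ShardRouting.newUnassigned(index, 0, null, true,
        new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(shard, "node_0");
ShardRoutingHelper.moveToStarted(shard);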
- MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) - .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaData.index("test")) + .addAsNew(indexMetaData) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); @@ -220,17 +224,18 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { shardSizes.put("[test][2][r]", 1000L); shardSizes.put("[other][0][p]", 10000L); ClusterInfo info = new DevNullClusterInfo(ImmutableOpenMap.of(), ImmutableOpenMap.of(), shardSizes.build()); - ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + final Index index = new Index("test", "_na_"); + ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_0, "node1"); ShardRoutingHelper.moveToStarted(test_0); ShardRoutingHelper.relocate(test_0, "node2"); - ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_1, "node2"); ShardRoutingHelper.moveToStarted(test_1); ShardRoutingHelper.relocate(test_1, "node1"); - ShardRouting test_2 = ShardRouting.newUnassigned("test", 2, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_2 = ShardRouting.newUnassigned(index, 2, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_2, "node1"); ShardRoutingHelper.moveToStarted(test_2); @@ -244,13 +249,13 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { assertEquals(0l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/some/other/dev")); assertEquals(0l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/some/other/dev")); - ShardRouting test_3 = ShardRouting.newUnassigned("test", 3, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_3 = ShardRouting.newUnassigned(index, 3, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(test_3, "node1"); ShardRoutingHelper.moveToStarted(test_3); assertEquals(0l, DiskThresholdDecider.getShardSize(test_3, info)); - ShardRouting other_0 = ShardRouting.newUnassigned("other", 0, null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting other_0 = ShardRouting.newUnassigned(new Index("other", "_NA_"), 0, null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ShardRoutingHelper.initialize(other_0, "node2"); ShardRoutingHelper.moveToStarted(other_0); ShardRoutingHelper.relocate(other_0, "node1"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index 62005a3fa1d..bd1738b59b2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -233,18 +233,18 @@ public class EnableAllocationTests extends ESAllocationTestCase { case PRIMARIES: for (ShardRouting routing : mutableShardRoutings) { assertTrue("only primaries are allowed to relocate", routing.primary()); - assertThat("only test index can rebalance", routing.getIndex(), equalTo("test")); + assertThat("only test index can rebalance", routing.getIndexName(), equalTo("test")); } break; case REPLICAS: for (ShardRouting routing : mutableShardRoutings) { assertFalse("only replicas are allowed to relocate", routing.primary()); - assertThat("only test index can rebalance", routing.getIndex(), equalTo("test")); + assertThat("only test index can rebalance", routing.getIndexName(), equalTo("test")); } break; case ALL: for (ShardRouting routing : mutableShardRoutings) { - assertThat("only test index can rebalance", routing.getIndex(), equalTo("test")); + assertThat("only test index can rebalance", routing.getIndexName(), equalTo("test")); } break; default: diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 70d7697e666..cf99879c4d4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -331,7 +331,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { private void createNode(Settings settings) { internalCluster().startNode(Settings.builder() - .put(ClusterName.SETTING, "ClusterSettingsIT") + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), "ClusterSettingsIT") .put("node.name", "ClusterSettingsIT") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index ced1e0097a1..2f1bbd6fa53 100644 --- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -59,28 +59,28 @@ import static org.hamcrest.Matchers.sameInstance; public class RoutingIteratorTests extends ESAllocationTestCase { public void testEmptyIterator() { ShardShuffler shuffler = new RotationShardShuffler(0); - ShardIterator shardIterator = new PlainShardIterator(new ShardId("test1", 0), shuffler.shuffle(Collections.emptyList())); + ShardIterator shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); - shardIterator = new PlainShardIterator(new ShardId("test1", 0), shuffler.shuffle(Collections.emptyList())); + shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); - shardIterator = new PlainShardIterator(new 
ShardId("test1", 0), shuffler.shuffle(Collections.emptyList())); + shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); - shardIterator = new PlainShardIterator(new ShardId("test1", 0), shuffler.shuffle(Collections.emptyList())); + shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.emptyList())); assertThat(shardIterator.remaining(), equalTo(0)); assertThat(shardIterator.nextOrNull(), nullValue()); assertThat(shardIterator.remaining(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/BlobStoreTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/BlobStoreTests.java deleted file mode 100644 index 80afa5d51f9..00000000000 --- a/core/src/test/java/org/elasticsearch/common/blobstore/BlobStoreTests.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.common.blobstore; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.common.blobstore.fs.FsBlobStore; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Path; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; - -@LuceneTestCase.SuppressFileSystems("ExtrasFS") -public class BlobStoreTests extends ESTestCase { - public void testWriteRead() throws IOException { - final BlobStore store = newBlobStore(); - final BlobContainer container = store.blobContainer(new BlobPath()); - byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); - container.writeBlob("foobar", new BytesArray(data)); - try (InputStream stream = container.readBlob("foobar")) { - BytesRefBuilder target = new BytesRefBuilder(); - while (target.length() < data.length) { - byte[] buffer = new byte[scaledRandomIntBetween(1, data.length - target.length())]; - int offset = scaledRandomIntBetween(0, buffer.length - 1); - int read = stream.read(buffer, offset, buffer.length - offset); - target.append(new BytesRef(buffer, offset, read)); - } - assertEquals(data.length, target.length()); - assertArrayEquals(data, Arrays.copyOfRange(target.bytes(), 0, target.length())); - } - store.close(); - } - - public void testMoveAndList() throws IOException { - final BlobStore store = newBlobStore(); - final BlobContainer container = store.blobContainer(new BlobPath()); - assertThat(container.listBlobs().size(), equalTo(0)); - int numberOfFooBlobs = randomIntBetween(0, 10); - int numberOfBarBlobs = randomIntBetween(3, 20); - Map generatedBlobs = new HashMap<>(); - for (int i = 0; i < numberOfFooBlobs; i++) { - int length = randomIntBetween(10, 100); - String name = "foo-" + i + "-"; - generatedBlobs.put(name, (long) length); - createRandomBlob(container, name, length); - } - for (int i = 1; i < numberOfBarBlobs; i++) { - int length = randomIntBetween(10, 100); - String name = "bar-" + i + "-"; - generatedBlobs.put(name, (long) length); - createRandomBlob(container, name, length); - } - int length = randomIntBetween(10, 100); - String name = "bar-0-"; - generatedBlobs.put(name, (long) length); - byte[] data = createRandomBlob(container, name, length); - - Map blobs = container.listBlobs(); - assertThat(blobs.size(), equalTo(numberOfFooBlobs + numberOfBarBlobs)); - for (Map.Entry generated : generatedBlobs.entrySet()) { - BlobMetaData blobMetaData = blobs.get(generated.getKey()); - assertThat(generated.getKey(), blobMetaData, notNullValue()); - assertThat(blobMetaData.name(), equalTo(generated.getKey())); - assertThat(blobMetaData.length(), equalTo(generated.getValue())); - } - - assertThat(container.listBlobsByPrefix("foo-").size(), equalTo(numberOfFooBlobs)); - assertThat(container.listBlobsByPrefix("bar-").size(), equalTo(numberOfBarBlobs)); - assertThat(container.listBlobsByPrefix("baz-").size(), equalTo(0)); - - String newName = "bar-new"; - // Move to a new location - container.move(name, newName); - assertThat(container.listBlobsByPrefix(name).size(), equalTo(0)); 
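// Editor's note: the BlobStoreTests class deleted here is not lost; its write/read and move/list
// coverage moves into the shared ESBlobStoreTestCase and ESBlobStoreContainerTestCase base classes,
// and the filesystem-specific wiring reappears below as FsBlobStoreTests and FsBlobStoreContainerTests.
// A hedged sketch of the new pattern for another BlobStore implementation (class name illustrative),
// mirroring the FsBlobStore subclasses added below:
public class MyBlobStoreContainerTests extends ESBlobStoreContainerTestCase {
    protected BlobStore newBlobStore() throws IOException {
        // build and return the store under test
        return new FsBlobStore(Settings.EMPTY, createTempDir());
    }
}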
- blobs = container.listBlobsByPrefix(newName); - assertThat(blobs.size(), equalTo(1)); - assertThat(blobs.get(newName).length(), equalTo(generatedBlobs.get(name))); - assertThat(data, equalTo(readBlobFully(container, newName, length))); - store.close(); - } - - protected byte[] createRandomBlob(BlobContainer container, String name, int length) throws IOException { - byte[] data = randomBytes(length); - container.writeBlob(name, new BytesArray(data)); - return data; - } - - protected byte[] readBlobFully(BlobContainer container, String name, int length) throws IOException { - byte[] data = new byte[length]; - try (InputStream inputStream = container.readBlob(name)) { - assertThat(inputStream.read(data), equalTo(length)); - assertThat(inputStream.read(), equalTo(-1)); - } - return data; - } - - protected byte[] randomBytes(int length) { - byte[] data = new byte[length]; - for (int i = 0; i < data.length; i++) { - data[i] = (byte) randomInt(); - } - return data; - } - - protected BlobStore newBlobStore() throws IOException { - Path tempDir = createTempDir(); - Settings settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build(); - FsBlobStore store = new FsBlobStore(settings, tempDir); - return store; - } -} diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java new file mode 100644 index 00000000000..63c04b1c5e1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.blobstore; + +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.common.blobstore.fs.FsBlobStore; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.ESBlobStoreContainerTestCase; + +import java.io.IOException; +import java.nio.file.Path; + +@LuceneTestCase.SuppressFileSystems("ExtrasFS") +public class FsBlobStoreContainerTests extends ESBlobStoreContainerTestCase { + protected BlobStore newBlobStore() throws IOException { + Path tempDir = createTempDir(); + Settings settings = randomBoolean() ? 
Settings.EMPTY : Settings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build(); + return new FsBlobStore(settings, tempDir); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java new file mode 100644 index 00000000000..f6f53549ce4 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.blobstore; + +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.common.blobstore.fs.FsBlobStore; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.ESBlobStoreTestCase; + +import java.io.IOException; +import java.nio.file.Path; + +@LuceneTestCase.SuppressFileSystems("ExtrasFS") +public class FsBlobStoreTests extends ESBlobStoreTestCase { + protected BlobStore newBlobStore() throws IOException { + Path tempDir = createTempDir(); + Settings settings = randomBoolean() ? 
Settings.EMPTY : Settings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build(); + return new FsBlobStore(settings, tempDir); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java index 0c14e1a0bcb..5cba06741a9 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java @@ -62,7 +62,7 @@ public class ShardCoreKeyMapTests extends ESTestCase { try (Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { writer.addDocument(new Document()); - try (DirectoryReader dirReader = ElasticsearchDirectoryReader.wrap(writer.getReader(), new ShardId("index1", 1))) { + try (DirectoryReader dirReader = ElasticsearchDirectoryReader.wrap(writer.getReader(), new ShardId("index1", "_na_", 1))) { reader = dirReader.leaves().get(0).reader(); } } @@ -89,9 +89,9 @@ public class ShardCoreKeyMapTests extends ESTestCase { RandomIndexWriter w3 = new RandomIndexWriter(random(), dir3); w3.addDocument(new Document()); - ShardId shardId1 = new ShardId("index1", 1); - ShardId shardId2 = new ShardId("index1", 3); - ShardId shardId3 = new ShardId("index2", 2); + ShardId shardId1 = new ShardId("index1", "_na_", 1); + ShardId shardId2 = new ShardId("index1", "_na_", 3); + ShardId shardId3 = new ShardId("index2", "_na_", 2); ElasticsearchDirectoryReader reader1 = ElasticsearchDirectoryReader.wrap(w1.getReader(), shardId1); ElasticsearchDirectoryReader reader2 = ElasticsearchDirectoryReader.wrap(w2.getReader(), shardId2); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java index 3c4a34d952f..7fb3ec0c2e9 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java @@ -54,7 +54,7 @@ public class ESDirectoryReaderTests extends ESTestCase { iw.addDocument(doc); // open reader - ShardId shardId = new ShardId(new Index("fake"), 1); + ShardId shardId = new ShardId("fake", "_na_", 1); DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw, true), shardId); assertEquals(2, ir.numDocs()); assertEquals(1, ir.leaves().size()); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java index d6abcfe7735..fb839f9c49e 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java @@ -78,7 +78,7 @@ public class VersionsTests extends ESTestCase { public void testVersions() throws Exception { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1)); + DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND)); Document doc = new Document(); @@ -145,7 
+145,7 @@ public class VersionsTests extends ESTestCase { docs.add(doc); writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs); - DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1)); + DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5l)); assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(5l)); @@ -170,7 +170,7 @@ public class VersionsTests extends ESTestCase { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1)); + DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND)); Document doc = new Document(); @@ -281,7 +281,7 @@ public class VersionsTests extends ESTestCase { // Force merge and check versions iw.forceMerge(1, true); - final LeafReader ir = SlowCompositeReaderWrapper.wrap(ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw.getDirectory()), new ShardId("foo", 1))); + final LeafReader ir = SlowCompositeReaderWrapper.wrap(ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw.getDirectory()), new ShardId("foo", "_na_", 1))); final NumericDocValues versions = ir.getNumericDocValues(VersionFieldMapper.NAME); assertThat(versions, notNullValue()); for (int i = 0; i < ir.maxDoc(); ++i) { @@ -334,7 +334,7 @@ public class VersionsTests extends ESTestCase { assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6"))); assertEquals(size+1, Versions.lookupStates.size()); // now wrap the reader - DirectoryReader wrapped = ElasticsearchDirectoryReader.wrap(reader, new ShardId("bogus", 5)); + DirectoryReader wrapped = ElasticsearchDirectoryReader.wrap(reader, new ShardId("bogus", "_na_", 5)); assertEquals(87, Versions.loadVersion(wrapped, new Term(UidFieldMapper.NAME, "6"))); // same size map: core cache key is shared assertEquals(size+1, Versions.lookupStates.size()); diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index b8a21e1b678..0fb9b1a92fb 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -82,7 +82,7 @@ public class NetworkModuleTests extends ModuleTestCase { static class FakeRestHandler extends BaseRestHandler { public FakeRestHandler() { - super(null, null, null); + super(null, null); } @Override protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {} diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java index b59c8dd1cb6..57da614e689 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java @@ -20,6 
+20,7 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matcher; @@ -38,12 +39,13 @@ import static org.hamcrest.Matchers.lessThan; */ public class EsExecutorsTests extends ESTestCase { + private final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); private TimeUnit randomTimeUnit() { return TimeUnit.values()[between(0, TimeUnit.values().length - 1)]; } public void testFixedForcedExecution() throws Exception { - EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test")); + EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test"), threadContext); final CountDownLatch wait = new CountDownLatch(1); final CountDownLatch exec1Wait = new CountDownLatch(1); @@ -105,7 +107,7 @@ public class EsExecutorsTests extends ESTestCase { } public void testFixedRejected() throws Exception { - EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test")); + EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test"), threadContext); final CountDownLatch wait = new CountDownLatch(1); final CountDownLatch exec1Wait = new CountDownLatch(1); @@ -163,7 +165,7 @@ public class EsExecutorsTests extends ESTestCase { final int max = between(min + 1, 6); final ThreadBarrier barrier = new ThreadBarrier(max + 1); - ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test")); + ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test"), threadContext); assertThat("Min property", pool.getCorePoolSize(), equalTo(min)); assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max)); @@ -199,7 +201,7 @@ public class EsExecutorsTests extends ESTestCase { final int max = between(min + 1, 6); final ThreadBarrier barrier = new ThreadBarrier(max + 1); - final ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test")); + final ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test"), threadContext); assertThat("Min property", pool.getCorePoolSize(), equalTo(min)); assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max)); @@ -242,7 +244,7 @@ public class EsExecutorsTests extends ESTestCase { int queue = between(0, 100); int actions = queue + pool; final CountDownLatch latch = new CountDownLatch(1); - EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy")); + EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); try { for (int i = 0; i < actions; i++) { executor.execute(new Runnable() { @@ -321,4 +323,65 @@ public class EsExecutorsTests extends ESTestCase { assertThat(message, containsString("completed tasks = " + actions)); } } + + public void testInheritContext() throws InterruptedException { + int pool = between(1, 10); + int queue = between(0, 100); + final CountDownLatch latch = new CountDownLatch(1); + final 
CountDownLatch executed = new CountDownLatch(1); + + threadContext.putHeader("foo", "bar"); + final Integer one = new Integer(1); + threadContext.putTransient("foo", one); + EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); + try { + executor.execute(() -> { + try { + latch.await(); + } catch (InterruptedException e) { + fail(); + } + assertEquals(threadContext.getHeader("foo"), "bar"); + assertSame(threadContext.getTransient("foo"), one); + assertNull(threadContext.getHeader("bar")); + assertNull(threadContext.getTransient("bar")); + executed.countDown(); + }); + threadContext.putTransient("bar", "boom"); + threadContext.putHeader("bar", "boom"); + latch.countDown(); + executed.await(); + + } finally { + latch.countDown(); + terminate(executor); + } + } + + public void testGetTasks() throws InterruptedException { + int pool = between(1, 10); + int queue = between(0, 100); + final CountDownLatch latch = new CountDownLatch(1); + final CountDownLatch executed = new CountDownLatch(1); + EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); + try { + Runnable r = () -> { + latch.countDown(); + try { + executed.await(); + } catch (InterruptedException e) { + fail(); + } + }; + executor.execute(r); + latch.await(); + executor.getTasks().forEach((runnable) -> assertSame(runnable, r)); + executed.countDown(); + + } finally { + latch.countDown(); + terminate(executor); + } + + } } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java index 685e06afb16..50b7d5f775c 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -43,6 +44,9 @@ import static org.hamcrest.Matchers.is; * */ public class PrioritizedExecutorsTests extends ESTestCase { + + private final ThreadContext holder = new ThreadContext(Settings.EMPTY); + public void testPriorityQueue() throws Exception { PriorityBlockingQueue queue = new PriorityBlockingQueue<>(); List priorities = Arrays.asList(Priority.values()); @@ -63,7 +67,7 @@ public class PrioritizedExecutorsTests extends ESTestCase { } public void testSubmitPrioritizedExecutorWithRunnables() throws Exception { - ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName())); + ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder); List results = new ArrayList<>(8); CountDownLatch awaitingLatch = new CountDownLatch(1); CountDownLatch finishedLatch = new CountDownLatch(8); @@ -92,7 +96,7 @@ public class PrioritizedExecutorsTests extends ESTestCase { } public void testExecutePrioritizedExecutorWithRunnables() throws Exception { - ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName())); + ExecutorService executor = 
EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder); List results = new ArrayList<>(8); CountDownLatch awaitingLatch = new CountDownLatch(1); CountDownLatch finishedLatch = new CountDownLatch(8); @@ -121,7 +125,7 @@ public class PrioritizedExecutorsTests extends ESTestCase { } public void testSubmitPrioritizedExecutorWithCallables() throws Exception { - ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName())); + ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder); List results = new ArrayList<>(8); CountDownLatch awaitingLatch = new CountDownLatch(1); CountDownLatch finishedLatch = new CountDownLatch(8); @@ -150,7 +154,7 @@ public class PrioritizedExecutorsTests extends ESTestCase { } public void testSubmitPrioritizedExecutorWithMixed() throws Exception { - ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName())); + ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder); List results = new ArrayList<>(8); CountDownLatch awaitingLatch = new CountDownLatch(1); CountDownLatch finishedLatch = new CountDownLatch(8); @@ -180,7 +184,7 @@ public class PrioritizedExecutorsTests extends ESTestCase { public void testTimeout() throws Exception { ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor(EsExecutors.daemonThreadFactory(getTestName())); - PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName())); + PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder); final CountDownLatch invoked = new CountDownLatch(1); final CountDownLatch block = new CountDownLatch(1); executor.execute(new Runnable() { @@ -243,7 +247,7 @@ public class PrioritizedExecutorsTests extends ESTestCase { ThreadPool threadPool = new ThreadPool("test"); final ScheduledThreadPoolExecutor timer = (ScheduledThreadPoolExecutor) threadPool.scheduler(); final AtomicBoolean timeoutCalled = new AtomicBoolean(); - PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName())); + PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder); final CountDownLatch invoked = new CountDownLatch(1); executor.execute(new Runnable() { @Override diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java new file mode 100644 index 00000000000..cbf58bf9daa --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -0,0 +1,238 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.util.concurrent; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; + +public class ThreadContextTests extends ESTestCase { + + public void testStashContext() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("ctx.foo", new Integer(1)); + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + } + + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + } + + public void testStashAndMerge() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("ctx.foo", new Integer(1)); + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + HashMap toMerge = new HashMap<>(); + toMerge.put("foo", "baz"); + toMerge.put("simon", "says"); + try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(toMerge)) { + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals("says", threadContext.getHeader("simon")); + assertNull(threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + } + + assertNull(threadContext.getHeader("simon")); + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + } + + public void testStoreContext() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("ctx.foo", new Integer(1)); + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + ThreadContext.StoredContext storedContext = threadContext.newStoredContext(); + threadContext.putHeader("foo.bar", "baz"); + try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { + assertNull(threadContext.getHeader("foo")); + 
assertNull(threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + } + + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + assertEquals("baz", threadContext.getHeader("foo.bar")); + if (randomBoolean()) { + storedContext.restore(); + } else { + storedContext.close(); + } + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + assertNull(threadContext.getHeader("foo.bar")); + } + + public void testCopyHeaders() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.copyHeaders(Collections.emptyMap().entrySet()); + threadContext.copyHeaders(Collections.singletonMap("foo", "bar").entrySet()); + assertEquals("bar", threadContext.getHeader("foo")); + } + + public void testAccessClosed() throws IOException { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("ctx.foo", new Integer(1)); + + threadContext.close(); + try { + threadContext.getHeader("foo"); + fail(); + } catch (IllegalStateException ise) { + assertEquals("threadcontext is already closed", ise.getMessage()); + } + + try { + threadContext.putTransient("foo", new Object()); + fail(); + } catch (IllegalStateException ise) { + assertEquals("threadcontext is already closed", ise.getMessage()); + } + + try { + threadContext.putHeader("boom", "boom"); + fail(); + } catch (IllegalStateException ise) { + assertEquals("threadcontext is already closed", ise.getMessage()); + } + } + + public void testSerialize() throws IOException { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("ctx.foo", new Integer(1)); + BytesStreamOutput out = new BytesStreamOutput(); + threadContext.writeTo(out); + try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + + threadContext.readHeaders(StreamInput.wrap(out.bytes())); + assertEquals("bar", threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("ctx.foo")); + } + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + } + + public void testSerializeInDifferentContext() throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("ctx.foo", new Integer(1)); + + assertEquals("bar", threadContext.getHeader("foo")); + assertNotNull(threadContext.getTransient("ctx.foo")); + assertEquals("1", threadContext.getHeader("default")); + threadContext.writeTo(out); + } + { + Settings otherSettings = Settings.builder().put("request.headers.default", "5").build(); + 
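// Editor's note (not part of the patch): a minimal, hypothetical sketch of the stash/restore and
// serialization behaviour that the new ThreadContextTests above pin down. Only API calls visible in
// the diff are used; the class and method names below are illustrative.
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;

class ThreadContextUsageSketch {
    static void sketch() throws java.io.IOException {
        ThreadContext ctx = new ThreadContext(Settings.builder().put("request.headers.default", "1").build());
        ctx.putHeader("foo", "bar");      // headers are written by writeTo and read back by readHeaders
        ctx.putTransient("ctx.foo", 1);   // transients never leave the local context
        BytesStreamOutput out = new BytesStreamOutput();
        ctx.writeTo(out);
        try (ThreadContext.StoredContext restore = ctx.stashContext()) {
            // inside the stash only the default header from the settings is visible
            assert ctx.getHeader("foo") == null && "1".equals(ctx.getHeader("default"));
            ctx.readHeaders(StreamInput.wrap(out.bytes()));   // brings "foo" back, but not the transient
        } // closing the StoredContext restores "foo" and "ctx.foo"
    }
}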
ThreadContext otherhreadContext = new ThreadContext(otherSettings); + otherhreadContext.readHeaders(StreamInput.wrap(out.bytes())); + + assertEquals("bar", otherhreadContext.getHeader("foo")); + assertNull(otherhreadContext.getTransient("ctx.foo")); + assertEquals("1", otherhreadContext.getHeader("default")); + } + } + + public void testSerializeInDifferentContextNoDefaults() throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + { + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("ctx.foo", new Integer(1)); + + assertEquals("bar", threadContext.getHeader("foo")); + assertNotNull(threadContext.getTransient("ctx.foo")); + assertNull(threadContext.getHeader("default")); + threadContext.writeTo(out); + } + { + Settings otherSettings = Settings.builder().put("request.headers.default", "5").build(); + ThreadContext otherhreadContext = new ThreadContext(otherSettings); + otherhreadContext.readHeaders(StreamInput.wrap(out.bytes())); + + assertEquals("bar", otherhreadContext.getHeader("foo")); + assertNull(otherhreadContext.getTransient("ctx.foo")); + assertEquals("5", otherhreadContext.getHeader("default")); + } + } + + + public void testCanResetDefault() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putHeader("default", "2"); + assertEquals("2", threadContext.getHeader("default")); + } + + public void testStashAndMergeWithModifiedDefaults() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + HashMap toMerge = new HashMap<>(); + toMerge.put("default", "2"); + try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(toMerge)) { + assertEquals("2", threadContext.getHeader("default")); + } + + build = Settings.builder().put("request.headers.default", "1").build(); + threadContext = new ThreadContext(build); + threadContext.putHeader("default", "4"); + toMerge = new HashMap<>(); + toMerge.put("default", "2"); + try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(toMerge)) { + assertEquals("4", threadContext.getHeader("default")); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 4dcf6f55059..032b6142454 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -1043,7 +1043,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { CountDownLatch beginRelocationLatch = new CountDownLatch(1); CountDownLatch endRelocationLatch = new CountDownLatch(1); transportServiceNode2.addTracer(new IndicesStoreIntegrationIT.ReclocationStartEndTracer(logger, beginRelocationLatch, endRelocationLatch)); - internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2)).get(); + internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_2)).get(); // wait for relocation to start beginRelocationLatch.await(); disruption.startDisrupting(); diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java 
b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 1ead12ff432..e9113ce3612 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -87,21 +87,21 @@ public class NodeEnvironmentTests extends ESTestCase { public void testShardLock() throws IOException { final NodeEnvironment env = newNodeEnvironment(); - ShardLock fooLock = env.shardLock(new ShardId("foo", 0)); - assertEquals(new ShardId("foo", 0), fooLock.getShardId()); + ShardLock fooLock = env.shardLock(new ShardId("foo", "_na_", 0)); + assertEquals(new ShardId("foo", "_na_", 0), fooLock.getShardId()); try { - env.shardLock(new ShardId("foo", 0)); + env.shardLock(new ShardId("foo", "_na_", 0)); fail("shard is locked"); } catch (LockObtainFailedException ex) { // expected } - for (Path path : env.indexPaths(new Index("foo"))) { + for (Path path : env.indexPaths("foo")) { Files.createDirectories(path.resolve("0")); Files.createDirectories(path.resolve("1")); } try { - env.lockAllForIndex(new Index("foo"), idxSettings, randomIntBetween(0, 10)); + env.lockAllForIndex(new Index("foo", "_na_"), idxSettings, randomIntBetween(0, 10)); fail("shard 0 is locked"); } catch (LockObtainFailedException ex) { // expected @@ -109,11 +109,11 @@ public class NodeEnvironmentTests extends ESTestCase { fooLock.close(); // can lock again? - env.shardLock(new ShardId("foo", 0)).close(); + env.shardLock(new ShardId("foo", "_na_", 0)).close(); - List locks = env.lockAllForIndex(new Index("foo"), idxSettings, randomIntBetween(0, 10)); + List locks = env.lockAllForIndex(new Index("foo", "_na_"), idxSettings, randomIntBetween(0, 10)); try { - env.shardLock(new ShardId("foo", 0)); + env.shardLock(new ShardId("foo", "_na_", 0)); fail("shard is locked"); } catch (LockObtainFailedException ex) { // expected @@ -127,7 +127,7 @@ public class NodeEnvironmentTests extends ESTestCase { final NodeEnvironment env = newNodeEnvironment(); final int numIndices = randomIntBetween(1, 10); for (int i = 0; i < numIndices; i++) { - for (Path path : env.indexPaths(new Index("foo" + i))) { + for (Path path : env.indexPaths("foo" + i)) { Files.createDirectories(path); } } @@ -142,44 +142,44 @@ public class NodeEnvironmentTests extends ESTestCase { public void testDeleteSafe() throws IOException, InterruptedException { final NodeEnvironment env = newNodeEnvironment(); - ShardLock fooLock = env.shardLock(new ShardId("foo", 0)); - assertEquals(new ShardId("foo", 0), fooLock.getShardId()); + ShardLock fooLock = env.shardLock(new ShardId("foo", "_na_", 0)); + assertEquals(new ShardId("foo", "_na_", 0), fooLock.getShardId()); - for (Path path : env.indexPaths(new Index("foo"))) { + for (Path path : env.indexPaths("foo")) { Files.createDirectories(path.resolve("0")); Files.createDirectories(path.resolve("1")); } try { - env.deleteShardDirectorySafe(new ShardId("foo", 0), idxSettings); + env.deleteShardDirectorySafe(new ShardId("foo", "_na_", 0), idxSettings); fail("shard is locked"); } catch (LockObtainFailedException ex) { // expected } - for (Path path : env.indexPaths(new Index("foo"))) { + for (Path path : env.indexPaths("foo")) { assertTrue(Files.exists(path.resolve("0"))); assertTrue(Files.exists(path.resolve("1"))); } - env.deleteShardDirectorySafe(new ShardId("foo", 1), idxSettings); + env.deleteShardDirectorySafe(new ShardId("foo", "_na_", 1), idxSettings); - for (Path path : env.indexPaths(new Index("foo"))) { + for (Path path : env.indexPaths("foo")) { 
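// Editor's note (not part of the patch): a hypothetical sketch of the shard-locking idiom the
// NodeEnvironmentTests hunks around this point rely on, using the new ShardId(name, uuid, id)
// constructor; "_na_" stands in for an index whose uuid is not known.
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.shard.ShardId;

class ShardLockUsageSketch {
    static void sketch(NodeEnvironment env) throws java.io.IOException {
        try (ShardLock lock = env.shardLock(new ShardId("foo", "_na_", 0))) {
            // while this lock is held, a second shardLock() call for the same shard fails with
            // LockObtainFailedException, and deleteShardDirectorySafe() refuses to run
        } // releasing the lock re-enables safe deletion of the shard directory
    }
}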
assertTrue(Files.exists(path.resolve("0"))); assertFalse(Files.exists(path.resolve("1"))); } try { - env.deleteIndexDirectorySafe(new Index("foo"), randomIntBetween(0, 10), idxSettings); + env.deleteIndexDirectorySafe(new Index("foo", "_na_"), randomIntBetween(0, 10), idxSettings); fail("shard is locked"); } catch (LockObtainFailedException ex) { // expected } fooLock.close(); - for (Path path : env.indexPaths(new Index("foo"))) { + for (Path path : env.indexPaths("foo")) { assertTrue(Files.exists(path)); } @@ -200,7 +200,7 @@ public class NodeEnvironmentTests extends ESTestCase { @Override protected void doRun() throws Exception { start.await(); - try (ShardLock autoCloses = env.shardLock(new ShardId("foo", 0))) { + try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "_na_", 0))) { blockLatch.countDown(); Thread.sleep(randomIntBetween(1, 10)); } @@ -215,11 +215,11 @@ public class NodeEnvironmentTests extends ESTestCase { start.countDown(); blockLatch.await(); - env.deleteIndexDirectorySafe(new Index("foo"), 5000, idxSettings); + env.deleteIndexDirectorySafe(new Index("foo", "_na_"), 5000, idxSettings); assertNull(threadException.get()); - for (Path path : env.indexPaths(new Index("foo"))) { + for (Path path : env.indexPaths("foo")) { assertFalse(Files.exists(path)); } latch.await(); @@ -258,7 +258,7 @@ public class NodeEnvironmentTests extends ESTestCase { for (int i = 0; i < iters; i++) { int shard = randomIntBetween(0, counts.length - 1); try { - try (ShardLock autoCloses = env.shardLock(new ShardId("foo", shard), scaledRandomIntBetween(0, 10))) { + try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "_na_", shard), scaledRandomIntBetween(0, 10))) { counts[shard].value++; countsAtomic[shard].incrementAndGet(); assertEquals(flipFlop[shard].incrementAndGet(), 1); @@ -294,8 +294,8 @@ public class NodeEnvironmentTests extends ESTestCase { IndexSettings s1 = IndexSettingsModule.newIndexSettings("myindex", Settings.EMPTY); IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder().put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build()); - ShardId sid = new ShardId("myindex", 0); - Index i = new Index("myindex"); + Index index = new Index("myindex", "_na_"); + ShardId sid = new ShardId(index, 0); assertFalse("no settings should mean no custom data path", s1.hasCustomDataPath()); assertTrue("settings with path_data should have a custom data path", s2.hasCustomDataPath()); @@ -308,7 +308,7 @@ public class NodeEnvironmentTests extends ESTestCase { equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex/0"))); assertThat("index paths uses the regular template", - env.indexPaths(i), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex"))); + env.indexPaths(index.getName()), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex"))); env.close(); NodeEnvironment env2 = newNodeEnvironment(dataPaths, "/tmp", @@ -322,7 +322,7 @@ public class NodeEnvironmentTests extends ESTestCase { equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex/0"))); assertThat("index paths uses the regular template", - env2.indexPaths(i), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex"))); + env2.indexPaths(index.getName()), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex"))); env2.close(); } diff --git a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java index 
e81db454e02..c6431e6fcff 100644 --- a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java @@ -243,7 +243,7 @@ public class AsyncShardFetchTests extends ESTestCase { private AtomicInteger reroute = new AtomicInteger(); public TestFetch(ThreadPool threadPool) { - super(Loggers.getLogger(TestFetch.class), "test", new ShardId("test", 1), null); + super(Loggers.getLogger(TestFetch.class), "test", new ShardId("test", "_na_", 1), null); this.threadPool = threadPool; } diff --git a/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java b/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java index 6b28b7f7897..95c52f89933 100644 --- a/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java @@ -107,7 +107,7 @@ public class DanglingIndicesStateTests extends ESTestCase { IndexMetaData dangledIndex = IndexMetaData.builder("test1").settings(indexSettings).build(); metaStateService.writeIndex("test_write", dangledIndex, null); - for (Path path : env.indexPaths(new Index("test1"))) { + for (Path path : env.indexPaths("test1")) { Files.move(path, path.getParent().resolve("test1_renamed")); } diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 0de220a8fa3..422aea70134 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -183,7 +183,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { if (expectMetaData) { assertThat(indices.hasNext(), equalTo(true)); - assertThat(indices.next().getNewMetaData().getIndex(), equalTo("test")); + assertThat(indices.next().getNewMetaData().getIndex().getName(), equalTo("test")); assertThat(indices.hasNext(), equalTo(false)); } else { assertThat(indices.hasNext(), equalTo(false)); diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 98b62dc18ba..3d90d948ea6 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -357,7 +357,7 @@ public class MetaDataStateFormatTests extends ESTestCase { ImmutableOpenMap indices = loadedMetaData.indices(); assertThat(indices.size(), equalTo(latestMetaData.indices().size())); for (IndexMetaData original : latestMetaData) { - IndexMetaData deserialized = indices.get(original.getIndex()); + IndexMetaData deserialized = indices.get(original.getIndex().getName()); assertThat(deserialized, notNullValue()); assertThat(deserialized.getVersion(), equalTo(original.getVersion())); assertThat(deserialized.getNumberOfReplicas(), equalTo(original.getNumberOfReplicas())); diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java index d2f7bb888cd..bada7faa8c8 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java @@ -170,7 +170,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { private boolean indexDirectoryExists(String 
nodeName, String indexName) { NodeEnvironment nodeEnv = ((InternalTestCluster) cluster()).getInstance(NodeEnvironment.class, nodeName); - for (Path path : nodeEnv.indexPaths(new Index(indexName))) { + for (Path path : nodeEnv.indexPaths(indexName)) { if (Files.exists(path)) { return true; } diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index e5362aa84fc..a61354458ca 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -53,7 +53,7 @@ import static org.hamcrest.Matchers.equalTo; */ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { - private final ShardId shardId = new ShardId("test", 0); + private final ShardId shardId = new ShardId("test", "_na_", 0); private final DiscoveryNode node1 = newNode("node1"); private final DiscoveryNode node2 = newNode("node2"); private final DiscoveryNode node3 = newNode("node3"); @@ -298,12 +298,12 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { private RoutingAllocation getRestoreRoutingAllocation(AllocationDeciders allocationDeciders) { Version version = randomFrom(Version.CURRENT, Version.V_2_0_0); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)).numberOfShards(1).numberOfReplicas(0) + .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(version)).numberOfShards(1).numberOfReplicas(0) .putActiveAllocationIds(0, version == Version.CURRENT ? Sets.newHashSet("allocId") : Collections.emptySet())) .build(); RoutingTable routingTable = RoutingTable.builder() - .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), version, shardId.getIndex())) + .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), version, shardId.getIndexName())) .build(); ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) .metaData(metaData) @@ -366,14 +366,14 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { private RoutingAllocation getRecoverOnAnyNodeRoutingAllocation(AllocationDeciders allocationDeciders) { Version version = randomFrom(Version.CURRENT, Version.V_2_0_0); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version) + .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(version) .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) .put(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, true)) .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, version == Version.CURRENT ? 
Sets.newHashSet("allocId") : Collections.emptySet())) .build(); RoutingTable routingTable = RoutingTable.builder() - .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), Version.CURRENT, shardId.getIndex())) + .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), Version.CURRENT, shardId.getIndexName())) .build(); ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) .metaData(metaData) @@ -388,7 +388,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testEnoughCopiesFoundForAllocationOnLegacyIndex() { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2)) + .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2)) .build(); RoutingTable routingTable = RoutingTable.builder() .addAsRecovery(metaData.index(shardId.getIndex())) @@ -429,7 +429,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testEnoughCopiesFoundForAllocationOnLegacyIndexWithDifferentVersion() { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2)) + .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2)) .build(); RoutingTable routingTable = RoutingTable.builder() .addAsRecovery(metaData.index(shardId.getIndex())) @@ -466,7 +466,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { private RoutingAllocation routingAllocationWithOnePrimaryNoReplicas(AllocationDeciders deciders, boolean asNew, Version version, String... 
activeAllocationIds) { MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)) + .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(version)) .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, Sets.newHashSet(activeAllocationIds))) .build(); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); diff --git a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java index 3b7e62216ce..4231f1215f8 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java @@ -60,9 +60,9 @@ public class PriorityComparatorTests extends ESTestCase { }); RoutingNodes.UnassignedShards.UnassignedIterator iterator = shards.iterator(); ShardRouting next = iterator.next(); - assertEquals("newest", next.index()); + assertEquals("newest", next.getIndexName()); next = iterator.next(); - assertEquals("oldest", next.index()); + assertEquals("oldest", next.getIndexName()); assertFalse(iterator.hasNext()); } @@ -90,9 +90,9 @@ public class PriorityComparatorTests extends ESTestCase { }); RoutingNodes.UnassignedShards.UnassignedIterator iterator = shards.iterator(); ShardRouting next = iterator.next(); - assertEquals("oldest", next.index()); + assertEquals("oldest", next.getIndexName()); next = iterator.next(); - assertEquals("newest", next.index()); + assertEquals("newest", next.getIndexName()); assertFalse(iterator.hasNext()); } @@ -126,8 +126,8 @@ public class PriorityComparatorTests extends ESTestCase { ShardRouting previous = null; for (ShardRouting routing : shards) { if (previous != null) { - IndexMeta prevMeta = map.get(previous.getIndex()); - IndexMeta currentMeta = map.get(routing.getIndex()); + IndexMeta prevMeta = map.get(previous.getIndexName()); + IndexMeta currentMeta = map.get(routing.getIndexName()); if (prevMeta.priority == currentMeta.priority) { if (prevMeta.creationDate == currentMeta.creationDate) { if (prevMeta.name.equals(currentMeta.name) == false) { diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 17a3da6421f..cbfc9d34d59 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -64,7 +64,7 @@ import static org.hamcrest.Matchers.equalTo; /** */ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { - private final ShardId shardId = new ShardId("test", 0); + private final ShardId shardId = new ShardId("test", "_na_", 0); private final DiscoveryNode node1 = newNode("node1"); private final DiscoveryNode node2 = newNode("node2"); private final DiscoveryNode node3 = newNode("node3"); @@ -286,7 +286,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { private RoutingAllocation onePrimaryOnNode1And1Replica(AllocationDeciders deciders, Settings settings, UnassignedInfo.Reason reason) { ShardRouting primaryShard = TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT).put(settings)) + 
.put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(Version.CURRENT).put(settings)) .numberOfShards(1).numberOfReplicas(1) .putActiveAllocationIds(0, Sets.newHashSet(primaryShard.allocationId().getId()))) .build(); @@ -308,7 +308,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { private RoutingAllocation onePrimaryOnNode1And1ReplicaRecovering(AllocationDeciders deciders) { ShardRouting primaryShard = TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)) + .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(Version.CURRENT)) .numberOfShards(1).numberOfReplicas(1) .putActiveAllocationIds(0, Sets.newHashSet(primaryShard.allocationId().getId()))) .build(); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java index f02916c68ac..017eef345a7 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java @@ -83,7 +83,7 @@ public class NettyHttpChannelTests extends ESTestCase { Settings settings = Settings.builder() .put(NettyHttpServerTransport.SETTING_CORS_ENABLED.getKey(), true) .build(); - httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays); + httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays, threadPool); HttpRequest httpRequest = new TestHttpRequest(); httpRequest.headers().add(HttpHeaders.Names.ORIGIN, "remote"); httpRequest.headers().add(HttpHeaders.Names.USER_AGENT, "Mozilla fake"); @@ -107,7 +107,7 @@ public class NettyHttpChannelTests extends ESTestCase { .put(NettyHttpServerTransport.SETTING_CORS_ENABLED.getKey(), true) .put(NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN, "remote-host") .build(); - httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays); + httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays, threadPool); HttpRequest httpRequest = new TestHttpRequest(); httpRequest.headers().add(HttpHeaders.Names.ORIGIN, "remote"); httpRequest.headers().add(HttpHeaders.Names.USER_AGENT, "Mozilla fake"); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java index 95cb5b46b5f..6afe8a0aefc 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.netty.NettyHttpServerTransport.HttpChannelPipelineFactory; import org.elasticsearch.http.netty.pipelining.OrderedDownstreamChannelEvent; @@ -132,13 +133,13 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { private final ExecutorService executorService; public 
CustomNettyHttpServerTransport(Settings settings) { - super(settings, NettyHttpServerPipeliningTests.this.networkService, NettyHttpServerPipeliningTests.this.bigArrays); + super(settings, NettyHttpServerPipeliningTests.this.networkService, NettyHttpServerPipeliningTests.this.bigArrays, NettyHttpServerPipeliningTests.this.threadPool); this.executorService = Executors.newFixedThreadPool(5); } @Override public ChannelPipelineFactory configureServerChannelPipelineFactory() { - return new CustomHttpChannelPipelineFactory(this, executorService); + return new CustomHttpChannelPipelineFactory(this, executorService, NettyHttpServerPipeliningTests.this.threadPool.getThreadContext()); } @Override @@ -152,8 +153,8 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { private final ExecutorService executorService; - public CustomHttpChannelPipelineFactory(NettyHttpServerTransport transport, ExecutorService executorService) { - super(transport, randomBoolean()); + public CustomHttpChannelPipelineFactory(NettyHttpServerTransport transport, ExecutorService executorService, ThreadContext threadContext) { + super(transport, randomBoolean(), threadContext); this.executorService = executorService; } diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 582c4f4ebe7..e6701ab5bdb 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -120,9 +120,9 @@ public class IndexModuleTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - index = new Index("foo"); settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - indexSettings = IndexSettingsModule.newIndexSettings(index, settings); + indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); + index = indexSettings.getIndex(); environment = new Environment(settings); nodeServicesProvider = newNodeServiceProvider(settings, environment, null); nodeEnvironment = new NodeEnvironment(settings, environment); @@ -151,7 +151,6 @@ public class IndexModuleTests extends ESTestCase { public void testRegisterIndexStore() throws IOException { - final Index index = new Index("foo"); final Settings settings = Settings .builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -159,6 +158,7 @@ public class IndexModuleTests extends ESTestCase { .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store") .build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); + final Index index = indexSettings.getIndex(); IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); module.addIndexStore("foo_store", FooStore::new); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); @@ -221,7 +221,7 @@ public class IndexModuleTests extends ESTestCase { .put("index.similarity.my_similarity.key", "there is a key") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new 
AnalysisRegistry(null, environment)); module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() { @Override public String name() { @@ -249,7 +249,7 @@ public class IndexModuleTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); try { module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); } catch (IllegalArgumentException ex) { @@ -263,7 +263,7 @@ public class IndexModuleTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); try { module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); } catch (IllegalArgumentException ex) { @@ -275,7 +275,7 @@ public class IndexModuleTests extends ESTestCase { Settings indexSettings = Settings.settingsBuilder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); try { module.registerQueryCache("index", IndexQueryCache::new); fail("only once"); @@ -303,7 +303,7 @@ public class IndexModuleTests extends ESTestCase { .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), "custom") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); module.registerQueryCache("custom", (a, b) -> new CustomQueryCache()); try { module.registerQueryCache("custom", (a, b) -> new CustomQueryCache()); @@ -321,7 +321,7 @@ public class IndexModuleTests extends ESTestCase { Settings indexSettings = Settings.settingsBuilder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment)); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); 
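// Editor's note (not part of the patch): a hypothetical sketch of the setup pattern the
// IndexModuleTests hunks converge on — build IndexSettings from an index name via the test helper
// and read the Index (name plus uuid) back from it instead of calling new Index("foo") directly.
// The package of the IndexSettingsModule helper is assumed; it is not shown in the diff.
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.IndexSettingsModule;

class IndexSettingsUsageSketch {
    static Index sketch(Settings settings) {
        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings);
        return indexSettings.getIndex();   // carries both the index name and its uuid
    }
}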
assertTrue(indexService.cache().query() instanceof IndexQueryCache); indexService.close("simon says", false); @@ -339,7 +339,7 @@ public class IndexModuleTests extends ESTestCase { @Override public Index index() { - return new Index("test"); + return new Index("test", "_na_"); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 56179d5390b..09fec10a621 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -114,7 +114,7 @@ public class IndexSettingsTests extends ESTestCase { if (settings.length > 0) { settingSet.addAll(Arrays.asList(settings)); } - return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex()), new IndexScopedSettings(Settings.EMPTY, settingSet)); + return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), new IndexScopedSettings(Settings.EMPTY, settingSet)); } diff --git a/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java b/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java index 1e3e51c0838..9caf9790c7a 100644 --- a/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java @@ -37,7 +37,7 @@ import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta; import static org.hamcrest.Matchers.equalTo; public class MergePolicySettingsTests extends ESTestCase { - protected final ShardId shardId = new ShardId(new Index("index"), 1); + protected final ShardId shardId = new ShardId("index", "_na_", 1); public void testCompoundFileSettings() throws IOException { assertThat(new MergePolicyConfig(logger, indexSettings(Settings.EMPTY)).getMergePolicy().getNoCFSRatio(), equalTo(0.1)); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index 5da54158484..1eb1e93f09c 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -68,8 +68,7 @@ public class AnalysisModuleTests extends ModuleTestCase { } public AnalysisService getAnalysisService(AnalysisRegistry registry, Settings settings) throws IOException { - Index index = new Index("test"); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); return registry.build(idxSettings); } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java index 3dfb0975ab4..11ef2df40c8 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java @@ -58,7 +58,7 @@ public class AnalysisServiceTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, version) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); 
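// Editor's note (not part of the patch): the analysis tests around this point all share one
// construction path; this is a hypothetical, condensed sketch of it, assuming the same imports
// as those test classes.
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.test.IndexSettingsModule;

class AnalysisSetupSketch {
    static NamedAnalyzer sketch(Settings settings) throws java.io.IOException {
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
        AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
        return analysisService.analyzer("standard");   // any registered analyzer name works here
    }
}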
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); @@ -68,7 +68,7 @@ public class AnalysisServiceTests extends ESTestCase { public void testOverrideDefaultAnalyzer() throws IOException { Version version = VersionUtils.randomVersion(getRandom()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings), + AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), Collections.singletonMap("default", analyzerProvider("default")), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); @@ -80,7 +80,7 @@ public class AnalysisServiceTests extends ESTestCase { Version version = VersionUtils.randomVersionBetween(getRandom(), Version.V_3_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); try { - AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings), + AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), Collections.singletonMap("default_index", new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer())), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); fail("Expected ISE"); @@ -93,7 +93,7 @@ public class AnalysisServiceTests extends ESTestCase { public void testBackCompatOverrideDefaultIndexAnalyzer() { Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_3_0_0)); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings), + AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), Collections.singletonMap("default_index", analyzerProvider("default_index")), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); @@ -104,7 +104,7 @@ public class AnalysisServiceTests extends ESTestCase { public void testOverrideDefaultSearchAnalyzer() { Version version = VersionUtils.randomVersion(getRandom()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings), + AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), Collections.singletonMap("default_search", analyzerProvider("default_search")), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); @@ -118,7 +118,7 @@ public class AnalysisServiceTests extends ESTestCase { Map analyzers = new 
HashMap<>(); analyzers.put("default_index", analyzerProvider("default_index")); analyzers.put("default_search", analyzerProvider("default_search")); - AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings), + AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), analyzers, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); @@ -137,7 +137,7 @@ public class AnalysisServiceTests extends ESTestCase { .put("index.analysis.analyzer.custom_analyzer_1.tokenizer", "whitespace") .putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); try (NamedAnalyzer custom_analyser = analysisService.analyzer("custom_analyzer")) { assertNotNull(custom_analyser); @@ -182,7 +182,7 @@ public class AnalysisServiceTests extends ESTestCase { .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter") .put("index.analysis.analyzer.custom_analyzer_1.tokenizer", "whitespace") .putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); TokenFilterFactory word_delimiter = analysisService.tokenFilter("word_delimiter"); @@ -192,7 +192,7 @@ public class AnalysisServiceTests extends ESTestCase { assertSame(analysisService.tokenFilter("porterStem"), analysisService.tokenFilter("porter_stem")); //unconfigured - IndexSettings idxSettings1 = IndexSettingsModule.newIndexSettings(new Index("index"), settingsBuilder() + IndexSettings idxSettings1 = IndexSettingsModule.newIndexSettings("index", settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); AnalysisService analysisService1 = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings1); assertSame(analysisService1.tokenFilter("wordDelimiter"), analysisService1.tokenFilter("word_delimiter")); @@ -203,7 +203,7 @@ public class AnalysisServiceTests extends ESTestCase { Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); AnalysisService otherAnalysisSergice = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); final int numIters = randomIntBetween(5, 20); diff --git 
a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java index 7460ddd3e55..e1f9e36bbd9 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java @@ -45,11 +45,10 @@ public class AnalysisTestsHelper { public static AnalysisService createAnalysisServiceFromSettings( Settings settings) throws IOException { - Index index = new Index("test"); if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) { settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); } - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); Environment environment = new Environment(settings); return new AnalysisRegistry(new HunspellService(settings, environment, Collections.emptyMap()), environment).build(idxSettings); } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java index c39c6e702f4..dd2df7ed550 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java @@ -33,7 +33,6 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; */ public class CharFilterTests extends ESTokenStreamTestCase { public void testMappingCharFilter() throws Exception { - Index index = new Index("test"); Settings settings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.char_filter.my_mapping.type", "mapping") @@ -42,7 +41,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "my_mapping") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); @@ -53,14 +52,13 @@ public class CharFilterTests extends ESTokenStreamTestCase { } public void testHtmlStripCharFilter() throws Exception { - Index index = new Index("test"); Settings settings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard") .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "html_strip") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java index e00f5f67d87..fbedf42d083 
100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java @@ -50,9 +50,8 @@ import static org.hamcrest.Matchers.instanceOf; */ public class CompoundAnalysisTests extends ESTestCase { public void testDefaultsCompoundAnalysis() throws Exception { - Index index = new Index("test"); Settings settings = getJsonSettings(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings), Collections.emptyMap(),Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings); @@ -70,8 +69,7 @@ public class CompoundAnalysisTests extends ESTestCase { } private List analyze(Settings settings, String analyzerName, String text) throws IOException { - Index index = new Index("test"); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings), Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java index 0cb3abbd1ff..9d8efb1de4b 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java @@ -48,7 +48,7 @@ import static org.hamcrest.Matchers.instanceOf; public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { public void testParseTokenChars() { - final Index index = new Index("test"); + final Index index = new Index("test", "_na_"); final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().build(); IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(index, indexSettings); @@ -71,7 +71,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { } public void testNoTokenChars() throws IOException { - final Index index = new Index("test"); + final Index index = new Index("test", "_na_"); final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().build(); final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 4).putArray("token_chars", new String[0]).build(); @@ -82,7 +82,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { public void testPreTokenization() throws IOException { // Make sure that pretokenization works well and that it can be used even with token chars which are supplementary characters - final Index index = new Index("test"); + final Index index = new Index("test", "_na_"); final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().build(); Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3).put("token_chars", "letter,digit").build(); @@ -99,7 +99,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { public void testPreTokenizationEdge() throws IOException { // Make 
sure that pretokenization works well and that it can be used even with token chars which are supplementary characters - final Index index = new Index("test"); + final Index index = new Index("test", "_na_"); final String name = "ngr"; final Settings indexSettings = newAnalysisSettingsBuilder().build(); Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3).put("token_chars", "letter,digit").build(); @@ -117,7 +117,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase { public void testBackwardsCompatibilityEdgeNgramTokenFilter() throws Exception { int iters = scaledRandomIntBetween(20, 100); for (int i = 0; i < iters; i++) { - final Index index = new Index("test"); + final Index index = new Index("test", "_na_"); final String name = "ngr"; Version v = randomVersion(random()); if (v.onOrAfter(Version.V_0_90_2)) { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java index 8c6775a92ab..ed307f610bd 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java @@ -40,7 +40,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); NamedAnalyzer analyzer1 = analysisService.analyzer("single"); @@ -58,7 +58,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase { public void testNoPatterns() { try { - new PatternCaptureGroupTokenFilterFactory(IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY), null, "pattern_capture", settingsBuilder().put("pattern", "foobar").build()); + new PatternCaptureGroupTokenFilterFactory(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), null, "pattern_capture", settingsBuilder().put("pattern", "foobar").build()); fail ("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("required setting 'patterns' is missing")); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java index ebaf4cb5cc4..4d316424a48 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java @@ -38,7 +38,7 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); NamedAnalyzer analyzer1 = analysisService.analyzer("analyzer1"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java 
b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java index c4c664f222c..104d778860c 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java @@ -67,7 +67,7 @@ public class SynonymsAnalysisTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), home) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 69831d7471a..4f2f12e6cbb 100644 --- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -34,7 +34,6 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.join.BitSetProducer; -import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.Accountable; @@ -57,7 +56,7 @@ import static org.hamcrest.Matchers.equalTo; public class BitSetFilterCacheTests extends ESTestCase { - private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY); + private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("test", Settings.EMPTY); private final IndicesWarmer warmer = new IndicesWarmer(Settings.EMPTY, null); @@ -93,7 +92,7 @@ public class BitSetFilterCacheTests extends ESTestCase { writer.commit(); DirectoryReader reader = DirectoryReader.open(writer, false); - reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0)); + reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); IndexSearcher searcher = new IndexSearcher(reader); BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer, new BitsetFilterCache.Listener() { @@ -118,7 +117,7 @@ public class BitSetFilterCacheTests extends ESTestCase { writer.forceMerge(1); reader.close(); reader = DirectoryReader.open(writer, false); - reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0)); + reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); searcher = new IndexSearcher(reader); assertThat(matchCount(filter, reader), equalTo(3)); @@ -144,7 +143,7 @@ public class BitSetFilterCacheTests extends ESTestCase { writer.addDocument(document); writer.commit(); final DirectoryReader writerReader = DirectoryReader.open(writer, false); - final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", 0)); + final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", "_na_", 0)); final AtomicLong stats = new AtomicLong(); final AtomicInteger onCacheCalls = new AtomicInteger(); @@ -157,7 +156,7 @@ public class BitSetFilterCacheTests extends ESTestCase { 
stats.addAndGet(accountable.ramBytesUsed()); if (writerReader != reader) { assertNotNull(shardId); - assertEquals("test", shardId.index().name()); + assertEquals("test", shardId.getIndexName()); assertEquals(0, shardId.id()); } else { assertNull(shardId); @@ -170,7 +169,7 @@ public class BitSetFilterCacheTests extends ESTestCase { stats.addAndGet(-accountable.ramBytesUsed()); if (writerReader != reader) { assertNotNull(shardId); - assertEquals("test", shardId.index().name()); + assertEquals("test", shardId.getIndexName()); assertEquals(0, shardId.id()); } else { assertNull(shardId); @@ -218,7 +217,7 @@ public class BitSetFilterCacheTests extends ESTestCase { writer.addDocument(new Document()); DirectoryReader reader = DirectoryReader.open(writer, true); writer.close(); - reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test2"), 0)); + reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test2", "_na_", 0)); BitSetProducer producer = cache.getBitSetProducer(new MatchAllDocsQuery()); diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index c293237b5a0..3d912d41c38 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -108,7 +108,7 @@ public class CodecTests extends ESTestCase { Settings nodeSettings = settingsBuilder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - IndexSettings settings = IndexSettingsModule.newIndexSettings(new Index("_na"), nodeSettings); + IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings); SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap()); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(settings); MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 2b72018aa8e..30e0ff5cb38 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -134,8 +134,8 @@ import static org.hamcrest.Matchers.nullValue; public class InternalEngineTests extends ESTestCase { - protected final ShardId shardId = new ShardId(new Index("index"), 1); - private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.EMPTY); + protected final ShardId shardId = new ShardId(new Index("index", "_na_"), 1); + private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY); protected ThreadPool threadPool; @@ -275,7 +275,7 @@ public class InternalEngineTests extends ESTestCase { public void onFailedEngine(String reason, @Nullable Throwable t) { // we don't need to notify anybody in this test } - }, new TranslogHandler(shardId.index().getName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5)); + }, new TranslogHandler(shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5)); try { 
config.setCreate(Lucene.indexExists(store.directory()) == false); } catch (IOException e) { @@ -1879,10 +1879,10 @@ public class InternalEngineTests extends ESTestCase { public final AtomicInteger recoveredOps = new AtomicInteger(0); public TranslogHandler(String indexName, ESLogger logger) { - super(new ShardId("test", 0), null, logger); + super(new ShardId("test", "_na_", 0), null, logger); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test"); - Index index = new Index(indexName); + Index index = new Index(indexName, "_na_"); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 71ad0e16909..93a0b4345fa 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -88,7 +88,7 @@ import static org.hamcrest.Matchers.nullValue; */ public class ShadowEngineTests extends ESTestCase { - protected final ShardId shardId = new ShardId(new Index("index"), 1); + protected final ShardId shardId = new ShardId("index", "_na_", 1); protected ThreadPool threadPool; @@ -181,7 +181,7 @@ public class ShadowEngineTests extends ESTestCase { protected Store createStore(final Directory directory) throws IOException { - IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.index(), Settings.EMPTY); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY); final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { @Override public Directory newDirectory() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 07ae1e70a48..012e383ac1e 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -142,7 +142,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { if (readerContext != null) { readerContext.reader().close(); } - topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1)); + topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); LeafReader reader = SlowCompositeReaderWrapper.wrap(topLevelReader); readerContext = reader.getContext(); return readerContext; diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index 82b2cca79aa..ceb4ce66bcb 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -49,7 +49,6 @@ 
import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; @@ -389,7 +388,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI } } DirectoryReader directoryReader = DirectoryReader.open(writer, true); - directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("test"), 0)); + directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(directoryReader); IndexFieldData fieldData = getForField("text"); final Object missingValue; diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java index 35a74ea3849..241cbe0e681 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java @@ -67,7 +67,7 @@ public class FieldDataCacheTests extends ESTestCase { } } iw.close(); - DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId("_index", 0)); + DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId("_index", "_na_", 0)); DummyAccountingFieldDataCache fieldDataCache = new DummyAccountingFieldDataCache(); // Testing SortedSetDVOrdinalsIndexFieldData: diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index e8b0d03b049..da2899bf2c3 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -33,7 +33,6 @@ import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.Accountable; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.fielddata.plain.PagedBytesAtomicFieldData; import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; @@ -145,14 +144,14 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { writer.addDocument(doc); DirectoryReader open = DirectoryReader.open(writer, true); final boolean wrap = randomBoolean(); - final IndexReader reader = wrap ? ElasticsearchDirectoryReader.wrap(open, new ShardId("test", 1)) : open; + final IndexReader reader = wrap ? 
ElasticsearchDirectoryReader.wrap(open, new ShardId("test", "_na_", 1)) : open; final AtomicInteger onCacheCalled = new AtomicInteger(); final AtomicInteger onRemovalCalled = new AtomicInteger(); ifdService.setListener(new IndexFieldDataCache.Listener() { @Override public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) { if (wrap) { - assertEquals(new ShardId("test", 1), shardId); + assertEquals(new ShardId("test", "_na_", 1), shardId); } else { assertNull(shardId); } @@ -162,7 +161,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { @Override public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { if (wrap) { - assertEquals(new ShardId("test", 1), shardId); + assertEquals(new ShardId("test", "_na_", 1), shardId); } else { assertNull(shardId); } @@ -207,7 +206,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { ThreadPool threadPool = new ThreadPool("random_threadpool_name"); try { IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null, threadPool); - IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY), cache, null, null); + IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), cache, null, null); ft.setName("some_long"); ft.setHasDocValues(true); ifds.getForField(ft); // no exception @@ -240,7 +239,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { StringFieldMapper.StringFieldType ft = new StringFieldMapper.StringFieldType(); try { IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null, threadPool); - IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY), cache, null, null); + IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), cache, null, null); ft.setName("some_str"); ft.setFieldDataType(new FieldDataType("string", Settings.builder().put(FieldDataType.FORMAT_KEY, "disabled").build())); try { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java index f8859efa025..748dd0a0a1a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -73,7 +73,7 @@ public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { transport = new LocalTransport(settings, THREAD_POOL, Version.CURRENT, new NamedWriteableRegistry()); transportService = new TransportService(transport, THREAD_POOL); indicesService = getInstanceFromNode(IndicesService.class); - shardStateAction = new ShardStateAction(settings, clusterService, transportService, null, null); + shardStateAction = new ShardStateAction(settings, clusterService, transportService, null, null, THREAD_POOL); actionFilters = new ActionFilters(Collections.emptySet()); indexNameExpressionResolver = new IndexNameExpressionResolver(settings); autoCreateIndex = new AutoCreateIndex(settings, indexNameExpressionResolver); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index 
7aba2997b20..9f2b33b96b9 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -192,11 +192,11 @@ public abstract class AbstractQueryTestCase> .build(); Settings indexSettings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - index = new Index(randomAsciiOfLengthBetween(1, 10)); + index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); final TestClusterService clusterService = new TestClusterService(); clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put( - new IndexMetaData.Builder(index.name()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); + new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); final Client proxy = (Client) Proxy.newProxyInstance( diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java index 65dfd8a5af4..4ef84d118fd 100644 --- a/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.AbstractFieldDataTestCase; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource; @@ -220,7 +219,7 @@ public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldD MultiValueMode sortMode = MultiValueMode.SUM; DirectoryReader directoryReader = DirectoryReader.open(writer, false); - directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("test"), 0)); + directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(directoryReader); Query parentFilter = new TermQuery(new Term("__type", "parent")); Query childFilter = Queries.not(parentFilter); diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index a58fea831d5..ff82b7c43ac 100644 --- a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -43,7 +43,6 @@ import org.apache.lucene.util.TestUtil; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.AbstractFieldDataTestCase; import 
org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -95,7 +94,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase { MultiValueMode sortMode = randomFrom(Arrays.asList(MultiValueMode.MIN, MultiValueMode.MAX)); DirectoryReader reader = DirectoryReader.open(writer, false); - reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0)); + reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(reader); PagedBytesIndexFieldData indexFieldData1 = getForField("f"); IndexFieldData indexFieldData2 = NoOrdinalsStringFieldDataTests.hideOrdinals(indexFieldData1); @@ -280,7 +279,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase { MultiValueMode sortMode = MultiValueMode.MIN; DirectoryReader reader = DirectoryReader.open(writer, false); - reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0)); + reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(reader); PagedBytesIndexFieldData indexFieldData = getForField("field2"); Query parentFilter = new TermQuery(new Term("__type", "parent")); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java index b7f2dd09f3c..cf95f22ae3b 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java @@ -56,7 +56,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1)); + DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); final AtomicInteger closeCalls = new AtomicInteger(0); @@ -106,7 +106,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1)); + DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); searcher.setSimilarity(iwc.getSimilarity()); @@ -148,7 +148,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); doc.add(new TextField("field", "doc", random().nextBoolean() ? 
Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1)); + DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); searcher.setSimilarity(iwc.getSimilarity()); @@ -168,7 +168,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1)); + DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = new IndexSearcher(open); assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); searcher.setSimilarity(iwc.getSimilarity()); @@ -258,7 +258,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { public DirectoryReader getDelegate() { if (hideDelegate) { try { - return ElasticsearchDirectoryReader.wrap(super.getDelegate(), new ShardId("foo", 1)); + return ElasticsearchDirectoryReader.wrap(super.getDelegate(), new ShardId("foo", "_na_", 1)); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index fd32091d891..e84b4546ce0 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -137,7 +137,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { public void testWriteShardState() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - ShardId id = new ShardId("foo", 1); + ShardId id = new ShardId("foo", "_na_", 1); long version = between(1, Integer.MAX_VALUE / 2); boolean primary = randomBoolean(); AllocationId allocationId = randomBoolean() ? 
null : randomAllocationId(); @@ -163,7 +163,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class); - Path[] shardPaths = env.availableShardPaths(new ShardId("test", 0)); + Path[] shardPaths = env.availableShardPaths(new ShardId("test", "_na_", 0)); logger.info("--> paths: [{}]", (Object)shardPaths); // Should not be able to acquire the lock because it's already open try { @@ -175,7 +175,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { // Test without the regular shard lock to assume we can acquire it // (worst case, meaning that the shard lock could be acquired and // we're green to delete the shard's directory) - ShardLock sLock = new DummyShardLock(new ShardId("test", 0)); + ShardLock sLock = new DummyShardLock(new ShardId("test", "_na_", 0)); try { env.deleteShardDirectoryUnderLock(sLock, IndexSettingsModule.newIndexSettings("test", Settings.EMPTY)); fail("should not have been able to delete the directory"); @@ -250,7 +250,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(shard.shardId)); assertEquals(shardStateMetaData, getShardStateMetadata(shard)); - routing = TestShardRouting.newShardRouting(shard.shardId.index().getName(), shard.shardId.id(), routing.currentNodeId(), null, routing.primary(), ShardRoutingState.INITIALIZING, shard.shardRouting.allocationId(), shard.shardRouting.version() + 1); + routing = TestShardRouting.newShardRouting(shard.shardId.getIndexName(), shard.shardId.id(), routing.currentNodeId(), null, routing.primary(), ShardRoutingState.INITIALIZING, shard.shardRouting.allocationId(), shard.shardRouting.version() + 1); shard.updateRoutingEntry(routing, true); shard.deleteShardState(); @@ -274,7 +274,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardPath shardPath = ShardPath.loadShardPath(logger, env, shard.shardId(), test.getIndexSettings()); assertNotNull(shardPath); // but index can't be opened for a failed shard - assertThat("store index should be corrupted", Store.canOpenIndex(logger, shardPath.resolveIndex()), equalTo(false)); + assertThat("store index should be corrupted", Store.canOpenIndex(logger, shardPath.resolveIndex(), shard.shardId()), equalTo(false)); } ShardStateMetaData getShardStateMetadata(IndexShard shard) { @@ -407,7 +407,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } private void setDurability(IndexShard shard, Translog.Durability durability) { - client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), durability.name()).build()).get(); + client().admin().indices().prepareUpdateSettings(shard.shardId.getIndexName()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), durability.name()).build()).get(); assertEquals(durability, shard.getTranslogDurability()); } diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java index 911f2598f03..f1515cd559b 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java @@ -49,7 +49,7 @@ import java.util.Map; /** Separate test class from ShardPathTests because we need static (BeforeClass) setup 
to install mock filesystems... */ public class NewPathForShardTests extends ESTestCase { - private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.EMPTY); + private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY); // Sneakiness to install mock file stores so we can pretend how much free space we have on each path.data: private static MockFileStore aFileStore = new MockFileStore("mocka"); @@ -182,7 +182,7 @@ public class NewPathForShardTests extends ESTestCase { aFileStore.usableSpace = 100000; bFileStore.usableSpace = 1000; - ShardId shardId = new ShardId("index", 0); + ShardId shardId = new ShardId("index", "_na_", 0); ShardPath result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, Collections.emptyMap()); assertTrue(result.getDataPath().toString().contains(aPathPart)); @@ -190,7 +190,7 @@ public class NewPathForShardTests extends ESTestCase { aFileStore.usableSpace = 1000; bFileStore.usableSpace = 100000; - shardId = new ShardId("index", 0); + shardId = new ShardId("index", "_na_", 0); result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, Collections.emptyMap()); assertTrue(result.getDataPath().toString().contains(bPathPart)); diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index 80d5f4c8fe9..011b4ad3588 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -42,14 +42,14 @@ public class ShardPathTests extends ESTestCase { Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); Settings settings = builder.build(); - ShardId shardId = new ShardId("foo", 0); + ShardId shardId = new ShardId("foo", "_na_", 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path); - ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.index(), settings)); + ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); assertEquals(path, shardPath.getDataPath()); assertEquals("0xDEADBEEF", shardPath.getIndexUUID()); - assertEquals("foo", shardPath.getShardId().getIndex()); + assertEquals("foo", shardPath.getShardId().getIndexName()); assertEquals(path.resolve("translog"), shardPath.resolveTranslog()); assertEquals(path.resolve("index"), shardPath.resolveIndex()); } @@ -60,12 +60,12 @@ public class ShardPathTests extends ESTestCase { Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); Settings settings = builder.build(); - ShardId shardId = new ShardId("foo", 0); + ShardId shardId = new ShardId("foo", "_na_", 0); Path[] paths = env.availableShardPaths(shardId); assumeTrue("This test tests multi data.path but we only got one", paths.length > 1); int id = randomIntBetween(1, 10); ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), id, paths); - ShardPath.loadShardPath(logger, env, shardId, 
IndexSettingsModule.newIndexSettings(shardId.index(), settings)); + ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); fail("Expected IllegalStateException"); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString("more than one shard state found")); @@ -77,12 +77,12 @@ public class ShardPathTests extends ESTestCase { Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "foobar") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); Settings settings = builder.build(); - ShardId shardId = new ShardId("foo", 0); + ShardId shardId = new ShardId("foo", "_na_", 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); int id = randomIntBetween(1, 10); ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), id, path); - ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.index(), settings)); + ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); fail("Expected IllegalStateException"); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString("expected: foobar on shard path")); @@ -92,7 +92,7 @@ public class ShardPathTests extends ESTestCase { public void testIllegalCustomDataPath() { final Path path = createTempDir().resolve("foo").resolve("0"); try { - new ShardPath(true, path, path, "foo", new ShardId("foo", 0)); + new ShardPath(true, path, path, "foo", new ShardId("foo", "_na_", 0)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), is("shard state path must be different to the data path when using custom data paths")); @@ -101,7 +101,7 @@ public class ShardPathTests extends ESTestCase { public void testValidCtor() { final Path path = createTempDir().resolve("foo").resolve("0"); - ShardPath shardPath = new ShardPath(false, path, path, "foo", new ShardId("foo", 0)); + ShardPath shardPath = new ShardPath(false, path, path, "foo", new ShardId("foo", "_na_", 0)); assertFalse(shardPath.isCustomDataPath()); assertEquals(shardPath.getDataPath(), path); assertEquals(shardPath.getShardStatePath(), path); @@ -132,11 +132,11 @@ public class ShardPathTests extends ESTestCase { nodeSettings = Settings.EMPTY; } try (final NodeEnvironment env = newNodeEnvironment(nodeSettings)) { - ShardId shardId = new ShardId("foo", 0); + ShardId shardId = new ShardId("foo", "_na_", 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path); - ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.index(), indexSetttings)); + ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSetttings)); boolean found = false; for (Path p : env.nodeDataPaths()) { if (p.equals(shardPath.getRootStatePath())) { diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java index a59dcb49aca..105179a1f53 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java @@ -38,7 +38,7 @@ public class 
ShardUtilsTests extends ESTestCase { BaseDirectoryWrapper dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); writer.commit(); - ShardId id = new ShardId("foo", random().nextInt()); + ShardId id = new ShardId("foo", "_na_", random().nextInt()); try (DirectoryReader reader = DirectoryReader.open(writer, random().nextBoolean())) { ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id); assertEquals(id, ShardUtils.extractShardId(wrap)); diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java index 214c86a498a..d9000e23a61 100644 --- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java @@ -52,8 +52,8 @@ public class IndexStoreTests extends ESTestCase { final IndexModule.Type type = RandomPicks.randomFrom(random(), values); Settings settings = Settings.settingsBuilder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), type.name().toLowerCase(Locale.ROOT)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo"), settings); - FsDirectoryService service = new FsDirectoryService(indexSettings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", 0))); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); + FsDirectoryService service = new FsDirectoryService(indexSettings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", "_na_", 0))); try (final Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) { switch (type) { case NIOFS: @@ -85,7 +85,7 @@ public class IndexStoreTests extends ESTestCase { public void testStoreDirectoryDefault() throws IOException { final Path tempDir = createTempDir().resolve("foo").resolve("0"); - FsDirectoryService service = new FsDirectoryService(IndexSettingsModule.newIndexSettings(new Index("foo"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()), null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", 0))); + FsDirectoryService service = new FsDirectoryService(IndexSettingsModule.newIndexSettings("foo", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()), null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", "_na_", 0))); try (final Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) { if (Constants.WINDOWS) { assertTrue(directory.toString(), directory instanceof MMapDirectory || directory instanceof SimpleFSDirectory); @@ -100,7 +100,7 @@ public class IndexStoreTests extends ESTestCase { public void testUpdateThrottleType() throws IOException { Settings settings = Settings.settingsBuilder().put(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING.getKey(), "all") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo"), settings); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings); IndexStoreConfig indexStoreConfig = new IndexStoreConfig(settings); IndexStore store = new IndexStore(indexSettings, indexStoreConfig); assertEquals(StoreRateLimiting.Type.NONE, store.rateLimiting().getType()); diff --git 
a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index ed98fc1bacd..8b11e6b4867 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -109,10 +109,10 @@ import static org.hamcrest.Matchers.nullValue; public class StoreTests extends ESTestCase { - private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build()); + private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build()); public void testRefCount() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); IndexSettings indexSettings = INDEX_SETTINGS; @@ -361,7 +361,7 @@ public class StoreTests extends ESTestCase { // agree on the oldest version of a segment. We should fix this test by // switching to a static bw index public void testWriteLegacyChecksums() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); // set default codec - all segments need checksums @@ -445,7 +445,7 @@ public class StoreTests extends ESTestCase { } public void testNewChecksums() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); // set default codec - all segments need checksums @@ -504,7 +504,7 @@ public class StoreTests extends ESTestCase { } public void testMixedChecksums() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); // this time random codec.... 
@@ -595,7 +595,7 @@ public class StoreTests extends ESTestCase { } public void testRenameFile() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random(), false); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); { @@ -867,7 +867,7 @@ public class StoreTests extends ESTestCase { IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random)).setCodec(TestUtil.getDefaultCodec()); iwc.setMergePolicy(NoMergePolicy.INSTANCE); iwc.setUseCompoundFile(random.nextBoolean()); - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); IndexWriter writer = new IndexWriter(store.directory(), iwc); @@ -897,7 +897,7 @@ public class StoreTests extends ESTestCase { IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random)).setCodec(TestUtil.getDefaultCodec()); iwc.setMergePolicy(NoMergePolicy.INSTANCE); iwc.setUseCompoundFile(random.nextBoolean()); - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random); store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); IndexWriter writer = new IndexWriter(store.directory(), iwc); @@ -995,7 +995,7 @@ public class StoreTests extends ESTestCase { } public void testCleanupFromSnapshot() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); // this time random codec.... 
@@ -1109,7 +1109,7 @@ public class StoreTests extends ESTestCase { metaDataMap.put("_0_1.del", new StoreFileMetaData("_0_1.del", 42, "foobarbaz", null, new BytesRef())); Store.MetadataSnapshot snapshot = new Store.MetadataSnapshot(unmodifiableMap(metaDataMap), emptyMap(), 0); - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); for (String file : metaDataMap.keySet()) { @@ -1126,7 +1126,7 @@ public class StoreTests extends ESTestCase { } public void testOnCloseCallback() throws IOException { - final ShardId shardId = new ShardId(new Index(randomRealisticUnicodeOfCodepointLengthBetween(1, 10)), randomIntBetween(0, 100)); + final ShardId shardId = new ShardId(new Index(randomRealisticUnicodeOfCodepointLengthBetween(1, 10), "_na_"), randomIntBetween(0, 100)); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); final AtomicInteger count = new AtomicInteger(0); final ShardLock lock = new DummyShardLock(shardId); @@ -1150,12 +1150,12 @@ public class StoreTests extends ESTestCase { } public void testStoreStats() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(0)).build(); - Store store = new Store(shardId, IndexSettingsModule.newIndexSettings(new Index("index"), settings), directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, IndexSettingsModule.newIndexSettings("index", settings), directoryService, new DummyShardLock(shardId)); long initialStoreSize = 0; for (String extraFiles : store.directory().listAll()) { assertTrue("expected extraFS file but got: " + extraFiles, extraFiles.startsWith("extra")); @@ -1244,7 +1244,7 @@ public class StoreTests extends ESTestCase { } public void testUserDataRead() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec()); @@ -1279,7 +1279,7 @@ public class StoreTests extends ESTestCase { public void testStreamStoreFilesMetaData() throws Exception { Store.MetadataSnapshot metadataSnapshot = createMetaDataSnapshot(); - TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(randomBoolean(), new ShardId("test", 0),metadataSnapshot); + TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(randomBoolean(), new ShardId("test", "_na_", 0),metadataSnapshot); ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); org.elasticsearch.Version targetNodeVersion = 
randomVersion(random()); @@ -1298,7 +1298,7 @@ public class StoreTests extends ESTestCase { public void testMarkCorruptedOnTruncatedSegmentsFile() throws IOException { IndexWriterConfig iwc = newIndexWriterConfig(); - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); IndexWriter writer = new IndexWriter(store.directory(), iwc); @@ -1342,19 +1342,19 @@ public class StoreTests extends ESTestCase { } public void testCanOpenIndex() throws IOException { + final ShardId shardId = new ShardId("index", "_na_", 1); IndexWriterConfig iwc = newIndexWriterConfig(); Path tempDir = createTempDir(); final BaseDirectoryWrapper dir = newFSDirectory(tempDir); - assertFalse(Store.canOpenIndex(logger, tempDir)); + assertFalse(Store.canOpenIndex(logger, tempDir,shardId)); IndexWriter writer = new IndexWriter(dir, iwc); Document doc = new Document(); doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); writer.commit(); writer.close(); - assertTrue(Store.canOpenIndex(logger, tempDir)); + assertTrue(Store.canOpenIndex(logger, tempDir, shardId)); - final ShardId shardId = new ShardId(new Index("index"), 1); DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { @Override public long throttleTimeInNanos() { @@ -1368,12 +1368,12 @@ public class StoreTests extends ESTestCase { }; Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); store.markStoreCorrupted(new CorruptIndexException("foo", "bar")); - assertFalse(Store.canOpenIndex(logger, tempDir)); + assertFalse(Store.canOpenIndex(logger, tempDir, shardId)); store.close(); } public void testDeserializeCorruptionException() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { @Override @@ -1413,7 +1413,7 @@ public class StoreTests extends ESTestCase { } public void testCanReadOldCorruptionMarker() throws IOException { - final ShardId shardId = new ShardId(new Index("index"), 1); + final ShardId shardId = new ShardId("index", "_na_", 1); final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { @Override diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index b410c81f4bb..a46e12837a0 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -91,7 +91,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; @LuceneTestCase.SuppressFileSystems("ExtrasFS") public class TranslogTests extends ESTestCase { - protected final ShardId shardId = new ShardId(new Index("index"), 1); + protected final ShardId shardId = new ShardId("index", "_na_", 1); protected Translog translog; protected Path translogDir; @@ -141,7 +141,7 @@ public class TranslogTests extends ESTestCase { 
.put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) .build(); ByteSizeValue bufferSize = randomBoolean() ? TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); - return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize); + return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.getIndex(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize); } protected void addToTranslogAndList(Translog translog, ArrayList list, Translog.Operation op) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java index ed8c27892b9..f1f8a8222cb 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java @@ -130,7 +130,7 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase { throw new RuntimeException("FAIL"); } }); - client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("index1", 0), node1, node2)).get(); + client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("index1", 0, node1, node2)).get(); ensureGreen("index1"); ClusterState state = client().admin().cluster().prepareState().get().getState(); List shard = state.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 522ebfb0f3b..e9f1f6be518 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -64,8 +64,8 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 4)) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 3)) .build()); - assertFalse("shard on shared filesystem", indicesService.canDeleteIndexContents(new Index("test"), idxSettings, false)); - assertTrue("shard on shared filesystem and closed", indicesService.canDeleteIndexContents(new Index("test"), idxSettings, true)); + assertFalse("shard on shared filesystem", indicesService.canDeleteIndexContents(idxSettings.getIndex(), idxSettings, false)); + assertTrue("shard on shared filesystem and closed", indicesService.canDeleteIndexContents(idxSettings.getIndex(), idxSettings, true)); } public void testCanDeleteShardContent() { @@ -73,12 +73,12 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { IndexMetaData meta = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas( 1).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); - assertFalse("no shard location", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings)); + assertFalse("no shard location", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings)); IndexService test = createIndex("test"); assertTrue(test.hasShard(0)); - assertFalse("shard is allocated", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings)); + assertFalse("shard is allocated", indicesService.canDeleteShardContent(new ShardId("test", 
"_na_", 0), indexSettings)); test.removeShard(0, "boom"); - assertTrue("shard is removed", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings)); + assertTrue("shard is removed", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings)); } public void testDeleteIndexStore() throws Exception { @@ -175,7 +175,7 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { if (randomBoolean()) { indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings()); indicesService.addPendingDelete(new ShardId(test.index(), 1), test.getIndexSettings()); - indicesService.addPendingDelete(new ShardId("bogus", 1), test.getIndexSettings()); + indicesService.addPendingDelete(new ShardId("bogus", "_na_", 1), test.getIndexSettings()); assertEquals(indicesService.numPendingDeletes(test.index()), 2); // shard lock released... we can now delete indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS)); diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java index ad5b37a2f53..ff5dc9ad708 100644 --- a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java @@ -86,7 +86,7 @@ public class IndicesQueryCacheTests extends ESTestCase { w.addDocument(new Document()); DirectoryReader r = DirectoryReader.open(w, false); w.close(); - ShardId shard = new ShardId(new Index("index"), 0); + ShardId shard = new ShardId("index", "_na_", 0); r = ElasticsearchDirectoryReader.wrap(r, shard); IndexSearcher s = new IndexSearcher(r); s.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); @@ -156,7 +156,7 @@ public class IndicesQueryCacheTests extends ESTestCase { w1.addDocument(new Document()); DirectoryReader r1 = DirectoryReader.open(w1, false); w1.close(); - ShardId shard1 = new ShardId(new Index("index"), 0); + ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); IndexSearcher s1 = new IndexSearcher(r1); s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); @@ -166,7 +166,7 @@ public class IndicesQueryCacheTests extends ESTestCase { w2.addDocument(new Document()); DirectoryReader r2 = DirectoryReader.open(w2, false); w2.close(); - ShardId shard2 = new ShardId(new Index("index"), 1); + ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); IndexSearcher s2 = new IndexSearcher(r2); s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); @@ -281,7 +281,7 @@ public class IndicesQueryCacheTests extends ESTestCase { w1.addDocument(new Document()); DirectoryReader r1 = DirectoryReader.open(w1, false); w1.close(); - ShardId shard1 = new ShardId(new Index("index"), 0); + ShardId shard1 = new ShardId("index", "_na_", 0); r1 = ElasticsearchDirectoryReader.wrap(r1, shard1); IndexSearcher s1 = new IndexSearcher(r1); s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); @@ -291,7 +291,7 @@ public class IndicesQueryCacheTests extends ESTestCase { w2.addDocument(new Document()); DirectoryReader r2 = DirectoryReader.open(w2, false); w2.close(); - ShardId shard2 = new ShardId(new Index("index"), 1); + ShardId shard2 = new ShardId("index", "_na_", 1); r2 = ElasticsearchDirectoryReader.wrap(r2, shard2); IndexSearcher s2 = new IndexSearcher(r2); 
s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index 46767073404..d74e490fdbc 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.ShardId; @@ -90,6 +91,8 @@ public class FlushIT extends ESIntegTestCase { prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).get(); ensureGreen(); + final Index index = client().admin().cluster().prepareState().get().getState().metaData().index("test").getIndex(); + IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test"); for (ShardStats shardStats : indexStats.getShards()) { assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID)); @@ -98,7 +101,7 @@ public class FlushIT extends ESIntegTestCase { ShardsSyncedFlushResult result; if (randomBoolean()) { logger.info("--> sync flushing shard 0"); - result = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), new ShardId("test", 0)); + result = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), new ShardId(index, 0)); } else { logger.info("--> sync flushing index [test]"); SyncedFlushResponse indicesResult = client().admin().indices().prepareSyncedFlush("test").get(); @@ -121,7 +124,7 @@ public class FlushIT extends ESIntegTestCase { ShardRouting shardRouting = clusterState.getRoutingTable().index("test").shard(0).iterator().next(); String currentNodeName = clusterState.nodes().resolveNode(shardRouting.currentNodeId()).name(); assertFalse(currentNodeName.equals(newNodeName)); - internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), currentNodeName, newNodeName)).get(); + internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, currentNodeName, newNodeName)).get(); client().admin().cluster().prepareHealth() .setWaitForRelocatingShards(0) diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index e4c9cb8a7ef..c30a5adaaca 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -133,7 +133,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); - flushService.attemptSyncedFlush(new ShardId("test", 1), listener); + flushService.attemptSyncedFlush(new ShardId("test", "_na_", 1), listener); listener.latch.await(); assertNotNull(listener.error); assertNull(listener.result); @@ -151,7 +151,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { assertEquals("closed", listener.error.getMessage()); listener = new 
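Note: FlushIT now resolves the concrete Index (name plus generated UUID) from the cluster state before addressing a shard, rather than spelling out the name. A sketch of that lookup, assuming an index called "test" already exists in the cluster:

    // Fetch the Index, including its generated UUID, from the cluster state metadata.
    Index index = client().admin().cluster().prepareState().get()
            .getState().metaData().index("test").getIndex();
    ShardId shard0 = new ShardId(index, 0);   // carries the real UUID, not "_na_"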
SyncedFlushUtil.LatchedListener(); - flushService.attemptSyncedFlush(new ShardId("index not found", 0), listener); + flushService.attemptSyncedFlush(new ShardId("index not found", "_na_", 0), listener); listener.latch.await(); assertNotNull(listener.error); assertNull(listener.result); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 88ccf99f6f1..52c21902939 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -258,7 +258,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> move shard from: {} to: {}", nodeA, nodeB); client().admin().cluster().prepareReroute() - .add(new MoveAllocationCommand(new ShardId(INDEX_NAME, 0), nodeA, nodeB)) + .add(new MoveAllocationCommand(INDEX_NAME, 0, nodeA, nodeB)) .execute().actionGet().getState(); logger.info("--> waiting for recovery to start both on source and target"); @@ -384,7 +384,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> move replica shard from: {} to: {}", nodeA, nodeC); client().admin().cluster().prepareReroute() - .add(new MoveAllocationCommand(new ShardId(INDEX_NAME, 0), nodeA, nodeC)) + .add(new MoveAllocationCommand(INDEX_NAME, 0, nodeA, nodeC)) .execute().actionGet().getState(); response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index a64b8606aea..c8cad5be296 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -58,7 +58,7 @@ import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; public class RecoverySourceHandlerTests extends ESTestCase { - private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build()); + private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build()); private final ShardId shardId = new ShardId(INDEX_SETTINGS.getIndex(), 1); private final ClusterSettings service = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java index 4c1a6420bfd..2f0b3a297eb 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java @@ -346,7 +346,7 @@ public class RecoveryStateTests extends ESTestCase { stages[i] = stages[j]; stages[j] = t; try { - RecoveryState state = new RecoveryState(new ShardId("bla", 0), randomBoolean(), randomFrom(Type.values()), discoveryNode, discoveryNode); + RecoveryState state = new RecoveryState(new ShardId("bla", "_na_", 0), randomBoolean(), randomFrom(Type.values()), discoveryNode, discoveryNode); for (Stage stage : stages) { 
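Note: RecoverySourceHandlerTests switches to the String overload of IndexSettingsModule.newIndexSettings, so callers no longer construct an Index by hand. A sketch, assuming the same settings as the hunk above:

    IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("index",
            Settings.settingsBuilder()
                    .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT)
                    .build());
    ShardId shardId = new ShardId(indexSettings.getIndex(), 1);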
state.setStage(stage); } @@ -360,7 +360,7 @@ public class RecoveryStateTests extends ESTestCase { i = randomIntBetween(1, stages.length - 1); ArrayList list = new ArrayList<>(Arrays.asList(Arrays.copyOfRange(stages, 0, i))); list.addAll(Arrays.asList(stages)); - RecoveryState state = new RecoveryState(new ShardId("bla", 0), randomBoolean(), randomFrom(Type.values()), discoveryNode, discoveryNode); + RecoveryState state = new RecoveryState(new ShardId("bla", "_na_", 0), randomBoolean(), randomFrom(Type.values()), discoveryNode, discoveryNode); for (Stage stage : list) { state.setStage(stage); } diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java index 4ad298e39a3..6c20bfc6781 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java @@ -40,7 +40,7 @@ public class StartRecoveryRequestTests extends ESTestCase { public void testSerialization() throws Exception { Version targetNodeVersion = randomVersion(random()); StartRecoveryRequest outRequest = new StartRecoveryRequest( - new ShardId("test", 0), + new ShardId("test", "_na_", 0), new DiscoveryNode("a", new LocalTransportAddress("1"), targetNodeVersion), new DiscoveryNode("b", new LocalTransportAddress("1"), targetNodeVersion), true, diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index b4260bcfe46..dcf3dbaf4bc 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -144,7 +144,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { CountDownLatch beginRelocationLatch = new CountDownLatch(1); CountDownLatch endRelocationLatch = new CountDownLatch(1); transportServiceNode3.addTracer(new ReclocationStartEndTracer(logger, beginRelocationLatch, endRelocationLatch)); - internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_3)).get(); + internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_3)).get(); // wait for relocation to start beginRelocationLatch.await(); disruption.startDisrupting(); @@ -154,7 +154,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { sleep(50); disruption.stopDisrupting(); } else { - internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_3)).get(); + internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_3)).get(); } clusterHealth = client().admin().cluster().prepareHealth() .setWaitForRelocatingShards(0) @@ -209,7 +209,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { }); logger.info("--> move shard from {} to {}, and wait for relocation to finish", node_1, node_2); - internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2)).get(); + internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_2)).get(); shardActiveRequestSent.await(); ClusterHealthResponse 
clusterHealth = client().admin().cluster().prepareHealth() .setWaitForRelocatingShards(0) @@ -384,7 +384,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { waitNoPendingTasksOnAll(); ClusterStateResponse stateResponse = client().admin().cluster().prepareState().get(); - + final Index index = stateResponse.getState().metaData().index("test").getIndex(); RoutingNode routingNode = stateResponse.getState().getRoutingNodes().node(nonMasterId); final int[] node2Shards = new int[routingNode.numberOfOwningShards()]; int i = 0; @@ -408,10 +408,10 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { internalCluster().getInstance(ClusterService.class, nonMasterNode).submitStateUpdateTask("test", new ClusterStateUpdateTask(Priority.IMMEDIATE) { @Override public ClusterState execute(ClusterState currentState) throws Exception { - IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder("test"); + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); for (int i = 0; i < numShards; i++) { indexRoutingTableBuilder.addIndexShard( - new IndexShardRoutingTable.Builder(new ShardId("test", i)) + new IndexShardRoutingTable.Builder(new ShardId(index, i)) .addShard(TestShardRouting.newShardRouting("test", i, masterId, true, ShardRoutingState.STARTED, shardVersions[shardIds[i]])) .build() ); @@ -439,14 +439,14 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { private Path indexDirectory(String server, String index) { NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server); - final Path[] paths = env.indexPaths(new Index(index)); + final Path[] paths = env.indexPaths(index); assert paths.length == 1; return paths[0]; } private Path shardDirectory(String server, String index, int shard) { NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server); - final Path[] paths = env.availableShardPaths(new ShardId(index, shard)); + final Path[] paths = env.availableShardPaths(new ShardId(index, "_na_", shard)); assert paths.length == 1; return paths[0]; } diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java index ec6a3b38491..223e486dd28 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java @@ -30,9 +30,12 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.TestClusterService; +import org.elasticsearch.transport.TransportService; import org.junit.Before; import java.util.Arrays; @@ -60,7 +63,7 @@ public class IndicesStoreTests extends ESTestCase { @Before public void before() { localNode = new DiscoveryNode("abc", new LocalTransportAddress("abc"), Version.CURRENT); - indicesStore = new IndicesStore(); + indicesStore = new IndicesStore(Settings.EMPTY, null, new TestClusterService(), new TransportService(null, null), null); } public void testShardCanBeDeletedNoShardRouting() throws Exception { @@ -69,7 +72,7 @@ public class 
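Note: the directory helpers in IndicesStoreIntegrationIT show the same migration at the NodeEnvironment level: indexPaths now takes the index name as a plain String, while availableShardPaths still expects a full ShardId. A sketch, where nodeName is a hypothetical node from the test cluster:

    NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, nodeName);
    Path[] indexPaths = env.indexPaths("test");                                   // by index name
    Path[] shardPaths = env.availableShardPaths(new ShardId("test", "_na_", 0));  // still wants a ShardId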
IndicesStoreTests extends ESTestCase { ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1)); + IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); assertFalse(indicesStore.shardCanBeDeleted(clusterState.build(), routingTable.build())); } @@ -80,7 +83,7 @@ public class IndicesStoreTests extends ESTestCase { ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1)); + IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); for (int i = 0; i < numShards; i++) { int unStartedShard = randomInt(numReplicas); @@ -108,7 +111,7 @@ public class IndicesStoreTests extends ESTestCase { ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.id()).put(localNode).put(new DiscoveryNode("xyz", new LocalTransportAddress("xyz"), Version.CURRENT))); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1)); + IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); int localShardId = randomInt(numShards - 1); for (int i = 0; i < numShards; i++) { String nodeId = i == localShardId ? localNode.getId() : randomBoolean() ? "abc" : "xyz"; @@ -130,7 +133,7 @@ public class IndicesStoreTests extends ESTestCase { ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.id()).put(localNode)); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1)); + IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); for (int i = 0; i < numShards; i++) { String relocatingNodeId = randomBoolean() ? 
null : "def"; routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", relocatingNodeId, true, ShardRoutingState.STARTED, 0)); @@ -152,7 +155,7 @@ public class IndicesStoreTests extends ESTestCase { ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test")); clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas))); clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.id()).put(localNode).put(new DiscoveryNode("xyz", new LocalTransportAddress("xyz"), nodeVersion))); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1)); + IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); for (int i = 0; i < numShards; i++) { routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", null, true, ShardRoutingState.STARTED, 0)); for (int j = 0; j < numReplicas; j++) { @@ -177,7 +180,7 @@ public class IndicesStoreTests extends ESTestCase { .put(new DiscoveryNode("xyz", new LocalTransportAddress("xyz"), Version.CURRENT)) .put(new DiscoveryNode("def", new LocalTransportAddress("def"), nodeVersion) // <-- only set relocating, since we're testing that in this test )); - IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1)); + IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1)); for (int i = 0; i < numShards; i++) { routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", "def", true, ShardRoutingState.STARTED, 0)); for (int j = 0; j < numReplicas; j++) { diff --git a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java index ee0f8748083..8e0d5a882c3 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java @@ -57,7 +57,7 @@ public class IndexTemplateFilteringIT extends ESIntegTestCase { .setTemplate("no_match") .addMapping("type3", "field3", "type=string").get(); - assertAcked(prepareCreate("test").putHeader("header_test", "header_value")); + assertAcked(prepareCreate("test")); GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); assertThat(response, notNullValue()); @@ -70,7 +70,7 @@ public class IndexTemplateFilteringIT extends ESIntegTestCase { @Override public boolean apply(CreateIndexClusterStateUpdateRequest request, IndexTemplateMetaData template) { //make sure that no_match template is filtered out before the custom filters as it doesn't match the index name - return (template.name().equals("template2") || template.name().equals("no_match")) && request.originalMessage().getHeader("header_test").equals("header_value"); + return (template.name().equals("template2") || template.name().equals("no_match")); } } diff --git a/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java b/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java index b661e785329..21be585862d 100644 --- a/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java +++ b/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java @@ -59,7 +59,7 @@ public class SimpleMgetIT extends ESIntegTestCase { 
assertThat(mgetResponse.getResponses()[1].getIndex(), is("nonExistingIndex")); assertThat(mgetResponse.getResponses()[1].isFailed(), is(true)); assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), is("no such index")); - assertThat(((ElasticsearchException)mgetResponse.getResponses()[1].getFailure().getFailure()).getIndex(), is("nonExistingIndex")); + assertThat(((ElasticsearchException) mgetResponse.getResponses()[1].getFailure().getFailure()).getIndex().getName(), is("nonExistingIndex")); mgetResponse = client().prepareMultiGet() @@ -69,7 +69,7 @@ public class SimpleMgetIT extends ESIntegTestCase { assertThat(mgetResponse.getResponses()[0].getIndex(), is("nonExistingIndex")); assertThat(mgetResponse.getResponses()[0].isFailed(), is(true)); assertThat(mgetResponse.getResponses()[0].getFailure().getMessage(), is("no such index")); - assertThat(((ElasticsearchException)mgetResponse.getResponses()[0].getFailure().getFailure()).getIndex(), is("nonExistingIndex")); + assertThat(((ElasticsearchException) mgetResponse.getResponses()[0].getFailure().getFailure()).getIndex().getName(), is("nonExistingIndex")); } diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 442b1afa6ee..204dcefea7d 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -60,13 +60,13 @@ public class InternalSettingsPreparerTests extends ESTestCase { public void testEmptySettings() { Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY); assertNotNull(settings.get("name")); // a name was set - assertNotNull(settings.get(ClusterName.SETTING)); // a cluster name was set + assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set int size = settings.names().size(); Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null); settings = env.settings(); assertNotNull(settings.get("name")); // a name was set - assertNotNull(settings.get(ClusterName.SETTING)); // a cluster name was set + assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size()); String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings); String configDir = env.configFile().toString(); @@ -75,9 +75,9 @@ public class InternalSettingsPreparerTests extends ESTestCase { public void testClusterNameDefault() { Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY); - assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.SETTING)); + assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); settings = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null).settings(); - assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.SETTING)); + assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); } public void testReplacePromptPlaceholders() { diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java index eecc71f388a..ab0e86fd2af 100644 --- 
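Note: SimpleMgetIT adapts to ElasticsearchException.getIndex() now returning an Index object rather than a String, and InternalSettingsPreparerTests reads the cluster name through its Setting key. A sketch of both assertions, assuming the Hamcrest matchers the tests already use:

    // getIndex() is now an Index, so compare its name explicitly.
    ElasticsearchException failure =
            (ElasticsearchException) mgetResponse.getResponses()[1].getFailure().getFailure();
    assertThat(failure.getIndex().getName(), is("nonExistingIndex"));

    // The cluster name is now declared as a Setting; use its key when reading raw settings.
    assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey()));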
a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java @@ -92,7 +92,7 @@ public class PercolateDocumentParserTests extends ESTestCase { parser = new PercolateDocumentParser(highlightPhase, new SortParseElement(), aggregationPhase); request = Mockito.mock(PercolateShardRequest.class); - Mockito.when(request.shardId()).thenReturn(new ShardId(new Index("_index"), 0)); + Mockito.when(request.shardId()).thenReturn(new ShardId("_index", "_na_", 0)); Mockito.when(request.documentType()).thenReturn("type"); } @@ -104,7 +104,7 @@ public class PercolateDocumentParserTests extends ESTestCase { .endObject(); Mockito.when(request.source()).thenReturn(source.bytes()); - PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService); + PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService); ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext); assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1")); } @@ -123,7 +123,7 @@ public class PercolateDocumentParserTests extends ESTestCase { .endObject(); Mockito.when(request.source()).thenReturn(source.bytes()); - PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService); + PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService); ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext); assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1")); assertThat(context.percolateQuery(), equalTo(new TermQuery(new Term("field1", "value1")))); @@ -147,7 +147,7 @@ public class PercolateDocumentParserTests extends ESTestCase { Mockito.when(request.source()).thenReturn(source.bytes()); Mockito.when(request.docSource()).thenReturn(docSource.bytes()); - PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService); + PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService); ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext); assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1")); assertThat(context.percolateQuery(), equalTo(new TermQuery(new Term("field1", "value1")))); @@ -174,7 +174,7 @@ public class PercolateDocumentParserTests extends ESTestCase { Mockito.when(request.source()).thenReturn(source.bytes()); Mockito.when(request.docSource()).thenReturn(docSource.bytes()); - PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService); + PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService); try { parser.parse(request, context, mapperService, queryShardContext); } catch (IllegalArgumentException e) { diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java index d9c784da2b1..05a4a156a01 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java +++ 
b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java @@ -42,22 +42,15 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.analysis.AnalyzerProvider; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; +import org.elasticsearch.index.percolator.ExtractQueryTermsService; import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; -import org.elasticsearch.index.percolator.ExtractQueryTermsService; +import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.test.ESTestCase; @@ -65,7 +58,6 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; -import java.util.Collections; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; @@ -93,9 +85,12 @@ public class PercolatorServiceTests extends ESTestCase { public void testCount() throws Exception { PercolateContext context = mock(PercolateContext.class); - when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", "_index", 0)); + when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", new Index("_index", "_na_"), 0)); when(context.percolatorTypeFilter()).thenReturn(new MatchAllDocsQuery()); when(context.isOnlyCount()).thenReturn(true); + IndexShard shard = mock(IndexShard.class); + when(shard.shardId()).thenReturn(new ShardId("_index", "_na_", 0)); + when(context.indexShard()).thenReturn(shard); PercolatorQueriesRegistry registry = createRegistry(); addPercolatorQuery("1", new TermQuery(new Term("field", "brown")), indexWriter, registry); @@ -118,9 +113,12 @@ public class PercolatorServiceTests extends ESTestCase { public void testTopMatching() throws Exception { PercolateContext context = mock(PercolateContext.class); - when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", "_index", 0)); + when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", new Index("_index", "_na_"), 0)); when(context.percolatorTypeFilter()).thenReturn(new MatchAllDocsQuery()); when(context.size()).thenReturn(10); + IndexShard shard = mock(IndexShard.class); + when(shard.shardId()).thenReturn(new ShardId("_index", "_na_", 0)); + when(context.indexShard()).thenReturn(shard); PercolatorQueriesRegistry registry = createRegistry(); addPercolatorQuery("1", new TermQuery(new Term("field", "brown")), indexWriter, registry); @@ -158,7 +156,7 @@ public class PercolatorServiceTests extends ESTestCase { } PercolatorQueriesRegistry createRegistry() { - Index index = new Index("_index"); + Index index = new Index("_index", "_na_"); IndexSettings indexSettings = new IndexSettings(new 
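Note: PercolatorServiceTests now also stubs an IndexShard on the mocked PercolateContext, presumably because the service resolves the shard id through it, and SearchShardTarget takes an Index instead of a bare name. A sketch of the extra Mockito wiring, mirroring the hunk above:

    PercolateContext context = mock(PercolateContext.class);
    when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", new Index("_index", "_na_"), 0));

    IndexShard shard = mock(IndexShard.class);
    when(shard.shardId()).thenReturn(new ShardId("_index", "_na_", 0));
    when(context.indexShard()).thenReturn(shard);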
IndexMetaData.Builder("_index").settings( Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java index 4b1645a4ec6..39432bd01ea 100644 --- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java +++ b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java @@ -33,7 +33,7 @@ public class TestResponseHeaderRestAction extends BaseRestHandler { @Inject public TestResponseHeaderRestAction(Settings settings, RestController controller, Client client) { - super(settings, controller, client); + super(settings, client); controller.registerHandler(RestRequest.Method.GET, "/_protected", this); } diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 4a3479958db..2d0c5079fd0 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -130,7 +130,7 @@ public class RelocationIT extends ESIntegTestCase { logger.info("--> relocate the shard from node1 to node2"); client().admin().cluster().prepareReroute() - .add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2)) + .add(new MoveAllocationCommand("test", 0, node_1, node_2)) .execute().actionGet(); clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForRelocatingShards(0).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); @@ -190,7 +190,7 @@ public class RelocationIT extends ESIntegTestCase { indexer.continueIndexing(numDocs); logger.info("--> START relocate the shard from {} to {}", nodes[fromNode], nodes[toNode]); client().admin().cluster().prepareReroute() - .add(new MoveAllocationCommand(new ShardId("test", 0), nodes[fromNode], nodes[toNode])) + .add(new MoveAllocationCommand("test", 0, nodes[fromNode], nodes[toNode])) .get(); if (rarely()) { logger.debug("--> flushing"); @@ -319,7 +319,7 @@ public class RelocationIT extends ESIntegTestCase { client().admin().cluster().prepareReroute() - .add(new MoveAllocationCommand(new ShardId("test", 0), nodes[fromNode], nodes[toNode])) + .add(new MoveAllocationCommand("test", 0, nodes[fromNode], nodes[toNode])) .get(); @@ -406,7 +406,7 @@ public class RelocationIT extends ESIntegTestCase { logger.info("--> verifying no temporary recoveries are left"); for (String node : internalCluster().getNodeNames()) { NodeEnvironment nodeEnvironment = internalCluster().getInstance(NodeEnvironment.class, node); - for (final Path shardLoc : nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))) { + for (final Path shardLoc : nodeEnvironment.availableShardPaths(new ShardId(indexName, "_na_", 0))) { if (Files.exists(shardLoc)) { assertBusy(new Runnable() { @Override diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index 0a40da3403d..78da0e1e1e0 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.Index; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -134,9 +135,9 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new DetailedExceptionRestChannel(request); ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 1)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 1)); ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", "foo", 2)); + new SearchShardTarget("node_1", new Index("foo", "_na_"), 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1}); BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex)); String text = response.content().toUtf8(); diff --git a/core/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java b/core/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java deleted file mode 100644 index 238e16d4b44..00000000000 --- a/core/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java +++ /dev/null @@ -1,425 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.rest; - -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.Requests; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.is; - -public class HeadersAndContextCopyClientTests extends ESTestCase { - - public void testRegisterRelevantHeaders() throws InterruptedException { - - final RestController restController = new RestController(Settings.EMPTY); - - int iterations = randomIntBetween(1, 5); - - Set headers = new HashSet<>(); - ExecutorService executorService = Executors.newFixedThreadPool(iterations); - for (int i = 0; i < iterations; i++) { - int headersCount = randomInt(10); - final Set newHeaders = new HashSet<>(); - for (int j = 0; j < headersCount; j++) { - String usefulHeader = randomRealisticUnicodeOfLengthBetween(1, 30); - newHeaders.add(usefulHeader); - } - headers.addAll(newHeaders); - - executorService.submit(new Runnable() { - @Override - public void run() { - restController.registerRelevantHeaders(newHeaders.toArray(new String[newHeaders.size()])); - } - }); - } - - executorService.shutdown(); - assertThat(executorService.awaitTermination(1, TimeUnit.SECONDS), equalTo(true)); - String[] relevantHeaders = restController.relevantHeaders().toArray(new String[restController.relevantHeaders().size()]); - assertThat(relevantHeaders.length, equalTo(headers.size())); - - Arrays.sort(relevantHeaders); - String[] headersArray = new String[headers.size()]; - headersArray = headers.toArray(headersArray); - Arrays.sort(headersArray); - assertThat(relevantHeaders, equalTo(headersArray)); - } - - public void testCopyHeadersRequest() { - Map transportHeaders = randomHeaders(randomIntBetween(0, 10)); - Map restHeaders = randomHeaders(randomIntBetween(0, 10)); - Map copiedHeaders = randomHeadersFrom(restHeaders); - Set usefulRestHeaders = new HashSet<>(copiedHeaders.keySet()); - usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet()); - Map restContext = randomContext(randomIntBetween(0, 10)); - Map transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext); - - Map expectedHeaders = new HashMap<>(); - expectedHeaders.putAll(transportHeaders); - expectedHeaders.putAll(copiedHeaders); - - Map expectedContext = new HashMap<>(); - 
expectedContext.putAll(transportContext); - expectedContext.putAll(restContext); - - try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { - - SearchRequest searchRequest = Requests.searchRequest(); - putHeaders(searchRequest, transportHeaders); - putContext(searchRequest, transportContext); - assertHeaders(searchRequest, transportHeaders); - client.search(searchRequest); - assertHeaders(searchRequest, expectedHeaders); - assertContext(searchRequest, expectedContext); - - GetRequest getRequest = Requests.getRequest("index"); - putHeaders(getRequest, transportHeaders); - putContext(getRequest, transportContext); - assertHeaders(getRequest, transportHeaders); - client.get(getRequest); - assertHeaders(getRequest, expectedHeaders); - assertContext(getRequest, expectedContext); - - IndexRequest indexRequest = Requests.indexRequest(); - putHeaders(indexRequest, transportHeaders); - putContext(indexRequest, transportContext); - assertHeaders(indexRequest, transportHeaders); - client.index(indexRequest); - assertHeaders(indexRequest, expectedHeaders); - assertContext(indexRequest, expectedContext); - } - } - - public void testCopyHeadersClusterAdminRequest() { - Map transportHeaders = randomHeaders(randomIntBetween(0, 10)); - Map restHeaders = randomHeaders(randomIntBetween(0, 10)); - Map copiedHeaders = randomHeadersFrom(restHeaders); - Set usefulRestHeaders = new HashSet<>(copiedHeaders.keySet()); - usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet()); - Map restContext = randomContext(randomIntBetween(0, 10)); - Map transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext); - - HashMap expectedHeaders = new HashMap<>(); - expectedHeaders.putAll(transportHeaders); - expectedHeaders.putAll(copiedHeaders); - - Map expectedContext = new HashMap<>(); - expectedContext.putAll(transportContext); - expectedContext.putAll(restContext); - - try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, expectedContext), usefulRestHeaders)) { - - ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest(); - putHeaders(clusterHealthRequest, transportHeaders); - putContext(clusterHealthRequest, transportContext); - assertHeaders(clusterHealthRequest, transportHeaders); - client.admin().cluster().health(clusterHealthRequest); - assertHeaders(clusterHealthRequest, expectedHeaders); - assertContext(clusterHealthRequest, expectedContext); - - ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest(); - putHeaders(clusterStateRequest, transportHeaders); - putContext(clusterStateRequest, transportContext); - assertHeaders(clusterStateRequest, transportHeaders); - client.admin().cluster().state(clusterStateRequest); - assertHeaders(clusterStateRequest, expectedHeaders); - assertContext(clusterStateRequest, expectedContext); - - ClusterStatsRequest clusterStatsRequest = Requests.clusterStatsRequest(); - putHeaders(clusterStatsRequest, transportHeaders); - putContext(clusterStatsRequest, transportContext); - assertHeaders(clusterStatsRequest, transportHeaders); - client.admin().cluster().clusterStats(clusterStatsRequest); - assertHeaders(clusterStatsRequest, expectedHeaders); - assertContext(clusterStatsRequest, expectedContext); - } - } - - public void testCopyHeadersIndicesAdminRequest() { - Map transportHeaders = randomHeaders(randomIntBetween(0, 10)); - Map restHeaders = randomHeaders(randomIntBetween(0, 10)); - Map 
copiedHeaders = randomHeadersFrom(restHeaders); - Set usefulRestHeaders = new HashSet<>(copiedHeaders.keySet()); - usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet()); - Map restContext = randomContext(randomIntBetween(0, 10)); - Map transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext); - - HashMap expectedHeaders = new HashMap<>(); - expectedHeaders.putAll(transportHeaders); - expectedHeaders.putAll(copiedHeaders); - - Map expectedContext = new HashMap<>(); - expectedContext.putAll(transportContext); - expectedContext.putAll(restContext); - - try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { - - CreateIndexRequest createIndexRequest = Requests.createIndexRequest("test"); - putHeaders(createIndexRequest, transportHeaders); - putContext(createIndexRequest, transportContext); - assertHeaders(createIndexRequest, transportHeaders); - client.admin().indices().create(createIndexRequest); - assertHeaders(createIndexRequest, expectedHeaders); - assertContext(createIndexRequest, expectedContext); - - CloseIndexRequest closeIndexRequest = Requests.closeIndexRequest("test"); - putHeaders(closeIndexRequest, transportHeaders); - putContext(closeIndexRequest, transportContext); - assertHeaders(closeIndexRequest, transportHeaders); - client.admin().indices().close(closeIndexRequest); - assertHeaders(closeIndexRequest, expectedHeaders); - assertContext(closeIndexRequest, expectedContext); - - FlushRequest flushRequest = Requests.flushRequest(); - putHeaders(flushRequest, transportHeaders); - putContext(flushRequest, transportContext); - assertHeaders(flushRequest, transportHeaders); - client.admin().indices().flush(flushRequest); - assertHeaders(flushRequest, expectedHeaders); - assertContext(flushRequest, expectedContext); - } - } - - public void testCopyHeadersRequestBuilder() { - Map transportHeaders = randomHeaders(randomIntBetween(0, 10)); - Map restHeaders = randomHeaders(randomIntBetween(0, 10)); - Map copiedHeaders = randomHeadersFrom(restHeaders); - Set usefulRestHeaders = new HashSet<>(copiedHeaders.keySet()); - usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet()); - Map restContext = randomContext(randomIntBetween(0, 10)); - Map transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext); - - HashMap expectedHeaders = new HashMap<>(); - expectedHeaders.putAll(transportHeaders); - expectedHeaders.putAll(copiedHeaders); - - Map expectedContext = new HashMap<>(); - expectedContext.putAll(transportContext); - expectedContext.putAll(restContext); - - try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { - - ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ - client.prepareIndex("index", "type"), - client.prepareGet("index", "type", "id"), - client.prepareBulk(), - client.prepareDelete(), - client.prepareIndex(), - client.prepareClearScroll(), - client.prepareMultiGet(), - }; - - for (ActionRequestBuilder requestBuilder : requestBuilders) { - putHeaders(requestBuilder.request(), transportHeaders); - putContext(requestBuilder.request(), transportContext); - assertHeaders(requestBuilder.request(), transportHeaders); - requestBuilder.get(); - assertHeaders(requestBuilder.request(), expectedHeaders); - assertContext(requestBuilder.request(), expectedContext); - } - } - } - - public void 
testCopyHeadersClusterAdminRequestBuilder() { - Map transportHeaders = randomHeaders(randomIntBetween(0, 10)); - Map restHeaders = randomHeaders(randomIntBetween(0, 10)); - Map copiedHeaders = randomHeadersFrom(restHeaders); - Set usefulRestHeaders = new HashSet<>(copiedHeaders.keySet()); - usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet()); - Map restContext = randomContext(randomIntBetween(0, 10)); - Map transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext); - - HashMap expectedHeaders = new HashMap<>(); - expectedHeaders.putAll(transportHeaders); - expectedHeaders.putAll(copiedHeaders); - - Map expectedContext = new HashMap<>(); - expectedContext.putAll(transportContext); - expectedContext.putAll(restContext); - - try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { - - ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ - client.admin().cluster().prepareNodesInfo(), - client.admin().cluster().prepareClusterStats(), - client.admin().cluster().prepareState(), - client.admin().cluster().prepareCreateSnapshot("repo", "name"), - client.admin().cluster().prepareHealth(), - client.admin().cluster().prepareReroute() - }; - - for (ActionRequestBuilder requestBuilder : requestBuilders) { - putHeaders(requestBuilder.request(), transportHeaders); - putContext(requestBuilder.request(), transportContext); - assertHeaders(requestBuilder.request(), transportHeaders); - requestBuilder.get(); - assertHeaders(requestBuilder.request(), expectedHeaders); - assertContext(requestBuilder.request(), expectedContext); - } - } - } - - public void testCopyHeadersIndicesAdminRequestBuilder() { - Map transportHeaders = randomHeaders(randomIntBetween(0, 10)); - Map restHeaders = randomHeaders(randomIntBetween(0, 10)); - Map copiedHeaders = randomHeadersFrom(restHeaders); - Set usefulRestHeaders = new HashSet<>(copiedHeaders.keySet()); - usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet()); - Map restContext = randomContext(randomIntBetween(0, 10)); - Map transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext); - - HashMap expectedHeaders = new HashMap<>(); - expectedHeaders.putAll(transportHeaders); - expectedHeaders.putAll(copiedHeaders); - - Map expectedContext = new HashMap<>(); - expectedContext.putAll(transportContext); - expectedContext.putAll(restContext); - - try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { - - ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ - client.admin().indices().prepareValidateQuery(), - client.admin().indices().prepareCreate("test"), - client.admin().indices().prepareAliases(), - client.admin().indices().prepareAnalyze("text"), - client.admin().indices().prepareTypesExists("type"), - client.admin().indices().prepareClose() - }; - - for (ActionRequestBuilder requestBuilder : requestBuilders) { - putHeaders(requestBuilder.request(), transportHeaders); - putContext(requestBuilder.request(), transportContext); - assertHeaders(requestBuilder.request(), transportHeaders); - requestBuilder.get(); - assertHeaders(requestBuilder.request(), expectedHeaders); - assertContext(requestBuilder.request(), expectedContext); - } - } - } - - private static Map randomHeaders(int count) { - return randomMap(count, "header-"); - } - - private static Map randomContext(int count) { - return randomMap(count, 
"context-"); - } - - private static Map randomMap(int count, String prefix) { - Map headers = new HashMap<>(); - for (int i = 0; i < count; i++) { - headers.put(prefix + randomInt(30), randomAsciiOfLength(10)); - } - return headers; - } - - private static Map randomHeadersFrom(Map headers) { - Map newHeaders = new HashMap<>(); - if (headers.isEmpty()) { - return newHeaders; - } - int i = randomInt(headers.size() - 1); - for (Map.Entry entry : headers.entrySet()) { - if (randomInt(i) == 0) { - newHeaders.put(entry.getKey(), entry.getValue()); - } - } - return newHeaders; - } - - private static Client client(Client noOpClient, RestRequest restRequest, Set usefulRestHeaders) { - return new BaseRestHandler.HeadersAndContextCopyClient(noOpClient, restRequest, usefulRestHeaders); - } - - private static void putHeaders(ActionRequest request, Map headers) { - for (Map.Entry header : headers.entrySet()) { - request.putHeader(header.getKey(), header.getValue()); - } - } - - private static void putContext(ActionRequest request, Map context) { - for (Map.Entry header : context.entrySet()) { - request.putInContext(header.getKey(), header.getValue()); - } - } - - private static void assertHeaders(ActionRequest request, Map headers) { - if (headers.size() == 0) { - assertThat(request.getHeaders() == null || request.getHeaders().size() == 0, equalTo(true)); - } else { - assertThat(request.getHeaders(), notNullValue()); - assertThat(request.getHeaders().size(), equalTo(headers.size())); - for (String key : request.getHeaders()) { - assertThat(headers.get(key), equalTo(request.getHeader(key))); - } - } - } - - private static void assertContext(ActionRequest request, Map context) { - if (context.size() == 0) { - assertThat(request.isContextEmpty(), is(true)); - } else { - ImmutableOpenMap map = request.getContext(); - assertThat(map, notNullValue()); - assertThat(map.size(), equalTo(context.size())); - for (Object key : map.keys()) { - assertThat(context.get(key), equalTo(request.getFromContext(key))); - } - } - } - - private static Map onlyOnLeft(Map left, Map right) { - Map map = new HashMap<>(); - for (Map.Entry entry : left.entrySet()) { - if (!right.containsKey(entry.getKey())) { - map.put(entry.getKey(), entry.getValue()); - } - } - return map; - } -} diff --git a/core/src/test/java/org/elasticsearch/rest/NoOpClient.java b/core/src/test/java/org/elasticsearch/rest/NoOpClient.java index 492c2cd43ed..84d16a751e5 100644 --- a/core/src/test/java/org/elasticsearch/rest/NoOpClient.java +++ b/core/src/test/java/org/elasticsearch/rest/NoOpClient.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.support.AbstractClient; -import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; @@ -35,7 +34,7 @@ import java.util.concurrent.TimeUnit; public class NoOpClient extends AbstractClient { public NoOpClient(String testName) { - super(Settings.EMPTY, new ThreadPool(testName), Headers.EMPTY); + super(Settings.EMPTY, new ThreadPool(testName)); } @Override @@ -51,4 +50,4 @@ public class NoOpClient extends AbstractClient { throw new ElasticsearchException(t.getMessage(), t); } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java new file mode 100644 index 
00000000000..d6e1a97ac8f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class RestControllerTests extends ESTestCase { + + public void testRegisterRelevantHeaders() throws InterruptedException { + + final RestController restController = new RestController(Settings.EMPTY); + + int iterations = randomIntBetween(1, 5); + + Set headers = new HashSet<>(); + ExecutorService executorService = Executors.newFixedThreadPool(iterations); + for (int i = 0; i < iterations; i++) { + int headersCount = randomInt(10); + final Set newHeaders = new HashSet<>(); + for (int j = 0; j < headersCount; j++) { + String usefulHeader = randomRealisticUnicodeOfLengthBetween(1, 30); + newHeaders.add(usefulHeader); + } + headers.addAll(newHeaders); + + executorService.submit((Runnable) () -> restController.registerRelevantHeaders(newHeaders.toArray(new String[newHeaders.size()]))); + } + + executorService.shutdown(); + assertThat(executorService.awaitTermination(1, TimeUnit.SECONDS), equalTo(true)); + String[] relevantHeaders = restController.relevantHeaders().toArray(new String[restController.relevantHeaders().size()]); + assertThat(relevantHeaders.length, equalTo(headers.size())); + + Arrays.sort(relevantHeaders); + String[] headersArray = new String[headers.size()]; + headersArray = headers.toArray(headersArray); + Arrays.sort(headersArray); + assertThat(relevantHeaders, equalTo(headersArray)); + } + + public void testApplyRelevantHeaders() { + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final RestController restController = new RestController(Settings.EMPTY) { + @Override + boolean checkRequestParameters(RestRequest request, RestChannel channel) { + return true; + } + + @Override + void executeHandler(RestRequest request, RestChannel channel) throws Exception { + assertEquals("true", threadContext.getHeader("header.1")); + assertEquals("true", threadContext.getHeader("header.2")); + assertNull(threadContext.getHeader("header.3")); + + } + }; + threadContext.putHeader("header.3", "true"); + restController.registerRelevantHeaders("header.1", "header.2"); + Map restHeaders = new HashMap<>(); + 
restHeaders.put("header.1", "true"); + restHeaders.put("header.2", "true"); + restHeaders.put("header.3", "false"); + restController.dispatchRequest(new FakeRestRequest(restHeaders), null, threadContext); + assertNull(threadContext.getHeader("header.1")); + assertNull(threadContext.getHeader("header.2")); + assertEquals("true", threadContext.getHeader("header.3")); + } +} diff --git a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java index b66d00cd6a8..56ae8e2183d 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -84,7 +85,7 @@ public class RestFilterChainTests extends ESTestCase { FakeRestRequest fakeRestRequest = new FakeRestRequest(); FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, 1); - restController.dispatchRequest(fakeRestRequest, fakeRestChannel); + restController.dispatchRequest(fakeRestRequest, fakeRestChannel, new ThreadContext(Settings.EMPTY)); assertThat(fakeRestChannel.await(), equalTo(true)); @@ -142,7 +143,7 @@ public class RestFilterChainTests extends ESTestCase { FakeRestRequest fakeRestRequest = new FakeRestRequest(); FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, additionalContinueCount + 1); - restController.dispatchRequest(fakeRestRequest, fakeRestChannel); + restController.dispatchRequest(fakeRestRequest, fakeRestChannel, new ThreadContext(Settings.EMPTY)); fakeRestChannel.await(); assertThat(testFilter.runs.get(), equalTo(1)); diff --git a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java b/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java deleted file mode 100644 index 8e60b28f376..00000000000 --- a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.rest; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.test.ESTestCase; - -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -/** - * - */ -public class RestRequestTests extends ESTestCase { - public void testContext() throws Exception { - int count = randomInt(10); - Request request = new Request(); - for (int i = 0; i < count; i++) { - request.putInContext("key" + i, "val" + i); - } - assertThat(request.isContextEmpty(), is(count == 0)); - assertThat(request.contextSize(), is(count)); - ImmutableOpenMap ctx = request.getContext(); - for (int i = 0; i < count; i++) { - assertThat(request.hasInContext("key" + i), is(true)); - assertThat((String) request.getFromContext("key" + i), equalTo("val" + i)); - assertThat((String) ctx.get("key" + i), equalTo("val" + i)); - } - } - - public static class Request extends RestRequest { - @Override - public Method method() { - return null; - } - - @Override - public String uri() { - return null; - } - - @Override - public String rawPath() { - return null; - } - - @Override - public boolean hasContent() { - return false; - } - - @Override - public BytesReference content() { - return null; - } - - @Override - public String header(String name) { - return null; - } - - @Override - public Iterable> headers() { - return null; - } - - @Override - public boolean hasParam(String key) { - return false; - } - - @Override - public String param(String key) { - return null; - } - - @Override - public Map params() { - return null; - } - - @Override - public String param(String key, String defaultValue) { - return null; - } - } -} diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index 9a936b7f7eb..64f6cc85b34 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.script; -import org.elasticsearch.common.ContextAndHeaderHolder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; @@ -52,16 +51,14 @@ public class FileScriptTests extends ESTestCase { } public void testFileScriptFound() throws Exception { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings settings = Settings.builder() .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", "false").build(); ScriptService scriptService = makeScriptService(settings); Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); - assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap())); } public void testAllOpsDisabled() throws Exception { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings settings = Settings.builder() .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", "false") .put("script.engine." 
+ MockScriptEngine.NAME + ".file.search", "false") @@ -72,7 +69,7 @@ public class FileScriptTests extends ESTestCase { Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); for (ScriptContext context : ScriptContext.Standard.values()) { try { - scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap()); + scriptService.compile(script, context, Collections.emptyMap()); fail(context.getKey() + " script should have been rejected"); } catch(Exception e) { assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [file], operation [" + context.getKey() + "] and lang [" + MockScriptEngine.NAME + "] are disabled")); diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index a6db61b88aa..144aedb7fd2 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.script; -import org.elasticsearch.common.ContextAndHeaderHolder; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; @@ -47,7 +46,6 @@ import static org.hamcrest.Matchers.notNullValue; public class NativeScriptTests extends ESTestCase { public void testNativeScript() throws InterruptedException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings settings = Settings.settingsBuilder() .put("name", "testNativeScript") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) @@ -64,13 +62,12 @@ public class NativeScriptTests extends ESTestCase { ScriptService scriptService = injector.getInstance(ScriptService.class); ExecutableScript executable = scriptService.executable(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), - ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); + ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertThat(executable.run().toString(), equalTo("test")); terminate(injector.getInstance(ThreadPool.class)); } public void testFineGrainedSettingsDontAffectNativeScripts() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.settingsBuilder(); if (randomBoolean()) { ScriptType scriptType = randomFrom(ScriptType.values()); @@ -92,7 +89,7 @@ public class NativeScriptTests extends ESTestCase { for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext, - contextAndHeaders, Collections.emptyMap()), notNullValue()); + Collections.emptyMap()), notNullValue()); } } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 3cf4fde01da..b6e8ee6833d 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.script; -import org.elasticsearch.common.ContextAndHeaderHolder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; @@ -57,12 +56,11 @@ public class ScriptContextTests extends ESTestCase { } public void 
testCustomGlobalScriptContextSettings() throws Exception { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); ScriptService scriptService = makeScriptService(); for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), contextAndHeaders, Collections.emptyMap()); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (ScriptException e) { assertThat(e.getMessage(), containsString("scripts of type [" + scriptType + "], operation [" + PLUGIN_NAME + "_custom_globally_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled")); @@ -71,29 +69,27 @@ public class ScriptContextTests extends ESTestCase { } public void testCustomScriptContextSettings() throws Exception { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); ScriptService scriptService = makeScriptService(); Script script = new Script("1", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, null); try { - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders, Collections.emptyMap()); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (ScriptException e) { assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [inline], operation [" + PLUGIN_NAME + "_custom_exp_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled")); } // still works for other script contexts - assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, contextAndHeaders, Collections.emptyMap())); - assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap())); - assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), contextAndHeaders, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), Collections.emptyMap())); } public void testUnknownPluginScriptContext() throws Exception { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); ScriptService scriptService = makeScriptService(); for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), contextAndHeaders, Collections.emptyMap()); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage(), e.getMessage().contains("script context [" + PLUGIN_NAME + "_unknown] not supported")); @@ -102,7 +98,6 @@ public class ScriptContextTests extends ESTestCase { } public void testUnknownCustomScriptContext() throws Exception { - ContextAndHeaderHolder contextAndHeaders = new 
ContextAndHeaderHolder(); ScriptContext context = new ScriptContext() { @Override public String getKey() { @@ -113,7 +108,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap()); + scriptService.compile(script, context, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage(), e.getMessage().contains("script context [test] not supported")); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index a725d22de4a..0825da4d4df 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.script; -import org.elasticsearch.common.ContextAndHeaderHolder; -import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; @@ -107,7 +105,7 @@ public class ScriptServiceTests extends ESTestCase { Environment environment = new Environment(finalSettings); scriptService = new ScriptService(finalSettings, environment, Collections.singleton(scriptEngineService), resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings) { @Override - String getScriptFromIndex(String scriptLang, String id, HasContextAndHeaders headersContext) { + String getScriptFromIndex(String scriptLang, String id) { //mock the script that gets retrieved from an index return "100"; } @@ -125,7 +123,6 @@ public class ScriptServiceTests extends ESTestCase { public void testScriptsWithoutExtensions() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); logger.info("--> setup two test files one with extension and another without"); Path testFileNoExt = scriptsFilePath.resolve("test_no_ext"); @@ -136,7 +133,7 @@ public class ScriptServiceTests extends ESTestCase { logger.info("--> verify that file with extension was correctly processed"); CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()); + ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); logger.info("--> delete both files"); @@ -147,7 +144,7 @@ public class ScriptServiceTests extends ESTestCase { logger.info("--> verify that file with extension was correctly removed"); try { scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH, - contextAndHeaders, Collections.emptyMap()); + Collections.emptyMap()); fail("the script test_script should no longer exist"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage(), containsString("Unable to find on disk file script [test_script] using lang [test]")); @@ -155,38 +152,34 @@ public class ScriptServiceTests extends ESTestCase { } public void testInlineScriptCompiledOnceCache() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); 
buildScriptService(Settings.EMPTY); CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + randomFrom(scriptContexts), Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + randomFrom(scriptContexts), Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } public void testInlineScriptCompiledOnceMultipleLangAcronyms() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); CompiledScript compiledScript1 = scriptService.compile(new Script("script", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + randomFrom(scriptContexts), Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("script", ScriptType.INLINE, "test2", null), - randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + randomFrom(scriptContexts), Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } public void testFileScriptCompiledOnceMultipleLangAcronyms() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); createFileScripts("test"); CompiledScript compiledScript1 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), - randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + randomFrom(scriptContexts), Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test2", null), - randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + randomFrom(scriptContexts), Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } public void testDefaultBehaviourFineGrainedSettings() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.builder(); //rarely inject the default settings, which have no effect if (rarely()) { @@ -203,14 +196,13 @@ public class ScriptServiceTests extends ESTestCase { for (ScriptContext scriptContext : scriptContexts) { //custom engine is sandboxed, all scripts are enabled by default - assertCompileAccepted("test", "script", ScriptType.INLINE, scriptContext, contextAndHeaders); - assertCompileAccepted("test", "script", ScriptType.INDEXED, scriptContext, contextAndHeaders); - assertCompileAccepted("test", "file_script", ScriptType.FILE, scriptContext, contextAndHeaders); + assertCompileAccepted("test", "script", ScriptType.INLINE, scriptContext); + assertCompileAccepted("test", "script", ScriptType.INDEXED, scriptContext); + assertCompileAccepted("test", "file_script", ScriptType.FILE, scriptContext); } } public void testFineGrainedSettings() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); //collect the fine-grained settings to set for this run int numScriptSettings = randomIntBetween(0, ScriptType.values().length); Map scriptSourceSettings = new HashMap<>(); @@ -312,16 +304,16 @@ public class ScriptServiceTests extends ESTestCase { for (String lang : 
scriptEngineService.getTypes()) { switch (scriptMode) { case ON: - assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders); + assertCompileAccepted(lang, script, scriptType, scriptContext); break; case OFF: - assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders); + assertCompileRejected(lang, script, scriptType, scriptContext); break; case SANDBOX: if (scriptEngineService.isSandboxed()) { - assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders); + assertCompileAccepted(lang, script, scriptType, scriptContext); } else { - assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders); + assertCompileRejected(lang, script, scriptType, scriptContext); } break; } @@ -331,7 +323,6 @@ public class ScriptServiceTests extends ESTestCase { } public void testCompileNonRegisteredContext() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); String pluginName; String unknownContext; @@ -343,7 +334,7 @@ public class ScriptServiceTests extends ESTestCase { for (String type : scriptEngineService.getTypes()) { try { scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin( - pluginName, unknownContext), contextAndHeaders, Collections.emptyMap()); + pluginName, unknownContext), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported")); @@ -352,16 +343,14 @@ public class ScriptServiceTests extends ESTestCase { } public void testCompileCountedInCompilationStats() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testExecutableCountedInCompilationStats() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -372,59 +361,53 @@ public class ScriptServiceTests extends ESTestCase { } public void testMultipleCompilationsCountedInCompilationStats() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); int numberOfCompilations = randomIntBetween(1, 1024); for (int i = 0; i < numberOfCompilations; i++) { scriptService - .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); } assertEquals(numberOfCompilations, scriptService.stats().getCompilations()); } public void 
testCompilationStatsOnCacheHit() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.builder(); builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); buildScriptService(builder.build()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testFileScriptCountedInCompilationStats() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); createFileScripts("test"); - scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testIndexedScriptCountedInCompilationStats() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); buildScriptService(Settings.EMPTY); - scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testCacheEvictionCountedInCacheEvictionsStats() throws IOException { - ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.builder(); builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); buildScriptService(builder.build()); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); - scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); + scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(2L, scriptService.stats().getCompilations()); assertEquals(1L, scriptService.stats().getCacheEvictions()); } public void testDefaultLanguage() throws IOException { - ContextAndHeaderHolder contextAndHeaderHolder = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.builder(); builder.put("script.default_lang", "test"); buildScriptService(builder.build()); CompiledScript script = - scriptService.compile(new Script("1 + 1", ScriptType.INLINE, null, null), randomFrom(scriptContexts), contextAndHeaderHolder, Collections.emptyMap()); + scriptService.compile(new Script("1 + 1", ScriptType.INLINE, null, null), randomFrom(scriptContexts), 
Collections.emptyMap()); assertEquals(script.lang(), "test"); } @@ -436,18 +419,17 @@ public class ScriptServiceTests extends ESTestCase { resourceWatcherService.notifyNow(); } - private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext, - HasContextAndHeaders contextAndHeaders) { + private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) { try { - scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap()); + scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap()); fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]"); } catch(ScriptException e) { //all good } } - private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext, HasContextAndHeaders contextAndHeaders) { - assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap()), notNullValue()); + private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) { + assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap()), notNullValue()); } public static class TestEngineService implements ScriptEngineService { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 0cb799d2ac0..90e6ff6de43 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; @@ -114,11 +113,11 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase { indexWriter.commit(); indexWriter.close(); + IndexService indexService = createIndex("test"); DirectoryReader directoryReader = DirectoryReader.open(directory); - directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("test"), 0)); + directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0)); IndexSearcher searcher = new IndexSearcher(directoryReader); - IndexService indexService = createIndex("test"); indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), MapperService.MergeReason.MAPPING_UPDATE, false); SearchContext searchContext = createSearchContext(indexService); AggregationContext context = new AggregationContext(searchContext); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 0fe9113e8f8..0875491566e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.Index; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -75,7 +76,7 @@ public class SignificanceHeuristicTests extends ESTestCase { @Override public SearchShardTarget shardTarget() { - return new SearchShardTarget("no node, this is a unit test", "no index, this is a unit test", 0); + return new SearchShardTarget("no node, this is a unit test", new Index("no index, this is a unit test", "_na_"), 0); } } diff --git a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 1eff57a0567..97bd1581582 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -165,7 +165,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase { hitField = new InternalSearchHitField(NAMES[0], new ArrayList<>(1)); hitContext.hit().fields().put(NAMES[0], hitField); } - TermVectorsResponse termVector = context.indexShard().getTermVectors(new TermVectorsRequest(context.indexShard().shardId().index().getName(), hitContext.hit().type(), hitContext.hit().id())); + TermVectorsResponse termVector = context.indexShard().getTermVectors(new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(), hitContext.hit().type(), hitContext.hit().id())); try { Map tv = new HashMap<>(); TermsEnum terms = termVector.getFields().terms(field).iterator(); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index 7446f99f571..5dc8528c00a 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -275,7 +275,7 @@ public class HighlightBuilderTests extends ESTestCase { public void testBuildSearchContextHighlight() throws IOException { Settings indexSettings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - Index index = new Index(randomAsciiOfLengthBetween(1, 10)); + Index index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry) { diff --git a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java 
b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java index 0525fd28db1..77fc2f0e6a9 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.internal; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.Index; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; @@ -36,7 +37,7 @@ import static org.hamcrest.Matchers.nullValue; public class InternalSearchHitTests extends ESTestCase { public void testSerializeShardTarget() throws Exception { - SearchShardTarget target = new SearchShardTarget("_node_id", "_index", 0); + SearchShardTarget target = new SearchShardTarget("_node_id", new Index("_index", "_na_"), 0); Map innerHits = new HashMap<>(); InternalSearchHit innerHit1 = new InternalSearchHit(0, "_id", new Text("_type"), null); diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index 1305aa727f5..01f7e332446 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; @@ -159,8 +158,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { public void testBuildRescoreSearchContext() throws ElasticsearchParseException, IOException { Settings indexSettings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - Index index = new Index(randomAsciiOfLengthBetween(1, 10)); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry) { @Override diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index b85c0ff5c36..7de0c150016 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -70,7 +70,7 @@ public class SearchAfterBuilderTests extends ESTestCase { SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder(); Object[] values = new Object[numSearchFrom]; for (int i = 0; i < numSearchFrom; i++) { - int branch = randomInt(8); + int branch = randomInt(9); switch (branch) { case 0: values[i] = randomInt(); @@ -99,6 +99,9 @@ public class 
SearchAfterBuilderTests extends ESTestCase { case 8: values[i] = new Text(randomAsciiOfLengthBetween(5, 20)); break; + case 9: + values[i] = null; + break; } } searchAfterBuilder.setSortValues(values); @@ -115,7 +118,7 @@ public class SearchAfterBuilderTests extends ESTestCase { jsonBuilder.startObject(); jsonBuilder.startArray("search_after"); for (int i = 0; i < numSearchAfter; i++) { - int branch = randomInt(8); + int branch = randomInt(9); switch (branch) { case 0: jsonBuilder.value(randomInt()); @@ -144,6 +147,9 @@ public class SearchAfterBuilderTests extends ESTestCase { case 8: jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20))); break; + case 9: + jsonBuilder.nullValue(); + break; } } jsonBuilder.endArray(); @@ -223,18 +229,7 @@ public class SearchAfterBuilderTests extends ESTestCase { assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); } } - - public void testWithNullValue() throws Exception { - SearchAfterBuilder builder = new SearchAfterBuilder(); - builder.setSortValues(new Object[] {1, "1", null}); - try { - serializedCopy(builder); - fail("Should fail on null values"); - } catch (IOException e) { - assertThat(e.getMessage(), Matchers.equalTo("Can't handle search_after field value of type [null]")); - } - } - + public void testWithNullArray() throws Exception { SearchAfterBuilder builder = new SearchAfterBuilder(); try { diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 57ad7ce1620..e4ac3b728ea 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -34,12 +34,12 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.RemoteTransportException; import org.hamcrest.Matchers; -import java.io.IOException; import java.util.List; import java.util.ArrayList; import java.util.Comparator; import java.util.Collections; import java.util.Arrays; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -52,15 +52,15 @@ public class SearchAfterIT extends ESIntegTestCase { public void testsShouldFail() throws Exception { createIndex("test"); + ensureGreen(); indexRandom(true, client().prepareIndex("test", "type1", "0").setSource("field1", 0, "field2", "toto")); - try { client().prepareSearch("test") .addSort("field1", SortOrder.ASC) .setQuery(matchAllQuery()) .searchAfter(new Object[]{0}) .setScroll("1m") - .execute().actionGet(); + .get(); fail("Should fail on search_after cannot be used with scroll."); } catch (SearchPhaseExecutionException e) { @@ -74,7 +74,7 @@ public class SearchAfterIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .searchAfter(new Object[]{0}) .setFrom(10) - .execute().actionGet(); + .get(); fail("Should fail on search_after cannot be used with from > 0."); } catch (SearchPhaseExecutionException e) { @@ -87,7 +87,7 @@ public class SearchAfterIT extends ESIntegTestCase { client().prepareSearch("test") .setQuery(matchAllQuery()) .searchAfter(new Object[]{0.75f}) - .execute().actionGet(); + .get(); fail("Should fail on search_after on score only is disabled"); } catch (SearchPhaseExecutionException e) { @@ -115,7 +115,7 @@ public class SearchAfterIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .addSort("field1", 
SortOrder.ASC) .searchAfter(new Object[]{1, 2}) - .execute().actionGet(); + .get(); fail("Should fail on search_after size differs from sort field size"); } catch (SearchPhaseExecutionException e) { assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class)); @@ -128,7 +128,7 @@ public class SearchAfterIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .addSort("field1", SortOrder.ASC) .searchAfter(new Object[]{"toto"}) - .execute().actionGet(); + .get(); fail("Should fail on search_after on score only is disabled"); } catch (SearchPhaseExecutionException e) { @@ -138,13 +138,31 @@ public class SearchAfterIT extends ESIntegTestCase { } } + public void testWithNullStrings() throws ExecutionException, InterruptedException { + createIndex("test"); + ensureGreen(); + indexRandom(true, + client().prepareIndex("test", "type1", "0").setSource("field1", 0), + client().prepareIndex("test", "type1", "1").setSource("field1", 100, "field2", "toto")); + SearchResponse searchResponse = client().prepareSearch("test") + .addSort("field1", SortOrder.ASC) + .addSort("field2", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[]{0, null}) + .get(); + assertThat(searchResponse.getHits().getTotalHits(), Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); + assertThat(searchResponse.getHits().getHits()[0].sourceAsMap().get("field1"), Matchers.equalTo(100)); + assertThat(searchResponse.getHits().getHits()[0].sourceAsMap().get("field2"), Matchers.equalTo("toto")); + } + public void testWithSimpleTypes() throws Exception { int numFields = randomInt(20) + 1; int[] types = new int[numFields-1]; for (int i = 0; i < numFields-1; i++) { types[i] = randomInt(6); } - List documents = new ArrayList<> (); + List documents = new ArrayList<>(); for (int i = 0; i < NUM_DOCS; i++) { List values = new ArrayList<>(); for (int type : types) { @@ -239,7 +257,7 @@ public class SearchAfterIT extends ESIntegTestCase { if (sortValues != null) { req.searchAfter(sortValues); } - SearchResponse searchResponse = req.execute().actionGet(); + SearchResponse searchResponse = req.get(); for (SearchHit hit : searchResponse.getHits()) { List toCompare = convertSortValues(documents.get(offset++)); assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.sortValues())), equalTo(0)); @@ -282,7 +300,8 @@ public class SearchAfterIT extends ESIntegTestCase { fail("Can't match type [" + type + "]"); } } - indexRequestBuilder.addMapping(typeName, mappings.toArray()).execute().actionGet(); + indexRequestBuilder.addMapping(typeName, mappings.toArray()).get(); + ensureGreen(); } // Convert Integer, Short, Byte and Boolean to Long in order to match the conversion done diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java index 419316b5265..35d495272ca 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java @@ -54,7 +54,7 @@ public class CustomSuggester extends Suggester { + return (parser, mapperService, fieldData) -> { Map options = parser.map(); CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options); suggestionContext.setField((String) options.get("field")); diff --git a/core/src/main/java/org/elasticsearch/common/HasHeaders.java 
b/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java similarity index 50% rename from core/src/main/java/org/elasticsearch/common/HasHeaders.java rename to core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java index ab3a7da643e..22a3ecac8e3 100644 --- a/core/src/main/java/org/elasticsearch/common/HasHeaders.java +++ b/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java @@ -16,23 +16,22 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.snapshots; -package org.elasticsearch.common; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.test.ESBlobStoreRepositoryIntegTestCase; -import java.util.Set; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -/** - * - */ -public interface HasHeaders { +public class FsBlobStoreRepositoryIT extends ESBlobStoreRepositoryIntegTestCase { + @Override + protected void createTestRepository(String name) { + assertAcked(client().admin().cluster().preparePutRepository(name) + .setType("fs").setSettings(Settings.settingsBuilder() + .put("location", randomRepoPath()) + .put("compress", randomBoolean()) + .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); - void putHeader(String key, V value); - - V getHeader(String key); - - boolean hasHeader(String key); - - Set getHeaders(); - - void copyHeadersFrom(HasHeaders from); + } } diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index a245919d944..02db67cd153 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1896,9 +1896,9 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas public ClusterState execute(ClusterState currentState) { // Simulate orphan snapshot ImmutableOpenMap.Builder shards = ImmutableOpenMap.builder(); - shards.put(new ShardId("test-idx", 0), new ShardSnapshotStatus("unknown-node", State.ABORTED)); - shards.put(new ShardId("test-idx", 1), new ShardSnapshotStatus("unknown-node", State.ABORTED)); - shards.put(new ShardId("test-idx", 2), new ShardSnapshotStatus("unknown-node", State.ABORTED)); + shards.put(new ShardId("test-idx", "_na_", 0), new ShardSnapshotStatus("unknown-node", State.ABORTED)); + shards.put(new ShardId("test-idx", "_na_", 1), new ShardSnapshotStatus("unknown-node", State.ABORTED)); + shards.put(new ShardId("test-idx", "_na_", 2), new ShardSnapshotStatus("unknown-node", State.ABORTED)); List entries = new ArrayList<>(); entries.add(new Entry(new SnapshotId("test-repo", "test-snap"), true, State.ABORTED, Collections.singletonList("test-idx"), System.currentTimeMillis(), shards.build())); return ClusterState.builder(currentState).putCustom(SnapshotsInProgress.TYPE, new SnapshotsInProgress(Collections.unmodifiableList(entries))).build(); diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java index 086aac209b5..5ac1e82dba4 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java @@ -46,7 +46,7 @@ public 
class BlobStoreWrapper implements BlobStore { } @Override - public void close() { + public void close() throws IOException { delegate.close(); } diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java b/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java new file mode 100644 index 00000000000..291d00a8dde --- /dev/null +++ b/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.test; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.bytes.BytesArray; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.test.ESBlobStoreTestCase.writeRandomBlob; +import static org.elasticsearch.test.ESBlobStoreTestCase.randomBytes; +import static org.elasticsearch.test.ESBlobStoreTestCase.readBlobFully; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; + +/** + * Generic test case for blob store container implementation. + * These tests check basic blob store functionality. 
+ */ +public abstract class ESBlobStoreContainerTestCase extends ESTestCase { + + public void testWriteRead() throws IOException { + try(final BlobStore store = newBlobStore()) { + final BlobContainer container = store.blobContainer(new BlobPath()); + byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); + container.writeBlob("foobar", new BytesArray(data)); + try (InputStream stream = container.readBlob("foobar")) { + BytesRefBuilder target = new BytesRefBuilder(); + while (target.length() < data.length) { + byte[] buffer = new byte[scaledRandomIntBetween(1, data.length - target.length())]; + int offset = scaledRandomIntBetween(0, buffer.length - 1); + int read = stream.read(buffer, offset, buffer.length - offset); + target.append(new BytesRef(buffer, offset, read)); + } + assertEquals(data.length, target.length()); + assertArrayEquals(data, Arrays.copyOfRange(target.bytes(), 0, target.length())); + } + } + } + + public void testMoveAndList() throws IOException { + try(final BlobStore store = newBlobStore()) { + final BlobContainer container = store.blobContainer(new BlobPath()); + assertThat(container.listBlobs().size(), equalTo(0)); + int numberOfFooBlobs = randomIntBetween(0, 10); + int numberOfBarBlobs = randomIntBetween(3, 20); + Map<String, Long> generatedBlobs = new HashMap<>(); + for (int i = 0; i < numberOfFooBlobs; i++) { + int length = randomIntBetween(10, 100); + String name = "foo-" + i + "-"; + generatedBlobs.put(name, (long) length); + writeRandomBlob(container, name, length); + } + for (int i = 1; i < numberOfBarBlobs; i++) { + int length = randomIntBetween(10, 100); + String name = "bar-" + i + "-"; + generatedBlobs.put(name, (long) length); + writeRandomBlob(container, name, length); + } + int length = randomIntBetween(10, 100); + String name = "bar-0-"; + generatedBlobs.put(name, (long) length); + byte[] data = writeRandomBlob(container, name, length); + + Map<String, BlobMetaData> blobs = container.listBlobs(); + assertThat(blobs.size(), equalTo(numberOfFooBlobs + numberOfBarBlobs)); + for (Map.Entry<String, Long> generated : generatedBlobs.entrySet()) { + BlobMetaData blobMetaData = blobs.get(generated.getKey()); + assertThat(generated.getKey(), blobMetaData, notNullValue()); + assertThat(blobMetaData.name(), equalTo(generated.getKey())); + assertThat(blobMetaData.length(), equalTo(generated.getValue())); + } + + assertThat(container.listBlobsByPrefix("foo-").size(), equalTo(numberOfFooBlobs)); + assertThat(container.listBlobsByPrefix("bar-").size(), equalTo(numberOfBarBlobs)); + assertThat(container.listBlobsByPrefix("baz-").size(), equalTo(0)); + + String newName = "bar-new"; + // Move to a new location + container.move(name, newName); + assertThat(container.listBlobsByPrefix(name).size(), equalTo(0)); + blobs = container.listBlobsByPrefix(newName); + assertThat(blobs.size(), equalTo(1)); + assertThat(blobs.get(newName).length(), equalTo(generatedBlobs.get(name))); + assertThat(data, equalTo(readBlobFully(container, newName, length))); + } + } + + protected abstract BlobStore newBlobStore() throws IOException; +} diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java b/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java new file mode 100644 index 00000000000..dc49683de63 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java @@ -0,0 +1,197 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.test; + +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.action.index.IndexRequestBuilder; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +/** + * Basic integration tests for blob-based repository validation. + */ +public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase { + + protected abstract void createTestRepository(String name); + + public void testSnapshotAndRestore() throws Exception { + String repoName = randomAsciiName(); + logger.info("--> creating repository {}", repoName); + createTestRepository(repoName); + int indexCount = randomIntBetween(1, 5); + int[] docCounts = new int[indexCount]; + String[] indexNames = generateRandomNames(indexCount); + for (int i = 0; i < indexCount; i++) { + logger.info("--> create random index {} with {} records", indexNames[i], docCounts[i]); + docCounts[i] = iterations(10, 1000); + addRandomDocuments(indexNames[i], docCounts[i]); + assertHitCount(client().prepareSearch(indexNames[i]).setSize(0).get(), docCounts[i]); + } + + String snapshotName = randomAsciiName(); + logger.info("--> create snapshot {}:{}", repoName, snapshotName); + assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName).setWaitForCompletion(true).setIndices(indexNames)); + + List deleteIndices = randomSubsetOf(randomIntBetween(0, indexCount), indexNames); + if (deleteIndices.size() > 0) { + logger.info("--> delete indices {}", deleteIndices); + assertAcked(client().admin().indices().prepareDelete(deleteIndices.toArray(new String[deleteIndices.size()]))); + } + + Set closeIndices = new HashSet<>(Arrays.asList(indexNames)); + closeIndices.removeAll(deleteIndices); + + if (closeIndices.size() > 0) { + for (String index : closeIndices) { + if (randomBoolean()) { + logger.info("--> add random documents to {}", index); + addRandomDocuments(index, randomIntBetween(10, 1000)); + } else { + int docCount = (int) client().prepareSearch(index).setSize(0).get().getHits().totalHits(); + int deleteCount = randomIntBetween(1, docCount); + logger.info("--> delete {} 
random documents from {}", deleteCount, index); + for (int i = 0; i < deleteCount; i++) { + int doc = randomIntBetween(0, docCount - 1); + client().prepareDelete(index, index, Integer.toString(doc)).get(); + } + client().admin().indices().prepareRefresh(index).get(); + } + } + + logger.info("--> close indices {}", closeIndices); + assertAcked(client().admin().indices().prepareClose(closeIndices.toArray(new String[closeIndices.size()]))); + } + + logger.info("--> restore all indices from the snapshot"); + assertSuccessfulRestore(client().admin().cluster().prepareRestoreSnapshot(repoName, snapshotName).setWaitForCompletion(true)); + + ensureGreen(); + + for (int i = 0; i < indexCount; i++) { + assertHitCount(client().prepareSearch(indexNames[i]).setSize(0).get(), docCounts[i]); + } + } + + public void testMultipleSnapshotAndRollback() throws Exception { + String repoName = randomAsciiName(); + logger.info("--> creating repository {}", repoName); + createTestRepository(repoName); + int iterationCount = randomIntBetween(2, 5); + int[] docCounts = new int[iterationCount]; + String indexName = randomAsciiName(); + String snapshotName = randomAsciiName(); + assertAcked(client().admin().indices().prepareCreate(indexName).get()); + for (int i = 0; i < iterationCount; i++) { + if (randomBoolean() && i > 0) { // don't delete on the first iteration + int docCount = docCounts[i - 1]; + if (docCount > 0) { + int deleteCount = randomIntBetween(1, docCount); + logger.info("--> delete {} random documents from {}", deleteCount, indexName); + for (int j = 0; j < deleteCount; j++) { + int doc = randomIntBetween(0, docCount - 1); + client().prepareDelete(indexName, indexName, Integer.toString(doc)).get(); + } + client().admin().indices().prepareRefresh(indexName).get(); + } + } else { + int docCount = randomIntBetween(10, 1000); + logger.info("--> add {} random documents to {}", docCount, indexName); + addRandomDocuments(indexName, docCount); + } + // Check number of documents in this iteration + docCounts[i] = (int) client().prepareSearch(indexName).setSize(0).get().getHits().totalHits(); + logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]); + assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName)); + } + + int restoreOperations = randomIntBetween(1, 3); + for (int i = 0; i < restoreOperations; i++) { + int iterationToRestore = randomIntBetween(0, iterationCount - 1); + logger.info("--> performing restore of the iteration {}", iterationToRestore); + + logger.info("--> close index"); + assertAcked(client().admin().indices().prepareClose(indexName)); + + logger.info("--> restore index from the snapshot"); + assertSuccessfulRestore(client().admin().cluster().prepareRestoreSnapshot(repoName, snapshotName + "-" + iterationToRestore).setWaitForCompletion(true)); + ensureGreen(); + assertHitCount(client().prepareSearch(indexName).setSize(0).get(), docCounts[iterationToRestore]); + } + } + + protected void addRandomDocuments(String name, int numDocs) throws ExecutionException, InterruptedException { + IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + indexRequestBuilders[i] = client().prepareIndex(name, name, Integer.toString(i)) + .setRouting(randomAsciiOfLength(randomIntBetween(1, 10))).setSource("field", "value"); + } + indexRandom(true, indexRequestBuilders); + } + + protected String[] 
generateRandomNames(int num) { + Set names = new HashSet<>(); + for (int i = 0; i < num; i++) { + String name; + do { + name = randomAsciiName(); + } while (names.contains(name)); + names.add(name); + } + return names.toArray(new String[num]); + } + + public static CreateSnapshotResponse assertSuccessfulSnapshot(CreateSnapshotRequestBuilder requestBuilder) { + CreateSnapshotResponse response = requestBuilder.get(); + assertSuccessfulSnapshot(response); + return response; + } + + public static void assertSuccessfulSnapshot(CreateSnapshotResponse response) { + assertThat(response.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat(response.getSnapshotInfo().successfulShards(), equalTo(response.getSnapshotInfo().totalShards())); + } + + public static RestoreSnapshotResponse assertSuccessfulRestore(RestoreSnapshotRequestBuilder requestBuilder) { + RestoreSnapshotResponse response = requestBuilder.get(); + assertSuccessfulRestore(response); + return response; + } + + public static void assertSuccessfulRestore(RestoreSnapshotResponse response) { + assertThat(response.getRestoreInfo().successfulShards(), greaterThan(0)); + assertThat(response.getRestoreInfo().successfulShards(), equalTo(response.getRestoreInfo().totalShards())); + } + + public static String randomAsciiName() { + return randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + } +} diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java b/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java new file mode 100644 index 00000000000..80432d628ef --- /dev/null +++ b/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.test; + +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.bytes.BytesArray; +import org.junit.Test; + +import java.io.IOException; +import java.io.InputStream; + +import static org.hamcrest.CoreMatchers.equalTo; + +/** + * Generic test case for blob store implementation. + * These tests check basic blob store functionality. 
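+ * A concrete test reuses these checks by implementing newBlobStore(). As an illustration only, a
+ * filesystem-backed subclass (the class name is hypothetical, and this assumes FsBlobStore exposes a
+ * (Settings, Path) constructor) could look like:
+ *
+ *   public class FsBlobStoreTests extends ESBlobStoreTestCase {
+ *       protected BlobStore newBlobStore() throws IOException {
+ *           // createTempDir() comes from ESTestCase; Settings.EMPTY keeps the store defaults.
+ *           return new FsBlobStore(Settings.EMPTY, createTempDir());
+ *       }
+ *   }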
+ */ +public abstract class ESBlobStoreTestCase extends ESTestCase { + + public void testContainerCreationAndDeletion() throws IOException { + try(final BlobStore store = newBlobStore()) { + final BlobContainer containerFoo = store.blobContainer(new BlobPath().add("foo")); + final BlobContainer containerBar = store.blobContainer(new BlobPath().add("bar")); + byte[] data1 = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); + byte[] data2 = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); + containerFoo.writeBlob("test", new BytesArray(data1)); + containerBar.writeBlob("test", new BytesArray(data2)); + + assertArrayEquals(readBlobFully(containerFoo, "test", data1.length), data1); + assertArrayEquals(readBlobFully(containerBar, "test", data2.length), data2); + + assertTrue(containerFoo.blobExists("test")); + assertTrue(containerBar.blobExists("test")); + store.delete(new BlobPath()); + assertFalse(containerFoo.blobExists("test")); + assertFalse(containerBar.blobExists("test")); + } + } + + public static byte[] writeRandomBlob(BlobContainer container, String name, int length) throws IOException { + byte[] data = randomBytes(length); + container.writeBlob(name, new BytesArray(data)); + return data; + } + + public static byte[] readBlobFully(BlobContainer container, String name, int length) throws IOException { + byte[] data = new byte[length]; + try (InputStream inputStream = container.readBlob(name)) { + assertThat(inputStream.read(data), equalTo(length)); + assertThat(inputStream.read(), equalTo(-1)); + } + return data; + } + + public static byte[] randomBytes(int length) { + byte[] data = new byte[length]; + for (int i = 0; i < data.length; i++) { + data[i] = (byte) randomInt(); + } + return data; + } + + protected abstract BlobStore newBlobStore() throws IOException; +} diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 2fe11b5875c..06e20fd31f1 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -325,7 +325,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { try { Settings nodeSettings = Settings.settingsBuilder() .put("threadpool." 
+ threadPoolName + ".queue_size", 1000) - .put("name", "testCachedExecutorType").build(); + .put("name", "testShutdownNowInterrupts").build(); threadPool = new ThreadPool(nodeSettings); ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); threadPool.setClusterSettings(clusterSettings); diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index a5b6e08de3f..747b218b797 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -37,6 +37,7 @@ import org.junit.Before; import java.io.IOException; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -204,6 +205,61 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { serviceA.removeHandler("sayHello"); } + public void testThreadContext() throws ExecutionException, InterruptedException { + + serviceA.registerRequestHandler("ping_pong", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel) -> { + assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user")); + assertNull(threadPool.getThreadContext().getTransient("my_private_context")); + try { + StringMessageResponse response = new StringMessageResponse("pong"); + threadPool.getThreadContext().putHeader("test.pong.user", "pong_user"); + channel.sendResponse(response); + } catch (IOException e) { + assertThat(e.getMessage(), false, equalTo(true)); + } + }); + final Object context = new Object(); + final String executor = randomFrom(ThreadPool.THREAD_POOL_TYPES.keySet().toArray(new String[0])); + BaseTransportResponseHandler baseTransportResponseHandler = new BaseTransportResponseHandler() { + @Override + public StringMessageResponse newInstance() { + return new StringMessageResponse(); + } + + @Override + public String executor() { + return executor; + } + + @Override + public void handleResponse(StringMessageResponse response) { + assertThat("pong", equalTo(response.message)); + assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user")); + assertNull(threadPool.getThreadContext().getHeader("test.pong.user")); + assertSame(context, threadPool.getThreadContext().getTransient("my_private_context")); + threadPool.getThreadContext().putHeader("some.temp.header", "booooom"); + } + + @Override + public void handleException(TransportException exp) { + assertThat("got exception instead of a response: " + exp.getMessage(), false, equalTo(true)); + } + }; + StringMessageRequest ping = new StringMessageRequest("ping"); + threadPool.getThreadContext().putHeader("test.ping.user", "ping_user"); + threadPool.getThreadContext().putTransient("my_private_context", context); + + TransportFuture res = serviceB.submitRequest(nodeA, "ping_pong", ping, baseTransportResponseHandler); + + StringMessageResponse message = res.get(); + assertThat("pong", equalTo(message.message)); + assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user")); + assertSame(context, threadPool.getThreadContext().getTransient("my_private_context")); + assertNull("this header is only visible in the handler context", 
threadPool.getThreadContext().getHeader("some.temp.header")); + + serviceA.removeHandler("sayHello"); + } + public void testLocalNodeConnection() throws InterruptedException { assertTrue("serviceA is not connected to nodeA", serviceA.nodeConnected(nodeA)); if (((TransportService) serviceA).getLocalNode() != null) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java similarity index 51% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java rename to core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java index 587be0956ca..20663aee29d 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java @@ -17,27 +17,26 @@ * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.transport; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; -import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpServerTransport; @@ -47,25 +46,24 @@ import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermsQueryBuilder; -import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBuilder; import org.elasticsearch.indices.cache.query.terms.TermsLookup; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; -import org.elasticsearch.script.groovy.GroovyScriptEngineService; -import org.elasticsearch.test.ActionRecordingPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import 
org.elasticsearch.test.rest.client.http.HttpResponse; +import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; +import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -81,7 +79,8 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @ClusterScope(scope = SUITE) -public class ContextAndHeaderTransportTests extends ESIntegTestCase { +public class ContextAndHeaderTransportIT extends ESIntegTestCase { + private static final List requests = new CopyOnWriteArrayList<>(); private String randomHeaderKey = randomAsciiOfLength(10); private String randomHeaderValue = randomAsciiOfLength(20); private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); @@ -98,29 +97,28 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(ActionRecordingPlugin.class, GroovyPlugin.class); + return pluginList(ActionLoggingPlugin.class); } @Before public void createIndices() throws Exception { String mapping = jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("location").field("type", "geo_shape").endObject() - .startObject("name").field("type", "string").endObject() - .endObject() - .endObject().endObject().string(); + .startObject("properties") + .startObject("location").field("type", "geo_shape").endObject() + .startObject("name").field("type", "string").endObject() + .endObject() + .endObject().endObject().string(); Settings settings = settingsBuilder() - .put(indexSettings()) - .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. - .build(); + .put(indexSettings()) + .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. 
+ .build(); assertAcked(transportClient().admin().indices().prepareCreate(lookupIndex) - .setSettings(settings).addMapping("type", mapping)); + .setSettings(settings).addMapping("type", mapping)); assertAcked(transportClient().admin().indices().prepareCreate(queryIndex) - .setSettings(settings).addMapping("type", mapping)); + .setSettings(settings).addMapping("type", mapping)); ensureGreen(queryIndex, lookupIndex); - - ActionRecordingPlugin.clear(); + requests.clear(); } @After @@ -131,86 +129,88 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { public void testThatTermsLookupGetRequestContainsContextAndHeaders() throws Exception { transportClient().prepareIndex(lookupIndex, "type", "1") - .setSource(jsonBuilder().startObject().array("followers", "foo", "bar", "baz").endObject()).get(); + .setSource(jsonBuilder().startObject().array("followers", "foo", "bar", "baz").endObject()).get(); transportClient().prepareIndex(queryIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("username", "foo").endObject()).get(); + .setSource(jsonBuilder().startObject().field("username", "foo").endObject()).get(); transportClient().admin().indices().prepareRefresh(queryIndex, lookupIndex).get(); TermsQueryBuilder termsLookupFilterBuilder = QueryBuilders.termsLookupQuery("username", new TermsLookup(lookupIndex, "type", "1", "followers")); BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(termsLookupFilterBuilder); SearchResponse searchResponse = transportClient() - .prepareSearch(queryIndex) - .setQuery(queryBuilder) - .get(); + .prepareSearch(queryIndex) + .setQuery(queryBuilder) + .get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 1); assertGetRequestsContainHeaders(); } + + public void testThatGeoShapeQueryGetRequestContainsContextAndHeaders() throws Exception { transportClient().prepareIndex(lookupIndex, "type", "1").setSource(jsonBuilder().startObject() - .field("name", "Munich Suburban Area") - .startObject("location") - .field("type", "polygon") - .startArray("coordinates").startArray() - .startArray().value(11.34).value(48.25).endArray() - .startArray().value(11.68).value(48.25).endArray() - .startArray().value(11.65).value(48.06).endArray() - .startArray().value(11.37).value(48.13).endArray() - .startArray().value(11.34).value(48.25).endArray() // close the polygon - .endArray().endArray() - .endObject() - .endObject()) - .get(); + .field("name", "Munich Suburban Area") + .startObject("location") + .field("type", "polygon") + .startArray("coordinates").startArray() + .startArray().value(11.34).value(48.25).endArray() + .startArray().value(11.68).value(48.25).endArray() + .startArray().value(11.65).value(48.06).endArray() + .startArray().value(11.37).value(48.13).endArray() + .startArray().value(11.34).value(48.25).endArray() // close the polygon + .endArray().endArray() + .endObject() + .endObject()) + .get(); // second document transportClient().prepareIndex(queryIndex, "type", "1").setSource(jsonBuilder().startObject() - .field("name", "Munich Center") - .startObject("location") - .field("type", "point") - .startArray("coordinates").value(11.57).value(48.13).endArray() - .endObject() - .endObject()) - .get(); + .field("name", "Munich Center") + .startObject("location") + .field("type", "point") + .startArray("coordinates").value(11.57).value(48.13).endArray() + .endObject() + .endObject()) + .get(); transportClient().admin().indices().prepareRefresh(lookupIndex, queryIndex).get(); 
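        // Note: resolving the indexed shape via indexedShapeIndex/indexedShapePath below causes the
        // search to issue an internal GetRequest against lookupIndex; the
        // assertGetRequestsContainHeaders() call at the end of this test then verifies that the random
        // header added by the filtered transport client is still visible for that internal request.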
GeoShapeQueryBuilder queryBuilder = QueryBuilders.geoShapeQuery("location", "1", "type") - .indexedShapeIndex(lookupIndex) - .indexedShapePath("location"); + .indexedShapeIndex(lookupIndex) + .indexedShapePath("location"); SearchResponse searchResponse = transportClient() - .prepareSearch(queryIndex) - .setQuery(queryBuilder) - .get(); + .prepareSearch(queryIndex) + .setQuery(queryBuilder) + .get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 1); - assertThat(ActionRecordingPlugin.allRequests(), hasSize(greaterThan(0))); + assertThat(requests, hasSize(greaterThan(0))); assertGetRequestsContainHeaders(); } public void testThatMoreLikeThisQueryMultiTermVectorRequestContainsContextAndHeaders() throws Exception { transportClient().prepareIndex(lookupIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) + .get(); transportClient().prepareIndex(queryIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("name", "Jar Jar Binks - A horrible mistake").endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("name", "Jar Jar Binks - A horrible mistake").endObject()) + .get(); transportClient().prepareIndex(queryIndex, "type", "2") - .setSource(jsonBuilder().startObject().field("name", "Star Wars - Return of the jedi").endObject()) - .get(); + .setSource(jsonBuilder().startObject().field("name", "Star Wars - Return of the jedi").endObject()) + .get(); transportClient().admin().indices().prepareRefresh(lookupIndex, queryIndex).get(); - MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = QueryBuilders.moreLikeThisQuery(new String[] {"name"}, null, - new Item[] {new Item(lookupIndex, "type", "1")}) - .minTermFreq(1) - .minDocFreq(1); + MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = QueryBuilders.moreLikeThisQuery(new String[]{"name"}, null, + new Item[]{new Item(lookupIndex, "type", "1")}) + .minTermFreq(1) + .minDocFreq(1); SearchResponse searchResponse = transportClient() - .prepareSearch(queryIndex) - .setQuery(moreLikeThisQueryBuilder) - .get(); + .prepareSearch(queryIndex) + .setQuery(moreLikeThisQueryBuilder) + .get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 1); @@ -218,46 +218,22 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } public void testThatPercolatingExistingDocumentGetRequestContainsContextAndHeaders() throws Exception { - transportClient().prepareIndex(lookupIndex, ".percolator", "1") - .setSource(jsonBuilder().startObject().startObject("query").startObject("match").field("name", "star wars").endObject().endObject().endObject()) - .get(); - transportClient().prepareIndex(lookupIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) - .get(); - transportClient().admin().indices().prepareRefresh(lookupIndex).get(); + Client client = transportClient(); + client.prepareIndex(lookupIndex, ".percolator", "1") + .setSource(jsonBuilder().startObject().startObject("query").startObject("match").field("name", "star wars").endObject().endObject().endObject()) + .get(); + client.prepareIndex(lookupIndex, "type", "1") + .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) + .get(); + client.admin().indices().prepareRefresh(lookupIndex).get(); - GetRequest getRequest = transportClient().prepareGet(lookupIndex, "type", 
"1").request(); - PercolateResponse response = transportClient().preparePercolate().setDocumentType("type").setGetRequest(getRequest).get(); + GetRequest getRequest = client.prepareGet(lookupIndex, "type", "1").request(); + PercolateResponse response = client.preparePercolate().setDocumentType("type").setGetRequest(getRequest).get(); assertThat(response.getCount(), is(1l)); assertGetRequestsContainHeaders(); } - public void testThatIndexedScriptGetRequestContainsContextAndHeaders() throws Exception { - PutIndexedScriptResponse scriptResponse = transportClient().preparePutIndexedScript(GroovyScriptEngineService.NAME, "my_script", - jsonBuilder().startObject().field("script", "_score * 10").endObject().string() - ).get(); - assertThat(scriptResponse.isCreated(), is(true)); - - transportClient().prepareIndex(queryIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) - .get(); - transportClient().admin().indices().prepareRefresh(queryIndex).get(); - - SearchResponse searchResponse = transportClient() - .prepareSearch(queryIndex) - .setQuery( - QueryBuilders.functionScoreQuery( - new ScriptScoreFunctionBuilder(new Script("my_script", ScriptType.INDEXED, "groovy", null))).boostMode( - CombineFunction.REPLACE)).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1); - assertThat(searchResponse.getHits().getMaxScore(), is(10.0f)); - - assertGetRequestsContainHeaders(".scripts"); - assertRequestsContainHeader(PutIndexedScriptRequest.class); - } - public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws Exception { String releventHeaderName = "relevant_" + randomHeaderKey; for (RestController restController : internalCluster().getDataNodeInstances(RestController.class)) { @@ -266,26 +242,37 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { CloseableHttpClient httpClient = HttpClients.createDefault(); HttpResponse response = new HttpRequestBuilder(httpClient) - .httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class)) - .addHeader(randomHeaderKey, randomHeaderValue) - .addHeader(releventHeaderName, randomHeaderValue) - .path("/" + queryIndex + "/_search") - .execute(); + .httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class)) + .addHeader(randomHeaderKey, randomHeaderValue) + .addHeader(releventHeaderName, randomHeaderValue) + .path("/" + queryIndex + "/_search") + .execute(); assertThat(response, hasStatus(OK)); - List searchRequests = ActionRecordingPlugin.requestsOfType(SearchRequest.class); + List searchRequests = getRequests(SearchRequest.class); assertThat(searchRequests, hasSize(greaterThan(0))); - for (SearchRequest searchRequest : searchRequests) { - assertThat(searchRequest.hasHeader(releventHeaderName), is(true)); + for (RequestAndHeaders requestAndHeaders : searchRequests) { + assertThat(requestAndHeaders.headers.containsKey(releventHeaderName), is(true)); // was not specified, thus is not included - assertThat(searchRequest.hasHeader(randomHeaderKey), is(false)); + assertThat(requestAndHeaders.headers.containsKey(randomHeaderKey), is(false)); } } - private void assertRequestsContainHeader(Class> clazz) { - List> classRequests = ActionRecordingPlugin.requestsOfType(clazz); - for (ActionRequest request : classRequests) { - assertRequestContainsHeader(request); + private List getRequests(Class clazz) { + List results = new ArrayList<>(); + for (RequestAndHeaders request : requests) { + if 
(request.request.getClass().equals(clazz)) { + results.add(request); + } + } + + return results; + } + + private void assertRequestsContainHeader(Class clazz) { + List classRequests = getRequests(clazz); + for (RequestAndHeaders request : classRequests) { + assertRequestContainsHeader(request.request, request.headers); } } @@ -294,43 +281,99 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } private void assertGetRequestsContainHeaders(String index) { - List getRequests = ActionRecordingPlugin.requestsOfType(GetRequest.class); + List getRequests = getRequests(GetRequest.class); assertThat(getRequests, hasSize(greaterThan(0))); - for (GetRequest request : getRequests) { - if (!request.index().equals(index)) { + for (RequestAndHeaders request : getRequests) { + if (!((GetRequest)request.request).index().equals(index)) { continue; } - assertRequestContainsHeader(request); + assertRequestContainsHeader(request.request, request.headers); } } - private void assertRequestContainsHeader(ActionRequest request) { + private void assertRequestContainsHeader(ActionRequest request, Map context) { String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName()); if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; msg = String.format(Locale.ROOT, "Expected header %s to be in index request %s/%s/%s", randomHeaderKey, - indexRequest.index(), indexRequest.type(), indexRequest.id()); + indexRequest.index(), indexRequest.type(), indexRequest.id()); } - assertThat(msg, request.hasHeader(randomHeaderKey), is(true)); - assertThat(request.getHeader(randomHeaderKey).toString(), is(randomHeaderValue)); + assertThat(msg, context.containsKey(randomHeaderKey), is(true)); + assertThat(context.get(randomHeaderKey).toString(), is(randomHeaderValue)); } /** * a transport client that adds our random header */ private Client transportClient() { - Client transportClient = internalCluster().transportClient(); - FilterClient filterClient = new FilterClient(transportClient) { - @Override - protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( - Action action, Request request, - ActionListener listener) { - request.putHeader(randomHeaderKey, randomHeaderValue); - super.doExecute(action, request, listener); - } - }; + return internalCluster().transportClient().filterWithHeader(Collections.singletonMap(randomHeaderKey, randomHeaderValue)); + } - return filterClient; + public static class ActionLoggingPlugin extends Plugin { + + @Override + public String name() { + return "test-action-logging"; + } + + @Override + public String description() { + return "Test action logging"; + } + + @Override + public Collection nodeModules() { + return Collections.singletonList(new ActionLoggingModule()); + } + + public void onModule(ActionModule module) { + module.registerFilter(LoggingFilter.class); + } + } + + public static class ActionLoggingModule extends AbstractModule { + @Override + protected void configure() { + bind(LoggingFilter.class).asEagerSingleton(); + } + + } + + public static class LoggingFilter extends ActionFilter.Simple { + + private final ThreadPool threadPool; + + @Inject + public LoggingFilter(Settings settings, ThreadPool pool) { + super(settings); + this.threadPool = pool; + } + + @Override + public int order() { + return 999; + } + + @Override + protected boolean apply(String action, ActionRequest request, ActionListener listener) { + 
requests.add(new RequestAndHeaders(threadPool.getThreadContext().getHeaders(), request)); + return true; + } + + @Override + protected boolean apply(String action, ActionResponse response, ActionListener listener) { + return true; + } + } + + private static class RequestAndHeaders { + final Map headers; + final ActionRequest request; + + private RequestAndHeaders(Map headers, ActionRequest request) { + this.headers = headers; + this.request = request; + } } } diff --git a/core/src/test/java/org/elasticsearch/transport/TransportMessageTests.java b/core/src/test/java/org/elasticsearch/transport/TransportMessageTests.java deleted file mode 100644 index a94b06f6f06..00000000000 --- a/core/src/test/java/org/elasticsearch/transport/TransportMessageTests.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.transport; - -import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -/** - * - */ -public class TransportMessageTests extends ESTestCase { - public void testSerialization() throws Exception { - Message message = new Message(); - message.putHeader("key1", "value1"); - message.putHeader("key2", "value2"); - message.putInContext("key3", "value3"); - - BytesStreamOutput out = new BytesStreamOutput(); - out.setVersion(Version.CURRENT); - message.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); - in.setVersion(Version.CURRENT); - message = new Message(); - message.readFrom(in); - assertThat(message.getHeaders().size(), is(2)); - assertThat((String) message.getHeader("key1"), equalTo("value1")); - assertThat((String) message.getHeader("key2"), equalTo("value2")); - assertThat(message.isContextEmpty(), is(true)); - - // ensure that casting is not needed - String key1 = message.getHeader("key1"); - assertThat(key1, is("value1")); - } - - public void testCopyHeadersAndContext() throws Exception { - Message m1 = new Message(); - m1.putHeader("key1", "value1"); - m1.putHeader("key2", "value2"); - m1.putInContext("key3", "value3"); - - Message m2 = new Message(m1); - - assertThat(m2.getHeaders().size(), is(2)); - assertThat((String) m2.getHeader("key1"), equalTo("value1")); - assertThat((String) m2.getHeader("key2"), equalTo("value2")); - assertThat((String) m2.getFromContext("key3"), equalTo("value3")); - - // ensure that casting is not needed - String key3 = m2.getFromContext("key3"); - assertThat(key3, is("value3")); - testContext(m2, "key3", "value3"); - } - - // ensure that generic arg like this is not needed: TransportMessage transportMessage - private void 
testContext(TransportMessage transportMessage, String key, String expectedValue) { - String result = transportMessage.getFromContext(key); - assertThat(result, is(expectedValue)); - - } - - private static class Message extends TransportMessage { - - private Message() { - } - - private Message(Message message) { - super(message); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java index ce090cd1cbc..8304dd084d5 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java @@ -50,6 +50,7 @@ import org.jboss.netty.channel.ChannelPipelineFactory; import java.io.IOException; import java.net.InetSocketAddress; import java.util.Collection; +import java.util.Collections; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.containsString; @@ -79,9 +80,8 @@ public class NettyTransportIT extends ESIntegTestCase { Client transportClient = internalCluster().transportClient(); ClusterHealthResponse clusterIndexHealths = transportClient.admin().cluster().prepareHealth().get(); assertThat(clusterIndexHealths.getStatus(), is(ClusterHealthStatus.GREEN)); - try { - transportClient.admin().cluster().prepareHealth().putHeader("ERROR", "MY MESSAGE").get(); + transportClient.filterWithHeader(Collections.singletonMap("ERROR", "MY MESSAGE")).admin().cluster().prepareHealth().get(); fail("Expected exception, but didnt happen"); } catch (ElasticsearchException e) { assertThat(e.getMessage(), containsString("MY MESSAGE")); @@ -142,8 +142,9 @@ public class NettyTransportIT extends ESIntegTestCase { final TransportRequest request = reg.newRequest(); request.remoteAddress(new InetSocketTransportAddress((InetSocketAddress) channel.getRemoteAddress())); request.readFrom(buffer); - if (request.hasHeader("ERROR")) { - throw new ElasticsearchException((String) request.getHeader("ERROR")); + String error = threadPool.getThreadContext().getHeader("ERROR"); + if (error != null) { + throw new ElasticsearchException(error); } if (reg.getExecutor() == ThreadPool.Names.SAME) { //noinspection unchecked diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index bc7b99cb452..8c8a6fb9fae 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -68,7 +68,7 @@ DEFAULT_PLUGINS = ["analysis-icu", "discovery-gce", "discovery-multicast", "lang-javascript", - "lang-plan-a", + "lang-painless", "lang-python", "mapper-attachments", "mapper-murmur3", diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java deleted file mode 100644 index c40c2bded0a..00000000000 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ /dev/null @@ -1,312 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.messy.tests; - -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; -import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.Template; -import org.elasticsearch.script.mustache.MustachePlugin; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; -import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.SuggestBuilder; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; -import org.elasticsearch.test.ActionRecordingPlugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.junit.After; -import org.junit.Before; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; -import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; - -@ClusterScope(scope = SUITE) -public class ContextAndHeaderTransportTests extends ESIntegTestCase { - private String randomHeaderKey = 
randomAsciiOfLength(10); - private String randomHeaderValue = randomAsciiOfLength(20); - private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); - private String lookupIndex = "lookup-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder() - .put(super.nodeSettings(nodeOrdinal)) - .put("script.indexed", "true") - .put(NetworkModule.HTTP_ENABLED.getKey(), true) - .build(); - } - - @Override - protected Collection> nodePlugins() { - return pluginList(ActionRecordingPlugin.class, MustachePlugin.class); - } - - @Before - public void createIndices() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("location").field("type", "geo_shape").endObject() - .startObject("name").field("type", "string").endObject() - .endObject() - .endObject().endObject().string(); - - Settings settings = settingsBuilder() - .put(indexSettings()) - .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. - .build(); - assertAcked(transportClient().admin().indices().prepareCreate(lookupIndex) - .setSettings(settings).addMapping("type", mapping)); - assertAcked(transportClient().admin().indices().prepareCreate(queryIndex) - .setSettings(settings).addMapping("type", mapping)); - ensureGreen(queryIndex, lookupIndex); - } - - @After - public void checkAllRequestsContainHeaders() { - assertRequestsContainHeader(IndexRequest.class); - assertRequestsContainHeader(RefreshRequest.class); - ActionRecordingPlugin.clear(); - } - - public void testThatIndexedScriptGetRequestInTemplateQueryContainsContextAndHeaders() throws Exception { - PutIndexedScriptResponse scriptResponse = transportClient() - .preparePutIndexedScript( - MustacheScriptEngineService.NAME, - "my_script", - jsonBuilder().startObject().field("script", "{ \"match\": { \"name\": \"Star Wars\" }}").endObject() - .string()).get(); - assertThat(scriptResponse.isCreated(), is(true)); - - transportClient().prepareIndex(queryIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()).get(); - transportClient().admin().indices().prepareRefresh(queryIndex).get(); - - SearchResponse searchResponse = transportClient() - .prepareSearch(queryIndex) - .setQuery( - QueryBuilders.templateQuery(new Template("my_script", ScriptType.INDEXED, - MustacheScriptEngineService.NAME, null, null))).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1); - - assertGetRequestsContainHeaders(".scripts"); - assertRequestsContainHeader(PutIndexedScriptRequest.class); - } - - public void testThatSearchTemplatesWithIndexedTemplatesGetRequestContainsContextAndHeaders() throws Exception { - PutIndexedScriptResponse scriptResponse = transportClient().preparePutIndexedScript(MustacheScriptEngineService.NAME, "the_template", - jsonBuilder().startObject().startObject("template").startObject("query").startObject("match") - .field("name", "{{query_string}}").endObject().endObject().endObject().endObject().string() - ).get(); - assertThat(scriptResponse.isCreated(), is(true)); - - transportClient().prepareIndex(queryIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) - .get(); - transportClient().admin().indices().prepareRefresh(queryIndex).get(); - - Map params = new HashMap<>(); - params.put("query_string", "star wars"); - 
- SearchResponse searchResponse = transportClient().prepareSearch(queryIndex).setTemplate(new Template("the_template", ScriptType.INDEXED, MustacheScriptEngineService.NAME, null, params)) - .get(); - - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1); - - assertGetRequestsContainHeaders(".scripts"); - assertRequestsContainHeader(PutIndexedScriptRequest.class); - } - - public void testThatIndexedScriptGetRequestInPhraseSuggestContainsContextAndHeaders() throws Exception { - CreateIndexRequestBuilder builder = transportClient().admin().indices().prepareCreate("test").setSettings(settingsBuilder() - .put(indexSettings()) - .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. - .put("index.analysis.analyzer.text.tokenizer", "standard") - .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") - .put("index.analysis.filter.my_shingle.type", "shingle") - .put("index.analysis.filter.my_shingle.output_unigrams", true) - .put("index.analysis.filter.my_shingle.min_shingle_size", 2) - .put("index.analysis.filter.my_shingle.max_shingle_size", 3)); - - XContentBuilder mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("properties") - .startObject("title") - .field("type", "string") - .field("analyzer", "text") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(builder.addMapping("type1", mapping)); - ensureGreen(); - - List titles = new ArrayList<>(); - - titles.add("United States House of Representatives Elections in Washington 2006"); - titles.add("United States House of Representatives Elections in Washington 2005"); - titles.add("State"); - titles.add("Houses of Parliament"); - titles.add("Representative Government"); - titles.add("Election"); - - for (String title: titles) { - transportClient().prepareIndex("test", "type1").setSource("title", title).get(); - } - transportClient().admin().indices().prepareRefresh("test").get(); - - String filterStringAsFilter = XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); - - PutIndexedScriptResponse scriptResponse = transportClient() - .preparePutIndexedScript( - MustacheScriptEngineService.NAME, - "my_script", - jsonBuilder().startObject().field("script", filterStringAsFilter).endObject() - .string()).get(); - assertThat(scriptResponse.isCreated(), is(true)); - - PhraseSuggestionBuilder suggest = phraseSuggestion("title") - .field("title") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title") - .suggestMode("always") - .maxTermFreq(.99f) - .size(10) - .maxInspections(200) - ) - .confidence(0f) - .maxErrors(2f) - .shardSize(30000) - .size(10); - - PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(new Template("my_script", ScriptType.INDEXED, - MustacheScriptEngineService.NAME, null, null)); - - SearchRequestBuilder searchRequestBuilder = transportClient().prepareSearch("test").setSize(0); - SuggestBuilder suggestBuilder = new SuggestBuilder(); - String suggestText = "united states house of representatives elections in washington 2006"; - if (suggestText != null) { - suggestBuilder.setText(suggestText); - } - suggestBuilder.addSuggestion(filteredFilterSuggest); - searchRequestBuilder.suggest(suggestBuilder); - SearchResponse actionGet = searchRequestBuilder.execute().actionGet(); - assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), 
equalTo(0)); - Suggest searchSuggest = actionGet.getSuggest(); - - assertSuggestionSize(searchSuggest, 0, 2, "title"); - - assertGetRequestsContainHeaders(".scripts"); - assertRequestsContainHeader(PutIndexedScriptRequest.class); - } - - private void assertRequestsContainHeader(Class> clazz) { - List> classRequests = ActionRecordingPlugin.requestsOfType(clazz); - for (ActionRequest request : classRequests) { - assertRequestContainsHeader(request); - } - } - - private void assertGetRequestsContainHeaders(String index) { - List getRequests = ActionRecordingPlugin.requestsOfType(GetRequest.class); - assertThat(getRequests, hasSize(greaterThan(0))); - - for (GetRequest request : getRequests) { - if (!request.index().equals(index)) { - continue; - } - assertRequestContainsHeader(request); - } - } - - private void assertRequestContainsHeader(ActionRequest request) { - String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName()); - if (request instanceof IndexRequest) { - IndexRequest indexRequest = (IndexRequest) request; - msg = String.format(Locale.ROOT, "Expected header %s to be in index request %s/%s/%s", randomHeaderKey, - indexRequest.index(), indexRequest.type(), indexRequest.id()); - } - assertThat(msg, request.hasHeader(randomHeaderKey), is(true)); - assertThat(request.getHeader(randomHeaderKey).toString(), is(randomHeaderValue)); - } - - /** - * a transport client that adds our random header - */ - private Client transportClient() { - Client transportClient = internalCluster().transportClient(); - FilterClient filterClient = new FilterClient(transportClient) { - @Override - protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( - Action action, Request request, - ActionListener listener) { - request.putHeader(randomHeaderKey, randomHeaderValue); - super.doExecute(action, request, listener); - } - }; - - return filterClient; - } -} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 5ed8a7504d9..661187286d7 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -99,8 +99,8 @@ public class TemplateQueryParserTests extends ESTestCase { new Class[]{Client.class}, (proxy1, method, args) -> { throw new UnsupportedOperationException("client is just a dummy"); }); - Index index = new Index("test"); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); + Index index = idxSettings.getIndex(); SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); ScriptModule scriptModule = new ScriptModule(settingsModule); // TODO: make this use a mock engine instead of mustache and it will no longer be messy! 
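The analysis plugin test hunks that follow all apply the same mechanical change: Index now carries a UUID in addition to its name, so standalone unit tests either pass the "_na_" placeholder explicitly or derive the Index from the IndexSettings helper, as in the TemplateQueryParserTests hunk above. A minimal sketch of both styles (the wrapper class and method names here are illustrative only, not part of the change):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.index.Index;
    import org.elasticsearch.index.IndexSettings;
    import org.elasticsearch.test.IndexSettingsModule;

    public class IndexUuidInTestsSketch {
        // Style 1: tests that never touch cluster state construct the Index with the "_na_" UUID placeholder.
        static Index directPlaceholder() {
            return new Index("test", "_na_");
        }

        // Style 2: tests that already build IndexSettings let the helper supply the Index (name plus UUID).
        static Index fromIndexSettings(Settings nodeSettings) {
            IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", nodeSettings);
            return idxSettings.getIndex();
        }
    }

Where an IndexSettings instance is built anyway, the second style avoids hard-coding the placeholder UUID.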
diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java index 36a43e344d9..cca590b2c51 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisTestUtils.java @@ -40,7 +40,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class AnalysisTestUtils { public static AnalysisService createAnalysisService(Settings settings) throws IOException { - Index index = new Index("test"); + Index index = new Index("test", "_na_"); Settings indexSettings = settingsBuilder().put(settings) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index 016053810d2..b34a8908f43 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -205,7 +205,7 @@ public class KuromojiAnalysisTests extends ESTestCase { .build(); final SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); - Index index = new Index("test"); + Index index = new Index("test", "_na_"); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisKuromojiPlugin().onModule(analysisModule); diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java index 6dd341346e5..18fe8010fd8 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -56,7 +56,7 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { } private AnalysisService testSimpleConfiguration(Settings settings) throws IOException { - Index index = new Index("test"); + Index index = new Index("test", "_na_"); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisPhoneticPlugin().onModule(analysisModule); SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java index d33d36d4c60..613ff88f1ae 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java @@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.instanceOf; */ public class SimpleSmartChineseAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { - Index index = new Index("test"); + Index index = new Index("test", "_na_"); Settings settings = 
settingsBuilder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java index 05c7252bdf7..2a6309f4af8 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java @@ -48,7 +48,6 @@ import static org.hamcrest.Matchers.instanceOf; */ public class PolishAnalysisTests extends ESTestCase { public void testDefaultsPolishAnalysis() throws IOException { - Index index = new Index("test"); Settings settings = settingsBuilder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -63,7 +62,7 @@ public class PolishAnalysisTests extends ESTestCase { new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); - final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); + final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings("test", settings)); TokenFilterFactory tokenizerFactory = analysisService.tokenFilter("polish_stem"); MatcherAssert.assertThat(tokenizerFactory, instanceOf(PolishStemTokenFilterFactory.class)); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java index 306a835c36e..0b604b11ce0 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java @@ -56,7 +56,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { } private void testToken(String source, String expected) throws IOException { - Index index = new Index("test"); + Index index = new Index("test", "_na_"); Settings settings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) @@ -78,7 +78,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { } private void testAnalyzer(String source, String... 
expected_terms) throws IOException { - Index index = new Index("test"); + Index index = new Index("test", "_na_"); Settings settings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java index 9fd42ae513a..f4127c4e532 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java @@ -110,7 +110,7 @@ public class TransportDeleteByQueryAction extends HandledTransportAction() { + scrollAction.execute(new SearchScrollRequest().scrollId(scrollId).scroll(request.scroll()), new ActionListener() { @Override public void onResponse(SearchResponse scrollResponse) { deleteHits(scrollId, scrollResponse); @@ -202,9 +202,9 @@ public class TransportDeleteByQueryAction extends HandledTransportAction() { @Override @@ -319,10 +319,6 @@ public class TransportDeleteByQueryAction extends HandledTransportAction= (startTime + request.timeout().millis())); } - void addShardFailure(ShardOperationFailedException failure) { - addShardFailures(new ShardOperationFailedException[]{failure}); - } - void addShardFailures(ShardOperationFailedException[] failures) { if (!CollectionUtils.isEmpty(failures)) { ShardOperationFailedException[] duplicates = new ShardOperationFailedException[shardFailures.length + failures.length]; diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java index 2b8dc02289c..a7146c2a768 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java @@ -49,7 +49,7 @@ public class RestDeleteByQueryAction extends BaseRestHandler { @Inject public RestDeleteByQueryAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) { - super(settings, controller, client); + super(settings, client); this.indicesQueriesRegistry = indicesQueriesRegistry; controller.registerHandler(DELETE, "/{index}/_query", this); controller.registerHandler(DELETE, "/{index}/{type}/_query", this); diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java index 57bfa4c2328..980ee76c2ce 100644 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java +++ b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import 
org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchShardTarget; @@ -226,7 +227,7 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase { } else { deleted++; } - items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test", 0), "type", String.valueOf(i), 1, delete)); + items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test", "_na_", 0), "type", String.valueOf(i), 1, delete)); } else { items[i] = new BulkItemResponse(i, "delete", new BulkItemResponse.Failure("test", "type", String.valueOf(i), new Throwable("item failed"))); failed++; @@ -282,7 +283,7 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase { deleted[0] = deleted[0] + 1; deleted[index] = deleted[index] + 1; } - items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test-" + index, 0), "type", String.valueOf(i), 1, delete)); + items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test-" + index, "_na_", 0), "type", String.valueOf(i), 1, delete)); } else { items[i] = new BulkItemResponse(i, "delete", new BulkItemResponse.Failure("test-" + index, "type", String.valueOf(i), new Throwable("item failed"))); failed[0] = failed[0] + 1; @@ -340,7 +341,7 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase { SearchHit[] docs = new SearchHit[nbDocs]; for (int i = 0; i < nbDocs; i++) { InternalSearchHit doc = new InternalSearchHit(randomInt(), String.valueOf(i), new Text("type"), null); - doc.shard(new SearchShardTarget("node", "test", randomInt())); + doc.shard(new SearchShardTarget("node", new Index("test", "_na_"), randomInt())); docs[i] = doc; } diff --git a/plugins/lang-plan-a/ant.xml b/plugins/lang-painless/ant.xml similarity index 93% rename from plugins/lang-plan-a/ant.xml rename to plugins/lang-painless/ant.xml index bf1c9b93757..fa53d714257 100644 --- a/plugins/lang-plan-a/ant.xml +++ b/plugins/lang-painless/ant.xml @@ -15,10 +15,10 @@ - - - - + + + + @@ -49,7 +49,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -71,7 +71,7 @@ - + @@ -85,7 +85,7 @@ - + @@ -93,7 +93,7 @@ - + @@ -110,7 +110,7 @@ - + diff --git a/plugins/lang-plan-a/build.gradle b/plugins/lang-painless/build.gradle similarity index 96% rename from plugins/lang-plan-a/build.gradle rename to plugins/lang-painless/build.gradle index f40bf4fd8e7..bf0694a0b73 100644 --- a/plugins/lang-plan-a/build.gradle +++ b/plugins/lang-painless/build.gradle @@ -21,7 +21,7 @@ import org.apache.tools.ant.types.Path esplugin { description 'An easy, safe and fast scripting language for Elasticsearch' - classname 'org.elasticsearch.plan.a.PlanAPlugin' + classname 'org.elasticsearch.painless.PainlessPlugin' } dependencies { diff --git a/plugins/lang-plan-a/licenses/antlr4-runtime-4.5.1-1.jar.sha1 b/plugins/lang-painless/licenses/antlr4-runtime-4.5.1-1.jar.sha1 similarity index 100% rename from plugins/lang-plan-a/licenses/antlr4-runtime-4.5.1-1.jar.sha1 rename to plugins/lang-painless/licenses/antlr4-runtime-4.5.1-1.jar.sha1 diff --git a/plugins/lang-plan-a/licenses/antlr4-runtime-LICENSE.txt b/plugins/lang-painless/licenses/antlr4-runtime-LICENSE.txt similarity index 100% rename from plugins/lang-plan-a/licenses/antlr4-runtime-LICENSE.txt rename to plugins/lang-painless/licenses/antlr4-runtime-LICENSE.txt diff --git a/plugins/lang-plan-a/licenses/antlr4-runtime-NOTICE.txt b/plugins/lang-painless/licenses/antlr4-runtime-NOTICE.txt similarity index 100% rename from 
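The test changes above all exercise the same constructor change: an Index is now identified by name plus UUID, so new Index("test") becomes new Index("test", "_na_"), and ShardId and SearchShardTarget pick up the same information (new ShardId("test", "_na_", 0), new SearchShardTarget("node", new Index("test", "_na_"), ...)), with "_na_" serving as the placeholder when no real index UUID is available. A minimal, illustrative sketch of the new signatures, assuming the Elasticsearch core classes are on the classpath (the class name below is hypothetical, not part of this change):

    import org.elasticsearch.index.Index;
    import org.elasticsearch.index.shard.ShardId;

    public class IndexUuidExample {
        public static void main(String[] args) {
            // An Index is now a (name, uuid) pair; tests pass the "_na_" placeholder.
            Index index = new Index("test", "_na_");
            // A ShardId carries the index name, the index UUID and the shard number.
            ShardId shardId = new ShardId("test", "_na_", 0);
            System.out.println(index + " " + shardId);
        }
    }
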
plugins/lang-plan-a/licenses/antlr4-runtime-NOTICE.txt rename to plugins/lang-painless/licenses/antlr4-runtime-NOTICE.txt diff --git a/plugins/lang-plan-a/licenses/asm-5.0.4.jar.sha1 b/plugins/lang-painless/licenses/asm-5.0.4.jar.sha1 similarity index 100% rename from plugins/lang-plan-a/licenses/asm-5.0.4.jar.sha1 rename to plugins/lang-painless/licenses/asm-5.0.4.jar.sha1 diff --git a/plugins/lang-plan-a/licenses/asm-LICENSE.txt b/plugins/lang-painless/licenses/asm-LICENSE.txt similarity index 100% rename from plugins/lang-plan-a/licenses/asm-LICENSE.txt rename to plugins/lang-painless/licenses/asm-LICENSE.txt diff --git a/plugins/lang-plan-a/licenses/asm-NOTICE.txt b/plugins/lang-painless/licenses/asm-NOTICE.txt similarity index 100% rename from plugins/lang-plan-a/licenses/asm-NOTICE.txt rename to plugins/lang-painless/licenses/asm-NOTICE.txt diff --git a/plugins/lang-plan-a/licenses/asm-commons-5.0.4.jar.sha1 b/plugins/lang-painless/licenses/asm-commons-5.0.4.jar.sha1 similarity index 100% rename from plugins/lang-plan-a/licenses/asm-commons-5.0.4.jar.sha1 rename to plugins/lang-painless/licenses/asm-commons-5.0.4.jar.sha1 diff --git a/plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 b/plugins/lang-painless/licenses/asm-tree-5.0.4.jar.sha1 similarity index 100% rename from plugins/lang-plan-a/licenses/asm-tree-5.0.4.jar.sha1 rename to plugins/lang-painless/licenses/asm-tree-5.0.4.jar.sha1 diff --git a/plugins/lang-plan-a/src/main/antlr/PlanALexer.g4 b/plugins/lang-painless/src/main/antlr/PainlessLexer.g4 similarity index 98% rename from plugins/lang-plan-a/src/main/antlr/PlanALexer.g4 rename to plugins/lang-painless/src/main/antlr/PainlessLexer.g4 index 5110a73e8ca..11cd97cc9e3 100644 --- a/plugins/lang-plan-a/src/main/antlr/PlanALexer.g4 +++ b/plugins/lang-painless/src/main/antlr/PainlessLexer.g4 @@ -17,7 +17,7 @@ * under the License. */ -lexer grammar PlanALexer; +lexer grammar PainlessLexer; @header { import java.util.Set; diff --git a/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 b/plugins/lang-painless/src/main/antlr/PainlessParser.g4 similarity index 98% rename from plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 rename to plugins/lang-painless/src/main/antlr/PainlessParser.g4 index 851d924b33f..4779c61d4b0 100644 --- a/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 +++ b/plugins/lang-painless/src/main/antlr/PainlessParser.g4 @@ -17,9 +17,9 @@ * under the License. */ -parser grammar PlanAParser; +parser grammar PainlessParser; -options { tokenVocab=PlanALexer; } +options { tokenVocab=PainlessLexer; } source : statement+ EOF diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java similarity index 81% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java index 77769736bf1..33ab695d527 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java @@ -17,75 +17,71 @@ * under the License. 
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.antlr.v4.runtime.ParserRuleContext; -import org.elasticsearch.plan.a.Definition.Cast; -import org.elasticsearch.plan.a.Definition.Constructor; -import org.elasticsearch.plan.a.Definition.Field; -import org.elasticsearch.plan.a.Definition.Method; -import org.elasticsearch.plan.a.Definition.Pair; -import org.elasticsearch.plan.a.Definition.Sort; -import org.elasticsearch.plan.a.Definition.Struct; -import org.elasticsearch.plan.a.Definition.Transform; -import org.elasticsearch.plan.a.Definition.Type; -import org.elasticsearch.plan.a.Metadata.ExpressionMetadata; -import org.elasticsearch.plan.a.Metadata.ExtNodeMetadata; -import org.elasticsearch.plan.a.Metadata.ExternalMetadata; -import org.elasticsearch.plan.a.Metadata.StatementMetadata; -import org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; -import org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; -import org.elasticsearch.plan.a.PlanAParser.AssignmentContext; -import org.elasticsearch.plan.a.PlanAParser.BinaryContext; -import org.elasticsearch.plan.a.PlanAParser.BlockContext; -import org.elasticsearch.plan.a.PlanAParser.BoolContext; -import org.elasticsearch.plan.a.PlanAParser.BreakContext; -import org.elasticsearch.plan.a.PlanAParser.CastContext; -import org.elasticsearch.plan.a.PlanAParser.CharContext; -import org.elasticsearch.plan.a.PlanAParser.CompContext; -import org.elasticsearch.plan.a.PlanAParser.ConditionalContext; -import org.elasticsearch.plan.a.PlanAParser.ContinueContext; -import org.elasticsearch.plan.a.PlanAParser.DeclContext; -import org.elasticsearch.plan.a.PlanAParser.DeclarationContext; -import org.elasticsearch.plan.a.PlanAParser.DecltypeContext; -import org.elasticsearch.plan.a.PlanAParser.DeclvarContext; -import org.elasticsearch.plan.a.PlanAParser.DoContext; -import org.elasticsearch.plan.a.PlanAParser.EmptyContext; -import org.elasticsearch.plan.a.PlanAParser.ExprContext; -import org.elasticsearch.plan.a.PlanAParser.ExpressionContext; -import org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; -import org.elasticsearch.plan.a.PlanAParser.ExtcallContext; -import org.elasticsearch.plan.a.PlanAParser.ExtcastContext; -import org.elasticsearch.plan.a.PlanAParser.ExtdotContext; -import org.elasticsearch.plan.a.PlanAParser.ExternalContext; -import org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; -import org.elasticsearch.plan.a.PlanAParser.ExtnewContext; -import org.elasticsearch.plan.a.PlanAParser.ExtprecContext; -import org.elasticsearch.plan.a.PlanAParser.ExtstartContext; -import org.elasticsearch.plan.a.PlanAParser.ExtstringContext; -import org.elasticsearch.plan.a.PlanAParser.ExttypeContext; -import org.elasticsearch.plan.a.PlanAParser.ExtvarContext; -import org.elasticsearch.plan.a.PlanAParser.FalseContext; -import org.elasticsearch.plan.a.PlanAParser.ForContext; -import org.elasticsearch.plan.a.PlanAParser.IfContext; -import org.elasticsearch.plan.a.PlanAParser.IncrementContext; -import org.elasticsearch.plan.a.PlanAParser.InitializerContext; -import org.elasticsearch.plan.a.PlanAParser.MultipleContext; -import org.elasticsearch.plan.a.PlanAParser.NullContext; -import org.elasticsearch.plan.a.PlanAParser.NumericContext; -import org.elasticsearch.plan.a.PlanAParser.PostincContext; -import org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; -import org.elasticsearch.plan.a.PlanAParser.PreincContext; -import org.elasticsearch.plan.a.PlanAParser.ReturnContext; -import 
org.elasticsearch.plan.a.PlanAParser.SingleContext; -import org.elasticsearch.plan.a.PlanAParser.SourceContext; -import org.elasticsearch.plan.a.PlanAParser.StatementContext; -import org.elasticsearch.plan.a.PlanAParser.ThrowContext; -import org.elasticsearch.plan.a.PlanAParser.TrapContext; -import org.elasticsearch.plan.a.PlanAParser.TrueContext; -import org.elasticsearch.plan.a.PlanAParser.TryContext; -import org.elasticsearch.plan.a.PlanAParser.UnaryContext; -import org.elasticsearch.plan.a.PlanAParser.WhileContext; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Constructor; +import org.elasticsearch.painless.Definition.Field; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Pair; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.Definition.Transform; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.PainlessParser.AfterthoughtContext; +import org.elasticsearch.painless.PainlessParser.ArgumentsContext; +import org.elasticsearch.painless.PainlessParser.AssignmentContext; +import org.elasticsearch.painless.PainlessParser.BinaryContext; +import org.elasticsearch.painless.PainlessParser.BlockContext; +import org.elasticsearch.painless.PainlessParser.BoolContext; +import org.elasticsearch.painless.PainlessParser.BreakContext; +import org.elasticsearch.painless.PainlessParser.CastContext; +import org.elasticsearch.painless.PainlessParser.CharContext; +import org.elasticsearch.painless.PainlessParser.CompContext; +import org.elasticsearch.painless.PainlessParser.ConditionalContext; +import org.elasticsearch.painless.PainlessParser.ContinueContext; +import org.elasticsearch.painless.PainlessParser.DeclContext; +import org.elasticsearch.painless.PainlessParser.DeclarationContext; +import org.elasticsearch.painless.PainlessParser.DecltypeContext; +import org.elasticsearch.painless.PainlessParser.DeclvarContext; +import org.elasticsearch.painless.PainlessParser.DoContext; +import org.elasticsearch.painless.PainlessParser.EmptyContext; +import org.elasticsearch.painless.PainlessParser.ExprContext; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.ExtbraceContext; +import org.elasticsearch.painless.PainlessParser.ExtcallContext; +import org.elasticsearch.painless.PainlessParser.ExtcastContext; +import org.elasticsearch.painless.PainlessParser.ExtdotContext; +import org.elasticsearch.painless.PainlessParser.ExternalContext; +import org.elasticsearch.painless.PainlessParser.ExtfieldContext; +import org.elasticsearch.painless.PainlessParser.ExtnewContext; +import org.elasticsearch.painless.PainlessParser.ExtprecContext; +import org.elasticsearch.painless.PainlessParser.ExtstartContext; +import org.elasticsearch.painless.PainlessParser.ExtstringContext; +import org.elasticsearch.painless.PainlessParser.ExttypeContext; +import org.elasticsearch.painless.PainlessParser.ExtvarContext; +import org.elasticsearch.painless.PainlessParser.FalseContext; +import org.elasticsearch.painless.PainlessParser.ForContext; +import org.elasticsearch.painless.PainlessParser.IfContext; +import org.elasticsearch.painless.PainlessParser.IncrementContext; +import org.elasticsearch.painless.PainlessParser.InitializerContext; +import org.elasticsearch.painless.PainlessParser.MultipleContext; +import 
org.elasticsearch.painless.PainlessParser.NullContext; +import org.elasticsearch.painless.PainlessParser.NumericContext; +import org.elasticsearch.painless.PainlessParser.PostincContext; +import org.elasticsearch.painless.PainlessParser.PrecedenceContext; +import org.elasticsearch.painless.PainlessParser.PreincContext; +import org.elasticsearch.painless.PainlessParser.ReturnContext; +import org.elasticsearch.painless.PainlessParser.SingleContext; +import org.elasticsearch.painless.PainlessParser.SourceContext; +import org.elasticsearch.painless.PainlessParser.StatementContext; +import org.elasticsearch.painless.PainlessParser.ThrowContext; +import org.elasticsearch.painless.PainlessParser.TrapContext; +import org.elasticsearch.painless.PainlessParser.TrueContext; +import org.elasticsearch.painless.PainlessParser.TryContext; +import org.elasticsearch.painless.PainlessParser.UnaryContext; +import org.elasticsearch.painless.PainlessParser.WhileContext; import java.util.ArrayDeque; import java.util.Arrays; @@ -94,20 +90,19 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.plan.a.Metadata.error; -import static org.elasticsearch.plan.a.PlanAParser.ADD; -import static org.elasticsearch.plan.a.PlanAParser.BWAND; -import static org.elasticsearch.plan.a.PlanAParser.BWOR; -import static org.elasticsearch.plan.a.PlanAParser.BWXOR; -import static org.elasticsearch.plan.a.PlanAParser.DIV; -import static org.elasticsearch.plan.a.PlanAParser.LSH; -import static org.elasticsearch.plan.a.PlanAParser.MUL; -import static org.elasticsearch.plan.a.PlanAParser.REM; -import static org.elasticsearch.plan.a.PlanAParser.RSH; -import static org.elasticsearch.plan.a.PlanAParser.SUB; -import static org.elasticsearch.plan.a.PlanAParser.USH; +import static org.elasticsearch.painless.PainlessParser.ADD; +import static org.elasticsearch.painless.PainlessParser.BWAND; +import static org.elasticsearch.painless.PainlessParser.BWOR; +import static org.elasticsearch.painless.PainlessParser.BWXOR; +import static org.elasticsearch.painless.PainlessParser.DIV; +import static org.elasticsearch.painless.PainlessParser.LSH; +import static org.elasticsearch.painless.PainlessParser.MUL; +import static org.elasticsearch.painless.PainlessParser.REM; +import static org.elasticsearch.painless.PainlessParser.RSH; +import static org.elasticsearch.painless.PainlessParser.SUB; +import static org.elasticsearch.painless.PainlessParser.USH; -class Analyzer extends PlanAParserBaseVisitor { +class Analyzer extends PainlessParserBaseVisitor { private static class Variable { final String name; final Type type; @@ -181,7 +176,7 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException("Argument name [" + name + "] already defined within the scope."); } else { throw new IllegalArgumentException( - error(source) + "Variable name [" + name + "] already defined within the scope."); + Metadata.error(source) + "Variable name [" + name + "] already defined within the scope."); } } @@ -203,7 +198,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitSource(final SourceContext ctx) { - final StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); final List statectxs = ctx.statement(); final StatementContext lastctx = statectxs.get(statectxs.size() - 1); @@ -211,11 +206,11 @@ class Analyzer extends PlanAParserBaseVisitor { for (final StatementContext statectx 
: statectxs) { if (sourcesmd.allLast) { - throw new IllegalArgumentException(error(statectx) + + throw new IllegalArgumentException(Metadata.error(statectx) + "Statement will never be executed because all prior paths escape."); } - final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + final Metadata.StatementMetadata statesmd = metadata.createStatementMetadata(statectx); statesmd.lastSource = statectx == lastctx; visit(statectx); @@ -230,20 +225,20 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitIf(final IfContext ctx) { - final StatementMetadata ifsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata ifsmd = metadata.getStatementMetadata(ctx); final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); if (expremd.postConst != null) { - throw new IllegalArgumentException(error(ctx) + "If statement is not necessary."); + throw new IllegalArgumentException(Metadata.error(ctx) + "If statement is not necessary."); } final BlockContext blockctx0 = ctx.block(0); - final StatementMetadata blocksmd0 = metadata.createStatementMetadata(blockctx0); + final Metadata.StatementMetadata blocksmd0 = metadata.createStatementMetadata(blockctx0); blocksmd0.lastSource = ifsmd.lastSource; blocksmd0.inLoop = ifsmd.inLoop; blocksmd0.lastLoop = ifsmd.lastLoop; @@ -258,7 +253,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (ctx.ELSE() != null) { final BlockContext blockctx1 = ctx.block(1); - final StatementMetadata blocksmd1 = metadata.createStatementMetadata(blockctx1); + final Metadata.StatementMetadata blocksmd1 = metadata.createStatementMetadata(blockctx1); blocksmd1.lastSource = ifsmd.lastSource; incrementScope(); visit(blockctx1); @@ -278,12 +273,12 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitWhile(final WhileContext ctx) { - final StatementMetadata whilesmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata whilesmd = metadata.getStatementMetadata(ctx); incrementScope(); final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); @@ -294,24 +289,24 @@ class Analyzer extends PlanAParserBaseVisitor { continuous = (boolean)expremd.postConst; if (!continuous) { - throw new IllegalArgumentException(error(ctx) + "The loop will never be executed."); + throw new IllegalArgumentException(Metadata.error(ctx) + "The loop will never be executed."); } if (ctx.empty() != null) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + throw new IllegalArgumentException(Metadata.error(ctx) + "The loop will never exit."); } } final BlockContext blockctx = ctx.block(); if (blockctx != null) { - final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); blocksmd.beginLoop = true; blocksmd.inLoop = true; visit(blockctx); if (blocksmd.loopEscape && !blocksmd.anyContinue) { - throw new IllegalArgumentException(error(ctx) + "All 
paths escape so the loop is not necessary."); + throw new IllegalArgumentException(Metadata.error(ctx) + "All paths escape so the loop is not necessary."); } if (continuous && !blocksmd.anyBreak) { @@ -329,22 +324,22 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDo(final DoContext ctx) { - final StatementMetadata dosmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata dosmd = metadata.getStatementMetadata(ctx); incrementScope(); final BlockContext blockctx = ctx.block(); - final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); blocksmd.beginLoop = true; blocksmd.inLoop = true; visit(blockctx); if (blocksmd.loopEscape && !blocksmd.anyContinue) { - throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); + throw new IllegalArgumentException(Metadata.error(ctx) + "All paths escape so the loop is not necessary."); } final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); @@ -353,7 +348,7 @@ class Analyzer extends PlanAParserBaseVisitor { final boolean continuous = (boolean)expremd.postConst; if (!continuous) { - throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); + throw new IllegalArgumentException(Metadata.error(ctx) + "All paths escape so the loop is not necessary."); } if (!blocksmd.anyBreak) { @@ -371,7 +366,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitFor(final ForContext ctx) { - final StatementMetadata forsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata forsmd = metadata.getStatementMetadata(ctx); boolean continuous = false; incrementScope(); @@ -386,7 +381,7 @@ class Analyzer extends PlanAParserBaseVisitor { final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); @@ -395,11 +390,11 @@ class Analyzer extends PlanAParserBaseVisitor { continuous = (boolean)expremd.postConst; if (!continuous) { - throw new IllegalArgumentException(error(ctx) + "The loop will never be executed."); + throw new IllegalArgumentException(Metadata.error(ctx) + "The loop will never be executed."); } if (ctx.empty() != null) { - throw new IllegalArgumentException(error(ctx) + "The loop is continuous."); + throw new IllegalArgumentException(Metadata.error(ctx) + "The loop is continuous."); } } } else { @@ -416,13 +411,13 @@ class Analyzer extends PlanAParserBaseVisitor { final BlockContext blockctx = ctx.block(); if (blockctx != null) { - final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); blocksmd.beginLoop = true; blocksmd.inLoop = true; visit(blockctx); if (blocksmd.loopEscape && !blocksmd.anyContinue) { - throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); + throw new 
IllegalArgumentException(Metadata.error(ctx) + "All paths escape so the loop is not necessary."); } if (continuous && !blocksmd.anyBreak) { @@ -440,7 +435,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDecl(final DeclContext ctx) { - final StatementMetadata declsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata declsmd = metadata.getStatementMetadata(ctx); final DeclarationContext declctx = ctx.declaration(); metadata.createStatementMetadata(declctx); @@ -453,14 +448,14 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitContinue(final ContinueContext ctx) { - final StatementMetadata continuesmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata continuesmd = metadata.getStatementMetadata(ctx); if (!continuesmd.inLoop) { - throw new IllegalArgumentException(error(ctx) + "Cannot have a continue statement outside of a loop."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot have a continue statement outside of a loop."); } if (continuesmd.lastLoop) { - throw new IllegalArgumentException(error(ctx) + "Unnessary continue statement at the end of a loop."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unnessary continue statement at the end of a loop."); } continuesmd.allLast = true; @@ -473,10 +468,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitBreak(final BreakContext ctx) { - final StatementMetadata breaksmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata breaksmd = metadata.getStatementMetadata(ctx); if (!breaksmd.inLoop) { - throw new IllegalArgumentException(error(ctx) + "Cannot have a break statement outside of a loop."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot have a break statement outside of a loop."); } breaksmd.loopEscape = true; @@ -490,10 +485,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitReturn(final ReturnContext ctx) { - final StatementMetadata returnsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata returnsmd = metadata.getStatementMetadata(ctx); final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.objectType; visit(exprctx); markCast(expremd); @@ -509,10 +504,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitTry(final TryContext ctx) { - final StatementMetadata trysmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata trysmd = metadata.getStatementMetadata(ctx); final BlockContext blockctx = ctx.block(); - final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); blocksmd.lastSource = trysmd.lastSource; blocksmd.inLoop = trysmd.inLoop; blocksmd.lastLoop = trysmd.lastLoop; @@ -529,7 +524,7 @@ class Analyzer extends PlanAParserBaseVisitor { int trapcount = 0; for (final TrapContext trapctx : ctx.trap()) { - final StatementMetadata trapsmd = metadata.createStatementMetadata(trapctx); + final Metadata.StatementMetadata trapsmd = metadata.createStatementMetadata(trapctx); trapsmd.lastSource = trysmd.lastSource; trapsmd.inLoop = trysmd.inLoop; trapsmd.lastLoop = trysmd.lastLoop; @@ -553,10 +548,10 @@ class Analyzer 
extends PlanAParserBaseVisitor { @Override public Void visitThrow(final ThrowContext ctx) { - final StatementMetadata throwsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata throwsmd = metadata.getStatementMetadata(ctx); final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.exceptionType; visit(exprctx); markCast(expremd); @@ -572,14 +567,14 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExpr(final ExprContext ctx) { - final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.read = exprsmd.lastSource; visit(exprctx); if (!expremd.statement && !exprsmd.lastSource) { - throw new IllegalArgumentException(error(ctx) + "Not a statement."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Not a statement."); } final boolean rtn = exprsmd.lastSource && expremd.from.sort != Sort.VOID; @@ -596,17 +591,17 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitMultiple(final MultipleContext ctx) { - final StatementMetadata multiplesmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata multiplesmd = metadata.getStatementMetadata(ctx); final List statectxs = ctx.statement(); final StatementContext lastctx = statectxs.get(statectxs.size() - 1); for (StatementContext statectx : statectxs) { if (multiplesmd.allLast) { - throw new IllegalArgumentException(error(statectx) + + throw new IllegalArgumentException(Metadata.error(statectx) + "Statement will never be executed because all prior paths escape."); } - final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + final Metadata.StatementMetadata statesmd = metadata.createStatementMetadata(statectx); statesmd.lastSource = multiplesmd.lastSource && statectx == lastctx; statesmd.inLoop = multiplesmd.inLoop; statesmd.lastLoop = (multiplesmd.beginLoop || multiplesmd.lastLoop) && statectx == lastctx; @@ -626,10 +621,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitSingle(final SingleContext ctx) { - final StatementMetadata singlesmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata singlesmd = metadata.getStatementMetadata(ctx); final StatementContext statectx = ctx.statement(); - final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + final Metadata.StatementMetadata statesmd = metadata.createStatementMetadata(statectx); statesmd.lastSource = singlesmd.lastSource; statesmd.inLoop = singlesmd.inLoop; statesmd.lastLoop = singlesmd.beginLoop || singlesmd.lastLoop; @@ -648,7 +643,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitEmpty(final EmptyContext ctx) { - throw new UnsupportedOperationException(error(ctx) + "Unexpected parser state."); + throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected parser state."); } @Override @@ -660,7 +655,7 @@ class Analyzer extends PlanAParserBaseVisitor { metadata.createStatementMetadata(declctx); 
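Aside from the package rename, the Analyzer.java hunks on both sides of this point follow one mechanical pattern: the imports of Metadata's nested classes and the static import of its error helper are dropped, so bare references such as StatementMetadata, ExpressionMetadata and error(ctx) become Metadata.StatementMetadata, Metadata.ExpressionMetadata and Metadata.error(ctx). A small self-contained sketch of that refactor, using hypothetical stand-in names rather than the Painless classes:

    public class QualifiedAccessExample {
        // Stand-in for Metadata: an outer class with a nested type and a static helper.
        static class Meta {
            static class Info {
                String text = "ok";
            }
            static String error(String where) {
                return "Error [" + where + "]: ";
            }
        }

        public static void main(String[] args) {
            // Before: a single-type import of the nested class plus a static import of
            // error(...) allowed bare Info and error(...). After: both references are
            // qualified through the outer class, as in the hunks above and below.
            Meta.Info info = new Meta.Info();
            if (info.text == null) {
                throw new IllegalStateException(Meta.error("source") + "Unexpected state.");
            }
            System.out.println(info.text);
        }
    }
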
visit(declctx); } else if (exprctx != null) { - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.read = false; visit(exprctx); @@ -668,11 +663,11 @@ class Analyzer extends PlanAParserBaseVisitor { markCast(expremd); if (!expremd.statement) { - throw new IllegalArgumentException(error(exprctx) + + throw new IllegalArgumentException(Metadata.error(exprctx) + "The intializer of a for loop must be a statement."); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } return null; @@ -683,7 +678,7 @@ class Analyzer extends PlanAParserBaseVisitor { final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.read = false; visit(exprctx); @@ -691,7 +686,7 @@ class Analyzer extends PlanAParserBaseVisitor { markCast(expremd); if (!expremd.statement) { - throw new IllegalArgumentException(error(exprctx) + + throw new IllegalArgumentException(Metadata.error(exprctx) + "The afterthought of a for loop must be a statement."); } } @@ -702,11 +697,11 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDeclaration(final DeclarationContext ctx) { final DecltypeContext decltypectx = ctx.decltype(); - final ExpressionMetadata decltypeemd = metadata.createExpressionMetadata(decltypectx); + final Metadata.ExpressionMetadata decltypeemd = metadata.createExpressionMetadata(decltypectx); visit(decltypectx); for (final DeclvarContext declvarctx : ctx.declvar()) { - final ExpressionMetadata declvaremd = metadata.createExpressionMetadata(declvarctx); + final Metadata.ExpressionMetadata declvaremd = metadata.createExpressionMetadata(declvarctx); declvaremd.to = decltypeemd.from; visit(declvarctx); } @@ -716,7 +711,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDecltype(final DecltypeContext ctx) { - final ExpressionMetadata decltypeemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata decltypeemd = metadata.getExpressionMetadata(ctx); final String name = ctx.getText(); decltypeemd.from = definition.getType(name); @@ -726,7 +721,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDeclvar(final DeclvarContext ctx) { - final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); final String name = ctx.ID().getText(); declvaremd.postConst = addVariable(ctx, name, declvaremd.to).slot; @@ -734,7 +729,7 @@ class Analyzer extends PlanAParserBaseVisitor { final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = declvaremd.to; visit(exprctx); markCast(expremd); @@ -745,7 +740,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitTrap(final TrapContext ctx) { - final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata trapsmd = 
metadata.getStatementMetadata(ctx); final String type = ctx.TYPE().getText(); trapsmd.exception = definition.getType(type); @@ -753,7 +748,7 @@ class Analyzer extends PlanAParserBaseVisitor { try { trapsmd.exception.clazz.asSubclass(Exception.class); } catch (final ClassCastException exception) { - throw new IllegalArgumentException(error(ctx) + "Invalid exception type [" + trapsmd.exception.name + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid exception type [" + trapsmd.exception.name + "]."); } final String id = ctx.ID().getText(); @@ -762,7 +757,7 @@ class Analyzer extends PlanAParserBaseVisitor { final BlockContext blockctx = ctx.block(); if (blockctx != null) { - final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + final Metadata.StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); blocksmd.lastSource = trapsmd.lastSource; blocksmd.inLoop = trapsmd.inLoop; blocksmd.lastLoop = trapsmd.lastLoop; @@ -774,7 +769,7 @@ class Analyzer extends PlanAParserBaseVisitor { trapsmd.anyContinue = blocksmd.anyContinue; trapsmd.anyBreak = blocksmd.anyBreak; } else if (ctx.emptyscope() == null) { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } return null; @@ -782,12 +777,12 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitPrecedence(final PrecedenceContext ctx) { - throw new UnsupportedOperationException(error(ctx) + "Unexpected parser state."); + throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected parser state."); } @Override public Void visitNumeric(final NumericContext ctx) { - final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null; if (ctx.DECIMAL() != null) { @@ -798,14 +793,14 @@ class Analyzer extends PlanAParserBaseVisitor { numericemd.from = definition.floatType; numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1)); } catch (NumberFormatException exception) { - throw new IllegalArgumentException(error(ctx) + "Invalid float constant [" + svalue + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid float constant [" + svalue + "]."); } } else { try { numericemd.from = definition.doubleType; numericemd.preConst = Double.parseDouble(svalue); } catch (NumberFormatException exception) { - throw new IllegalArgumentException(error(ctx) + "Invalid double constant [" + svalue + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid double constant [" + svalue + "]."); } } } else { @@ -822,7 +817,7 @@ class Analyzer extends PlanAParserBaseVisitor { svalue += ctx.HEX().getText(); radix = 16; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } if (svalue.endsWith("d") || svalue.endsWith("D")) { @@ -830,21 +825,21 @@ class Analyzer extends PlanAParserBaseVisitor { numericemd.from = definition.doubleType; numericemd.preConst = Double.parseDouble(svalue.substring(0, svalue.length() - 1)); } catch (NumberFormatException exception) { - throw new IllegalArgumentException(error(ctx) + "Invalid float constant [" + svalue + "]."); + throw new 
IllegalArgumentException(Metadata.error(ctx) + "Invalid float constant [" + svalue + "]."); } } else if (svalue.endsWith("f") || svalue.endsWith("F")) { try { numericemd.from = definition.floatType; numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1)); } catch (NumberFormatException exception) { - throw new IllegalArgumentException(error(ctx) + "Invalid float constant [" + svalue + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid float constant [" + svalue + "]."); } } else if (svalue.endsWith("l") || svalue.endsWith("L")) { try { numericemd.from = definition.longType; numericemd.preConst = Long.parseLong(svalue.substring(0, svalue.length() - 1), radix); } catch (NumberFormatException exception) { - throw new IllegalArgumentException(error(ctx) + "Invalid long constant [" + svalue + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid long constant [" + svalue + "]."); } } else { try { @@ -866,7 +861,7 @@ class Analyzer extends PlanAParserBaseVisitor { numericemd.preConst = value; } } catch (NumberFormatException exception) { - throw new IllegalArgumentException(error(ctx) + "Invalid int constant [" + svalue + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Invalid int constant [" + svalue + "]."); } } } @@ -876,10 +871,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitChar(final CharContext ctx) { - final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); if (ctx.CHAR() == null) { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } charemd.preConst = ctx.CHAR().getText().charAt(0); @@ -890,10 +885,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitTrue(final TrueContext ctx) { - final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); if (ctx.TRUE() == null) { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } trueemd.preConst = true; @@ -904,10 +899,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitFalse(final FalseContext ctx) { - final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); if (ctx.FALSE() == null) { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } falseemd.preConst = false; @@ -918,10 +913,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitNull(final NullContext ctx) { - final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); if (ctx.NULL() == null) { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } nullemd.isNull = true; @@ -941,10 +936,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExternal(final ExternalContext ctx) { - final ExpressionMetadata extemd = metadata.getExpressionMetadata(ctx); + 
final Metadata.ExpressionMetadata extemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); + final Metadata.ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = extemd.read; visit(extstartctx); @@ -958,10 +953,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitPostinc(final PostincContext ctx) { - final ExpressionMetadata postincemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata postincemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); + final Metadata.ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = postincemd.read; extstartemd.storeExpr = ctx.increment(); extstartemd.token = ADD; @@ -977,10 +972,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitPreinc(final PreincContext ctx) { - final ExpressionMetadata preincemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata preincemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); + final Metadata.ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = preincemd.read; extstartemd.storeExpr = ctx.increment(); extstartemd.token = ADD; @@ -996,10 +991,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitUnary(final UnaryContext ctx) { - final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); if (ctx.BOOLNOT() != null) { expremd.to = definition.booleanType; @@ -1033,7 +1028,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { unaryemd.preConst = ~(long)expremd.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.SUB() != null) { if (exprctx instanceof NumericContext) { @@ -1056,7 +1051,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.DOUBLE) { unaryemd.preConst = -(double)expremd.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } } else if (ctx.ADD() != null) { @@ -1069,17 +1064,17 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.DOUBLE) { unaryemd.preConst = +(double)expremd.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } unaryemd.from = promote; unaryemd.typesafe = expremd.typesafe; } else { - throw 
new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } return null; @@ -1087,17 +1082,17 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitCast(final CastContext ctx) { - final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); final DecltypeContext decltypectx = ctx.decltype(); - final ExpressionMetadata decltypemd = metadata.createExpressionMetadata(decltypectx); + final Metadata.ExpressionMetadata decltypemd = metadata.createExpressionMetadata(decltypectx); visit(decltypectx); final Type type = decltypemd.from; castemd.from = type; final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = type; expremd.explicit = true; visit(exprctx); @@ -1114,14 +1109,14 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitBinary(final BinaryContext ctx) { - final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); + final Metadata.ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); visit(exprctx0); final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); + final Metadata.ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); visit(exprctx1); final boolean decimal = ctx.MUL() != null || ctx.DIV() != null || ctx.REM() != null || ctx.SUB() != null; @@ -1169,7 +1164,7 @@ class Analyzer extends PlanAParserBaseVisitor { binaryemd.preConst = Utility.multiplyWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.DIV() != null) { if (sort == Sort.INT) { @@ -1197,7 +1192,7 @@ class Analyzer extends PlanAParserBaseVisitor { binaryemd.preConst = Utility.divideWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.REM() != null) { if (sort == Sort.INT) { @@ -1217,7 +1212,7 @@ class Analyzer extends PlanAParserBaseVisitor { binaryemd.preConst = Utility.remainderWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.ADD() != null) { if (sort == Sort.INT) { @@ -1247,7 +1242,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.STRING) { binaryemd.preConst = "" + expremd0.postConst + expremd1.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new 
IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.SUB() != null) { if (sort == Sort.INT) { @@ -1275,7 +1270,7 @@ class Analyzer extends PlanAParserBaseVisitor { binaryemd.preConst = Utility.subtractWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.LSH() != null) { if (sort == Sort.INT) { @@ -1283,7 +1278,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { binaryemd.preConst = (long)expremd0.postConst << (long)expremd1.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.RSH() != null) { if (sort == Sort.INT) { @@ -1291,7 +1286,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { binaryemd.preConst = (long)expremd0.postConst >> (long)expremd1.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.USH() != null) { if (sort == Sort.INT) { @@ -1299,7 +1294,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { binaryemd.preConst = (long)expremd0.postConst >>> (long)expremd1.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.BWAND() != null) { if (sort == Sort.INT) { @@ -1307,7 +1302,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { binaryemd.preConst = (long)expremd0.postConst & (long)expremd1.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.BWXOR() != null) { if (sort == Sort.BOOL) { @@ -1317,7 +1312,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { binaryemd.preConst = (long)expremd0.postConst ^ (long)expremd1.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else if (ctx.BWOR() != null) { if (sort == Sort.INT) { @@ -1325,10 +1320,10 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { binaryemd.preConst = (long)expremd0.postConst | (long)expremd1.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } @@ -1340,20 +1335,20 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitComp(final CompContext ctx) { - final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); final boolean equality = ctx.EQ() != null || ctx.NE() != null; final boolean reference = ctx.EQR() != null || ctx.NER() != null; final ExpressionContext exprctx0 = 
metadata.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); + final Metadata.ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); visit(exprctx0); final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); + final Metadata.ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); visit(exprctx1); if (expremd0.isNull && expremd1.isNull) { - throw new IllegalArgumentException(error(ctx) + "Unnecessary comparison of null constants."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unnecessary comparison of null constants."); } final Type promote = equality ? promoteEquality(expremd0.from, expremd1.from) : @@ -1450,7 +1445,7 @@ class Analyzer extends PlanAParserBaseVisitor { compemd.preConst = (double)expremd0.postConst < (double)expremd1.postConst; } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } @@ -1462,16 +1457,16 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitBool(final BoolContext ctx) { - final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); + final Metadata.ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); expremd0.to = definition.booleanType; visit(exprctx0); markCast(expremd0); final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); + final Metadata.ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); expremd1.to = definition.booleanType; visit(exprctx1); markCast(expremd1); @@ -1482,7 +1477,7 @@ class Analyzer extends PlanAParserBaseVisitor { } else if (ctx.BOOLOR() != null) { boolemd.preConst = (boolean)expremd0.postConst || (boolean)expremd1.postConst; } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } @@ -1494,26 +1489,26 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitConditional(final ConditionalContext ctx) { - final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); + final Metadata.ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); expremd0.to = definition.booleanType; visit(exprctx0); markCast(expremd0); if (expremd0.postConst != null) { - throw new IllegalArgumentException(error(ctx) + "Unnecessary conditional statement."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unnecessary conditional statement."); } final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); + final Metadata.ExpressionMetadata expremd1 = 
metadata.createExpressionMetadata(exprctx1); expremd1.to = condemd.to; expremd1.explicit = condemd.explicit; visit(exprctx1); final ExpressionContext exprctx2 = metadata.updateExpressionTree(ctx.expression(2)); - final ExpressionMetadata expremd2 = metadata.createExpressionMetadata(exprctx2); + final Metadata.ExpressionMetadata expremd2 = metadata.createExpressionMetadata(exprctx2); expremd2.to = condemd.to; expremd2.explicit = condemd.explicit; visit(exprctx2); @@ -1538,10 +1533,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitAssignment(final AssignmentContext ctx) { - final ExpressionMetadata assignemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata assignemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); + final Metadata.ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = assignemd.read; extstartemd.storeExpr = metadata.updateExpressionTree(ctx.expression()); @@ -1615,9 +1610,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtprec(final ExtprecContext ctx) { - final ExtNodeMetadata precenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata precenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = precenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtprecContext precctx = ctx.extprec(); final ExtcastContext castctx = ctx.extcast(); @@ -1652,7 +1647,7 @@ class Analyzer extends PlanAParserBaseVisitor { metadata.createExtNodeMetadata(ctx, stringctx); visit(stringctx); } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } parentemd.statement = false; @@ -1674,9 +1669,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtcast(final ExtcastContext ctx) { - final ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = castenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtprecContext precctx = ctx.extprec(); final ExtcastContext castctx = ctx.extcast(); @@ -1704,11 +1699,11 @@ class Analyzer extends PlanAParserBaseVisitor { metadata.createExtNodeMetadata(ctx, stringctx); visit(stringctx); } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } final DecltypeContext declctx = ctx.decltype(); - final ExpressionMetadata declemd = metadata.createExpressionMetadata(declctx); + final Metadata.ExpressionMetadata declemd = metadata.createExpressionMetadata(declctx); visit(declctx); castenmd.castTo = getLegalCast(ctx, parentemd.current, declemd.from, true); @@ -1721,9 +1716,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtbrace(final ExtbraceContext ctx) { - final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = 
braceenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final boolean array = parentemd.current.sort == Sort.ARRAY; final boolean def = parentemd.current.sort == Sort.DEF; @@ -1750,7 +1745,7 @@ class Analyzer extends PlanAParserBaseVisitor { braceenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); if (array || def) { expremd.to = array ? definition.intType : definition.objectType; @@ -1785,18 +1780,18 @@ class Analyzer extends PlanAParserBaseVisitor { setter = parentemd.current.struct.methods.get("put"); if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1)) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal map get shortcut for type [" + parentemd.current.name + "]."); } if (setter != null && setter.arguments.size() != 2) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal map set shortcut for type [" + parentemd.current.name + "]."); } if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) || !getter.rtn.equals(setter.arguments.get(1)))) { - throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Shortcut argument types must match."); } valuetype = setter != null ? setter.arguments.get(0) : getter != null ? getter.arguments.get(0) : null; @@ -1807,24 +1802,24 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || getter.arguments.get(0).sort != Sort.INT)) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal list get shortcut for type [" + parentemd.current.name + "]."); } if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal list set shortcut for type [" + parentemd.current.name + "]."); } if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) || !getter.rtn.equals(setter.arguments.get(1)))) { - throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Shortcut argument types must match."); } valuetype = definition.intType; settype = setter == null ? 
null : setter.arguments.get(1); } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } if ((get || set) && (!get || getter != null) && (!set || setter != null)) { @@ -1840,7 +1835,7 @@ class Analyzer extends PlanAParserBaseVisitor { } if (braceenmd.target == null) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Attempting to address a non-array type [" + parentemd.current.name + "] as an array."); } @@ -1849,7 +1844,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtdot(final ExtdotContext ctx) { - final ExtNodeMetadata dotemnd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata dotemnd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = dotemnd.parent; final ExtcallContext callctx = ctx.extcall(); @@ -1868,12 +1863,12 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExttype(final ExttypeContext ctx) { - final ExtNodeMetadata typeenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata typeenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = typeenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (parentemd.current != null) { - throw new IllegalArgumentException(error(ctx) + "Unexpected static type."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected static type."); } final String typestr = ctx.TYPE().getText(); @@ -1890,9 +1885,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtcall(final ExtcallContext ctx) { - final ExtNodeMetadata callenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata callenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = callenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtdotContext dotctx = ctx.extdot(); final ExtbraceContext bracectx = ctx.extbrace(); @@ -1902,9 +1897,9 @@ class Analyzer extends PlanAParserBaseVisitor { final String name = ctx.EXTID().getText(); if (parentemd.current.sort == Sort.ARRAY) { - throw new IllegalArgumentException(error(ctx) + "Unexpected call [" + name + "] on an array."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected call [" + name + "] on an array."); } else if (callenmd.last && parentemd.storeExpr != null) { - throw new IllegalArgumentException(error(ctx) + "Cannot assign a value to a call [" + name + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot assign a value to a call [" + name + "]."); } final Struct struct = parentemd.current.struct; @@ -1917,7 +1912,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (method == null && !def) { throw new IllegalArgumentException( - error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); + Metadata.error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); } else if (method != null) { types = new Type[method.arguments.size()]; method.arguments.toArray(types); @@ -1928,7 +1923,7 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.current = method.rtn; if (size != types.length) { - throw new IllegalArgumentException(error(ctx) + "When 
calling [" + name + "] on type " + + throw new IllegalArgumentException(Metadata.error(ctx) + "When calling [" + name + "] on type " + "[" + struct.name + "] expected [" + types.length + "] arguments," + " but found [" + arguments.size() + "]."); } @@ -1944,7 +1939,7 @@ class Analyzer extends PlanAParserBaseVisitor { for (int argument = 0; argument < size; ++argument) { final ExpressionContext exprctx = metadata.updateExpressionTree(arguments.get(argument)); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = types[argument]; visit(exprctx); markCast(expremd); @@ -1965,9 +1960,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtvar(final ExtvarContext ctx) { - final ExtNodeMetadata varenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata varenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = varenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final String name = ctx.ID().getText(); @@ -1975,7 +1970,7 @@ class Analyzer extends PlanAParserBaseVisitor { final ExtbraceContext bracectx = ctx.extbrace(); if (parentemd.current != null) { - throw new IllegalStateException(error(ctx) + "Unexpected variable [" + name + "] load."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected variable [" + name + "] load."); } varenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; @@ -1983,7 +1978,7 @@ class Analyzer extends PlanAParserBaseVisitor { final Variable variable = getVariable(name); if (variable == null) { - throw new IllegalArgumentException(error(ctx) + "Unknown variable [" + name + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unknown variable [" + name + "]."); } varenmd.target = variable.slot; @@ -2004,12 +1999,12 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtfield(final ExtfieldContext ctx) { - final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = memberenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (ctx.EXTID() == null && ctx.EXTINTEGER() == null) { - throw new IllegalArgumentException(error(ctx) + "Unexpected parser state."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected parser state."); } final String value = ctx.EXTID() == null ? 
ctx.EXTINTEGER().getText() : ctx.EXTID().getText(); @@ -2021,23 +2016,23 @@ class Analyzer extends PlanAParserBaseVisitor { final boolean store = memberenmd.last && parentemd.storeExpr != null; if (parentemd.current == null) { - throw new IllegalStateException(error(ctx) + "Unexpected field [" + value + "] load."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected field [" + value + "] load."); } if (parentemd.current.sort == Sort.ARRAY) { if ("length".equals(value)) { if (!parentemd.read) { - throw new IllegalArgumentException(error(ctx) + "Must read array field [length]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Must read array field [length]."); } else if (store) { throw new IllegalArgumentException( - error(ctx) + "Cannot write to read-only array field [length]."); + Metadata.error(ctx) + "Cannot write to read-only array field [length]."); } memberenmd.target = "#length"; memberenmd.type = definition.intType; parentemd.current = definition.intType; } else { - throw new IllegalArgumentException(error(ctx) + "Unexpected array field [" + value + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected array field [" + value + "]."); } } else if (parentemd.current.sort == Sort.DEF) { memberenmd.target = value; @@ -2050,7 +2045,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (field != null) { if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) { - throw new IllegalArgumentException(error(ctx) + "Cannot write to read-only" + + throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot write to read-only" + " field [" + value + "] for type [" + struct.name + "]."); } @@ -2067,12 +2062,12 @@ class Analyzer extends PlanAParserBaseVisitor { Object constant = null; if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."); } @@ -2088,18 +2083,18 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || getter.arguments.get(0).sort != Sort.STRING)) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal map get shortcut [" + value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.STRING)) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal map set shortcut [" + value + "] for type [" + struct.name + "]."); } if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { - throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Shortcut argument types must match."); } settype = setter == null ? 
null : setter.arguments.get(1); @@ -2116,18 +2111,18 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || getter.arguments.get(0).sort != Sort.INT)) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal list get shortcut [" + value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal list set shortcut [" + value + "] for type [" + struct.name + "]."); } if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { - throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Shortcut argument types must match."); } settype = setter == null ? null : setter.arguments.get(1); @@ -2135,14 +2130,14 @@ class Analyzer extends PlanAParserBaseVisitor { try { constant = Integer.parseInt(value); } catch (NumberFormatException exception) { - throw new IllegalArgumentException(error(ctx) + + throw new IllegalArgumentException(Metadata.error(ctx) + "Illegal list shortcut value [" + value + "]."); } } catch (ClassCastException exception) { //Do nothing. } } else { - throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected parser state."); } } @@ -2156,7 +2151,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (memberenmd.target == null) { throw new IllegalArgumentException( - error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); + Metadata.error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); } } @@ -2175,9 +2170,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtnew(ExtnewContext ctx) { - final ExtNodeMetadata newenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata newenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = newenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtdotContext dotctx = ctx.extdot(); final ExtbraceContext bracectx = ctx.extbrace(); @@ -2188,11 +2183,11 @@ class Analyzer extends PlanAParserBaseVisitor { final Struct struct = definition.structs.get(name); if (parentemd.current != null) { - throw new IllegalArgumentException(error(ctx) + "Unexpected new call."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unexpected new call."); } else if (struct == null) { - throw new IllegalArgumentException(error(ctx) + "Specified type [" + name + "] not found."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Specified type [" + name + "] not found."); } else if (newenmd.last && parentemd.storeExpr != null) { - throw new IllegalArgumentException(error(ctx) + "Cannot assign a value to a new call."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Cannot assign a value to a new call."); } final boolean newclass = ctx.arguments() != null; @@ -2205,7 +2200,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (newarray) { if (!parentemd.read) { - throw new IllegalArgumentException(error(ctx) + "A newly created array must be 
assigned."); + throw new IllegalArgumentException(Metadata.error(ctx) + "A newly created array must be assigned."); } types = new Type[size]; @@ -2220,7 +2215,7 @@ class Analyzer extends PlanAParserBaseVisitor { newenmd.type = definition.getType(struct, 0); parentemd.current = definition.getType(struct, 1); } else { - throw new IllegalArgumentException(error(ctx) + "A newly created array cannot have zero dimensions."); + throw new IllegalArgumentException(Metadata.error(ctx) + "A newly created array cannot have zero dimensions."); } } else if (newclass) { final Constructor constructor = struct.constructors.get("new"); @@ -2235,21 +2230,21 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.current = newenmd.type; } else { throw new IllegalArgumentException( - error(ctx) + "Unknown new call on type [" + struct.name + "]."); + Metadata.error(ctx) + "Unknown new call on type [" + struct.name + "]."); } } else { - throw new IllegalArgumentException(error(ctx) + "Unknown parser state."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Unknown parser state."); } if (size != types.length) { - throw new IllegalArgumentException(error(ctx) + "When calling [" + name + "] on type " + + throw new IllegalArgumentException(Metadata.error(ctx) + "When calling [" + name + "] on type " + "[" + struct.name + "] expected [" + types.length + "] arguments," + " but found [" + arguments.size() + "]."); } for (int argument = 0; argument < size; ++argument) { final ExpressionContext exprctx = metadata.updateExpressionTree(arguments.get(argument)); - final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = types[argument]; visit(exprctx); markCast(expremd); @@ -2268,9 +2263,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtstring(final ExtstringContext ctx) { - final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = memberenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final String string = ctx.STRING().getText(); @@ -2281,14 +2276,14 @@ class Analyzer extends PlanAParserBaseVisitor { final boolean store = memberenmd.last && parentemd.storeExpr != null; if (parentemd.current != null) { - throw new IllegalStateException(error(ctx) + "Unexpected String constant [" + string + "]."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected String constant [" + string + "]."); } if (!parentemd.read) { - throw new IllegalArgumentException(error(ctx) + "Must read String constant [" + string + "]."); + throw new IllegalArgumentException(Metadata.error(ctx) + "Must read String constant [" + string + "]."); } else if (store) { throw new IllegalArgumentException( - error(ctx) + "Cannot write to read-only String constant [" + string + "]."); + Metadata.error(ctx) + "Cannot write to read-only String constant [" + string + "]."); } memberenmd.target = string; @@ -2312,12 +2307,12 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitArguments(final ArgumentsContext ctx) { - throw new UnsupportedOperationException(error(ctx) + "Unexpected parser state."); + throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected parser state."); } @Override public Void 
visitIncrement(IncrementContext ctx) { - final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); final Sort sort = incremd.to == null ? null : incremd.to.sort; final boolean positive = ctx.INCR() != null; @@ -2348,13 +2343,13 @@ class Analyzer extends PlanAParserBaseVisitor { } private void analyzeLoadStoreExternal(final ParserRuleContext source) { - final ExtNodeMetadata extenmd = metadata.getExtNodeMetadata(source); + final Metadata.ExtNodeMetadata extenmd = metadata.getExtNodeMetadata(source); final ParserRuleContext parent = extenmd.parent; - final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (extenmd.last && parentemd.storeExpr != null) { final ParserRuleContext store = parentemd.storeExpr; - final ExpressionMetadata storeemd = metadata.createExpressionMetadata(parentemd.storeExpr); + final Metadata.ExpressionMetadata storeemd = metadata.createExpressionMetadata(parentemd.storeExpr); final int token = parentemd.token; if (token > 0) { @@ -2386,9 +2381,9 @@ class Analyzer extends PlanAParserBaseVisitor { } } - private void markCast(final ExpressionMetadata emd) { + private void markCast(final Metadata.ExpressionMetadata emd) { if (emd.from == null) { - throw new IllegalStateException(error(emd.source) + "From cast type should never be null."); + throw new IllegalStateException(Metadata.error(emd.source) + "From cast type should never be null."); } if (emd.to != null) { @@ -2398,7 +2393,7 @@ class Analyzer extends PlanAParserBaseVisitor { emd.postConst = constCast(emd.source, emd.preConst, emd.cast); } } else { - throw new IllegalStateException(error(emd.source) + "To cast type should never be null."); + throw new IllegalStateException(Metadata.error(emd.source) + "To cast type should never be null."); } } @@ -2836,11 +2831,11 @@ class Analyzer extends PlanAParserBaseVisitor { return cast; } else { throw new ClassCastException( - error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); + Metadata.error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); } } catch (final ClassCastException cce1) { throw new ClassCastException( - error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); + Metadata.error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); } } } @@ -2850,7 +2845,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (transform == null) { throw new ClassCastException( - error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); + Metadata.error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); } return transform; @@ -2884,10 +2879,10 @@ class Analyzer extends PlanAParserBaseVisitor { case FLOAT: return number.floatValue(); case DOUBLE: return number.doubleValue(); default: - throw new IllegalStateException(error(source) + "Expected numeric type for cast."); + throw new IllegalStateException(Metadata.error(source) + "Expected numeric type for cast."); } } else { - throw new IllegalStateException(error(source) + "No valid constant cast from " + + throw new IllegalStateException(Metadata.error(source) + "No valid constant cast from " + "[" + cast.from.clazz.getCanonicalName() + "] to " + "[" + cast.to.clazz.getCanonicalName() + "]."); } @@ -2908,7 +2903,7 @@ class Analyzer extends PlanAParserBaseVisitor { } catch 
(IllegalAccessException | IllegalArgumentException | java.lang.reflect.InvocationTargetException | NullPointerException | ExceptionInInitializerError exception) { - throw new IllegalStateException(error(source) + "Unable to invoke transform to cast constant from " + + throw new IllegalStateException(Metadata.error(source) + "Unable to invoke transform to cast constant from " + "[" + transform.from.name + "] to [" + transform.to.name + "]."); } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java similarity index 89% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index b6aa5f075ab..efb6b22f298 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.CommonTokenStream; @@ -31,14 +31,14 @@ import java.security.SecureClassLoader; import java.security.cert.Certificate; /** - * The Compiler is the entry point for generating a Plan A script. The compiler will generate an ANTLR + * The Compiler is the entry point for generating a Painless script. The compiler will generate an ANTLR * parse tree based on the source code that is passed in. Two passes will then be run over the parse tree, * one for analysis using the {@link Analyzer} and another to generate the actual byte code using ASM in * the {@link Writer}. */ final class Compiler { /** - * The default language API to be used with Plan A. The second construction is used + * The default language API to be used with Painless. The second construction is used * to finalize all the variables, so there is no mistake of modification afterwards. */ private static Definition DEFAULT_DEFINITION = new Definition(new Definition()); @@ -61,7 +61,7 @@ final class Compiler { } /** - * A secure class loader used to define Plan A scripts. + * A secure class loader used to define Painless scripts. */ static class Loader extends SecureClassLoader { /** @@ -83,12 +83,12 @@ final class Compiler { } /** - * Runs the two-pass compiler to generate a Plan A script. + * Runs the two-pass compiler to generate a Painless script. * @param loader The ClassLoader used to define the script. * @param name The name of the script. * @param source The source code for the script. * @param settings The CompilerSettings to be used during the compilation. - * @return An {@link Executable} Plan A script. + * @return An {@link Executable} Painless script. */ static Executable compile(final Loader loader, final String name, final String source, final Definition custom, final CompilerSettings settings) { @@ -107,13 +107,13 @@ final class Compiler { * to ensure that the first error generated by ANTLR will cause the compilation to fail rather than * use ANTLR's recovery strategies that may be potentially dangerous. * @param source The source code for the script. - * @param definition The Plan A API. + * @param definition The Painless API. * @return The root node for the ANTLR parse tree. 
*/ private static ParserRuleContext createParseTree(final String source, final Definition definition) { final ANTLRInputStream stream = new ANTLRInputStream(source); final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream); - final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer)); + final PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer)); final ParserErrorStrategy strategy = new ParserErrorStrategy(); lexer.removeErrorListeners(); @@ -127,13 +127,13 @@ final class Compiler { } /** - * Generates an {@link Executable} that can run a Plan A script. + * Generates an {@link Executable} that can run a Painless script. * @param loader The {@link Loader} to define the script's class file. - * @param definition The Plan A API. + * @param definition The Painless API. * @param name The name of the script. * @param source The source text of the script. * @param bytes The ASM generated byte code to define the class with. - * @return A Plan A {@link Executable} script. + * @return A Painless {@link Executable} script. */ private static Executable createExecutable(final Loader loader, final Definition definition, final String name, final String source, final byte[] bytes) { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java similarity index 98% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java index 4b5e753e342..79f7c212ef3 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** * Settings to use when compiling a script. diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Def.java similarity index 99% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index eb5e0bdd66f..71c7de23c85 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -17,14 +17,14 @@ * under the License. 
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; -import org.elasticsearch.plan.a.Definition.Cast; -import org.elasticsearch.plan.a.Definition.Field; -import org.elasticsearch.plan.a.Definition.Method; -import org.elasticsearch.plan.a.Definition.Struct; -import org.elasticsearch.plan.a.Definition.Transform; -import org.elasticsearch.plan.a.Definition.Type; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Field; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.Definition.Transform; +import org.elasticsearch.painless.Definition.Type; import java.lang.invoke.MethodHandle; import java.lang.reflect.Array; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java similarity index 99% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index 613bdafd8ee..94b372a1335 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ErrorHandlingLexer.java similarity index 95% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/ErrorHandlingLexer.java index 768b0aea2ec..e42de385d75 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ErrorHandlingLexer.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.LexerNoViableAltException; @@ -25,7 +25,7 @@ import org.antlr.v4.runtime.misc.Interval; import java.text.ParseException; -class ErrorHandlingLexer extends PlanALexer { +class ErrorHandlingLexer extends PainlessLexer { public ErrorHandlingLexer(CharStream charStream) { super(charStream); } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Executable.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Executable.java similarity index 97% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Executable.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/Executable.java index 09e28cf8216..6d9a7108fa9 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Executable.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Executable.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plan.a; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -19,6 +17,8 @@ package org.elasticsearch.plan.a; * under the License. 
*/ +package org.elasticsearch.painless; + import java.util.Map; public abstract class Executable { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java similarity index 98% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java index 221b951497f..cace48ff433 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Metadata.java @@ -17,14 +17,14 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; -import org.elasticsearch.plan.a.Definition.Cast; -import org.elasticsearch.plan.a.Definition.Type; -import org.elasticsearch.plan.a.PlanAParser.ExpressionContext; -import org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.PainlessParser.ExpressionContext; +import org.elasticsearch.painless.PainlessParser.PrecedenceContext; import java.util.HashMap; import java.util.Map; @@ -396,7 +396,7 @@ class Metadata { } /** - * Acts as both the Plan A API and white-list for what types and methods are allowed. + * Acts as both the Painless API and white-list for what types and methods are allowed. */ final Definition definition; @@ -407,7 +407,7 @@ class Metadata { final String source; /** - * Toot node of the ANTLR tree for the Plan A script. + * Root node of the ANTLR tree for the Painless script. */ final ParserRuleContext root; @@ -457,7 +457,7 @@ class Metadata { /** * Constructor. - * @param definition The Plan A definition. + * @param definition The Painless definition. * @param source The source text for the script. * @param root The root ANTLR node. * @param settings The compile-time settings. diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessError.java similarity index 77% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessError.java index dea3c2021a8..0d6fa915caf 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessError.java @@ -17,21 +17,21 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** - * The PlanAError class is used to throw internal errors caused by Plan A scripts that cannot be + * The PainlessError class is used to throw internal errors caused by Painless scripts that cannot be * caught using a standard {@link Exception}. This prevents the user from catching this specific error - * (as Exceptions are available in the Plan A API, but Errors are not,) and possibly continuing to do + * (as Exceptions are available in the Painless API, but Errors are not,) and possibly continuing to do * something hazardous. The alternative was extending {@link Throwable}, but that seemed worse than using * an {@link Error} in this case.
*/ -public class PlanAError extends Error { +public class PainlessError extends Error { /** * Constructor. * @param message The error message. */ - public PlanAError(final String message) { + public PainlessError(final String message) { super(message); } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java similarity index 99% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java index e35df0102d2..3a01626d872 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessLexer.java @@ -1,5 +1,5 @@ // ANTLR GENERATED CODE: DO NOT EDIT -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.Lexer; @@ -16,7 +16,7 @@ import org.antlr.v4.runtime.dfa.DFA; import java.util.Set; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) -class PlanALexer extends Lexer { +class PainlessLexer extends Lexer { static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); } protected static final DFA[] _decisionToDFA; @@ -112,13 +112,13 @@ class PlanALexer extends Lexer { } - public PlanALexer(CharStream input) { + public PainlessLexer(CharStream input) { super(input); _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } @Override - public String getGrammarFileName() { return "PlanALexer.g4"; } + public String getGrammarFileName() { return "PainlessLexer.g4"; } @Override public String[] getRuleNames() { return ruleNames; } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java similarity index 84% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java index e7de571ef2e..e7b331de661 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParser.java @@ -1,5 +1,5 @@ // ANTLR GENERATED CODE: DO NOT EDIT -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.antlr.v4.runtime.FailedPredicateException; import org.antlr.v4.runtime.NoViableAltException; @@ -22,7 +22,7 @@ import org.antlr.v4.runtime.tree.TerminalNode; import java.util.List; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) -class PlanAParser extends Parser { +class PainlessParser extends Parser { static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); } protected static final DFA[] _decisionToDFA; @@ -108,7 +108,7 @@ class PlanAParser extends Parser { } @Override - public String getGrammarFileName() { return "PlanAParser.g4"; } + public String getGrammarFileName() { return "PainlessParser.g4"; } @Override public String[] getRuleNames() { return ruleNames; } @@ -119,12 +119,12 @@ class PlanAParser extends Parser { @Override public ATN getATN() { return _ATN; } - public PlanAParser(TokenStream input) { + public PainlessParser(TokenStream input) { super(input); _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } public static class 
SourceContext extends ParserRuleContext { - public TerminalNode EOF() { return getToken(PlanAParser.EOF, 0); } + public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } public List statement() { return getRuleContexts(StatementContext.class); } @@ -137,7 +137,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_source; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitSource(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitSource(this); else return visitor.visitChildren(this); } } @@ -193,55 +193,55 @@ class PlanAParser extends Parser { public DeclarationContext declaration() { return getRuleContext(DeclarationContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public DeclContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitDecl(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDecl(this); else return visitor.visitChildren(this); } } public static class BreakContext extends StatementContext { - public TerminalNode BREAK() { return getToken(PlanAParser.BREAK, 0); } - public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public TerminalNode BREAK() { return getToken(PainlessParser.BREAK, 0); } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public BreakContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitBreak(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBreak(this); else return visitor.visitChildren(this); } } public static class ThrowContext extends StatementContext { - public TerminalNode THROW() { return getToken(PlanAParser.THROW, 0); } + public TerminalNode THROW() { return getToken(PainlessParser.THROW, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public ThrowContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitThrow(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitThrow(this); else return visitor.visitChildren(this); } } public static class ContinueContext extends StatementContext { - public TerminalNode CONTINUE() { return getToken(PlanAParser.CONTINUE, 0); } - public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public TerminalNode CONTINUE() { return getToken(PainlessParser.CONTINUE, 0); } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public ContinueContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return 
((PlanAParserVisitor)visitor).visitContinue(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitContinue(this); else return visitor.visitChildren(this); } } public static class ForContext extends StatementContext { - public TerminalNode FOR() { return getToken(PlanAParser.FOR, 0); } - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } - public List SEMICOLON() { return getTokens(PlanAParser.SEMICOLON); } + public TerminalNode FOR() { return getToken(PainlessParser.FOR, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } + public List SEMICOLON() { return getTokens(PainlessParser.SEMICOLON); } public TerminalNode SEMICOLON(int i) { - return getToken(PlanAParser.SEMICOLON, i); + return getToken(PainlessParser.SEMICOLON, i); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } public BlockContext block() { return getRuleContext(BlockContext.class,0); } @@ -260,12 +260,12 @@ class PlanAParser extends Parser { public ForContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitFor(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFor(this); else return visitor.visitChildren(this); } } public static class TryContext extends StatementContext { - public TerminalNode TRY() { return getToken(PlanAParser.TRY, 0); } + public TerminalNode TRY() { return getToken(PainlessParser.TRY, 0); } public BlockContext block() { return getRuleContext(BlockContext.class,0); } @@ -278,7 +278,7 @@ class PlanAParser extends Parser { public TryContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitTry(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTry(this); else return visitor.visitChildren(this); } } @@ -286,40 +286,40 @@ class PlanAParser extends Parser { public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public ExprContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExpr(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExpr(this); else return visitor.visitChildren(this); } } public static class DoContext extends StatementContext { - public TerminalNode DO() { return getToken(PlanAParser.DO, 0); } + public TerminalNode DO() { return getToken(PainlessParser.DO, 0); } public BlockContext block() { return getRuleContext(BlockContext.class,0); } - public TerminalNode WHILE() { return getToken(PlanAParser.WHILE, 0); } - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode WHILE() { return getToken(PainlessParser.WHILE, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } - 
public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public DoContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitDo(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDo(this); else return visitor.visitChildren(this); } } public static class WhileContext extends StatementContext { - public TerminalNode WHILE() { return getToken(PlanAParser.WHILE, 0); } - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode WHILE() { return getToken(PainlessParser.WHILE, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } public BlockContext block() { return getRuleContext(BlockContext.class,0); } @@ -329,41 +329,41 @@ class PlanAParser extends Parser { public WhileContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitWhile(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitWhile(this); else return visitor.visitChildren(this); } } public static class IfContext extends StatementContext { - public TerminalNode IF() { return getToken(PlanAParser.IF, 0); } - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode IF() { return getToken(PainlessParser.IF, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } public List block() { return getRuleContexts(BlockContext.class); } public BlockContext block(int i) { return getRuleContext(BlockContext.class,i); } - public TerminalNode ELSE() { return getToken(PlanAParser.ELSE, 0); } + public TerminalNode ELSE() { return getToken(PainlessParser.ELSE, 0); } public IfContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitIf(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitIf(this); else return visitor.visitChildren(this); } } public static class ReturnContext extends StatementContext { - public TerminalNode RETURN() { return getToken(PlanAParser.RETURN, 0); } + public TerminalNode RETURN() { return getToken(PainlessParser.RETURN, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public ReturnContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return 
((PlanAParserVisitor)visitor).visitReturn(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitReturn(this); else return visitor.visitChildren(this); } } @@ -684,13 +684,13 @@ class PlanAParser extends Parser { public SingleContext(BlockContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitSingle(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitSingle(this); else return visitor.visitChildren(this); } } public static class MultipleContext extends BlockContext { - public TerminalNode LBRACK() { return getToken(PlanAParser.LBRACK, 0); } - public TerminalNode RBRACK() { return getToken(PlanAParser.RBRACK, 0); } + public TerminalNode LBRACK() { return getToken(PainlessParser.LBRACK, 0); } + public TerminalNode RBRACK() { return getToken(PainlessParser.RBRACK, 0); } public List statement() { return getRuleContexts(StatementContext.class); } @@ -700,7 +700,7 @@ class PlanAParser extends Parser { public MultipleContext(BlockContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitMultiple(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMultiple(this); else return visitor.visitChildren(this); } } @@ -790,14 +790,14 @@ class PlanAParser extends Parser { public EmptyscopeContext emptyscope() { return getRuleContext(EmptyscopeContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public EmptyContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_empty; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitEmpty(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitEmpty(this); else return visitor.visitChildren(this); } } @@ -838,15 +838,15 @@ class PlanAParser extends Parser { } public static class EmptyscopeContext extends ParserRuleContext { - public TerminalNode LBRACK() { return getToken(PlanAParser.LBRACK, 0); } - public TerminalNode RBRACK() { return getToken(PlanAParser.RBRACK, 0); } + public TerminalNode LBRACK() { return getToken(PainlessParser.LBRACK, 0); } + public TerminalNode RBRACK() { return getToken(PainlessParser.RBRACK, 0); } public EmptyscopeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_emptyscope; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitEmptyscope(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitEmptyscope(this); else return visitor.visitChildren(this); } } @@ -887,7 +887,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_initializer; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitInitializer(this); + if ( visitor instanceof PainlessParserVisitor ) return 
((PainlessParserVisitor)visitor).visitInitializer(this); else return visitor.visitChildren(this); } } @@ -935,7 +935,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_afterthought; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitAfterthought(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitAfterthought(this); else return visitor.visitChildren(this); } } @@ -971,9 +971,9 @@ class PlanAParser extends Parser { public DeclvarContext declvar(int i) { return getRuleContext(DeclvarContext.class,i); } - public List COMMA() { return getTokens(PlanAParser.COMMA); } + public List COMMA() { return getTokens(PainlessParser.COMMA); } public TerminalNode COMMA(int i) { - return getToken(PlanAParser.COMMA, i); + return getToken(PainlessParser.COMMA, i); } public DeclarationContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -981,7 +981,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_declaration; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitDeclaration(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDeclaration(this); else return visitor.visitChildren(this); } } @@ -1027,14 +1027,14 @@ class PlanAParser extends Parser { } public static class DecltypeContext extends ParserRuleContext { - public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } - public List LBRACE() { return getTokens(PlanAParser.LBRACE); } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } + public List LBRACE() { return getTokens(PainlessParser.LBRACE); } public TerminalNode LBRACE(int i) { - return getToken(PlanAParser.LBRACE, i); + return getToken(PainlessParser.LBRACE, i); } - public List RBRACE() { return getTokens(PlanAParser.RBRACE); } + public List RBRACE() { return getTokens(PainlessParser.RBRACE); } public TerminalNode RBRACE(int i) { - return getToken(PlanAParser.RBRACE, i); + return getToken(PainlessParser.RBRACE, i); } public DecltypeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -1042,7 +1042,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_decltype; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitDecltype(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDecltype(this); else return visitor.visitChildren(this); } } @@ -1086,8 +1086,8 @@ class PlanAParser extends Parser { } public static class DeclvarContext extends ParserRuleContext { - public TerminalNode ID() { return getToken(PlanAParser.ID, 0); } - public TerminalNode ASSIGN() { return getToken(PlanAParser.ASSIGN, 0); } + public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } + public TerminalNode ASSIGN() { return getToken(PainlessParser.ASSIGN, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } @@ -1097,7 +1097,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_declvar; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return 
((PlanAParserVisitor)visitor).visitDeclvar(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDeclvar(this); else return visitor.visitChildren(this); } } @@ -1136,11 +1136,11 @@ class PlanAParser extends Parser { } public static class TrapContext extends ParserRuleContext { - public TerminalNode CATCH() { return getToken(PlanAParser.CATCH, 0); } - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } - public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } - public TerminalNode ID() { return getToken(PlanAParser.ID, 0); } + public TerminalNode CATCH() { return getToken(PainlessParser.CATCH, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } + public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } public BlockContext block() { return getRuleContext(BlockContext.class,0); } @@ -1153,7 +1153,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_trap; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitTrap(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrap(this); else return visitor.visitChildren(this); } } @@ -1222,18 +1222,18 @@ class PlanAParser extends Parser { public ExpressionContext expression(int i) { return getRuleContext(ExpressionContext.class,i); } - public TerminalNode LT() { return getToken(PlanAParser.LT, 0); } - public TerminalNode LTE() { return getToken(PlanAParser.LTE, 0); } - public TerminalNode GT() { return getToken(PlanAParser.GT, 0); } - public TerminalNode GTE() { return getToken(PlanAParser.GTE, 0); } - public TerminalNode EQ() { return getToken(PlanAParser.EQ, 0); } - public TerminalNode EQR() { return getToken(PlanAParser.EQR, 0); } - public TerminalNode NE() { return getToken(PlanAParser.NE, 0); } - public TerminalNode NER() { return getToken(PlanAParser.NER, 0); } + public TerminalNode LT() { return getToken(PainlessParser.LT, 0); } + public TerminalNode LTE() { return getToken(PainlessParser.LTE, 0); } + public TerminalNode GT() { return getToken(PainlessParser.GT, 0); } + public TerminalNode GTE() { return getToken(PainlessParser.GTE, 0); } + public TerminalNode EQ() { return getToken(PainlessParser.EQ, 0); } + public TerminalNode EQR() { return getToken(PainlessParser.EQR, 0); } + public TerminalNode NE() { return getToken(PainlessParser.NE, 0); } + public TerminalNode NER() { return getToken(PainlessParser.NER, 0); } public CompContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitComp(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitComp(this); else return visitor.visitChildren(this); } } @@ -1244,12 +1244,12 @@ class PlanAParser extends Parser { public ExpressionContext expression(int i) { return getRuleContext(ExpressionContext.class,i); } - public TerminalNode BOOLAND() { return getToken(PlanAParser.BOOLAND, 0); } - public TerminalNode BOOLOR() { return getToken(PlanAParser.BOOLOR, 0); } + public TerminalNode BOOLAND() { return getToken(PainlessParser.BOOLAND, 0); } 
+ public TerminalNode BOOLOR() { return getToken(PainlessParser.BOOLOR, 0); } public BoolContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitBool(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBool(this); else return visitor.visitChildren(this); } } @@ -1260,12 +1260,12 @@ class PlanAParser extends Parser { public ExpressionContext expression(int i) { return getRuleContext(ExpressionContext.class,i); } - public TerminalNode COND() { return getToken(PlanAParser.COND, 0); } - public TerminalNode COLON() { return getToken(PlanAParser.COLON, 0); } + public TerminalNode COND() { return getToken(PainlessParser.COND, 0); } + public TerminalNode COLON() { return getToken(PainlessParser.COLON, 0); } public ConditionalContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitConditional(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitConditional(this); else return visitor.visitChildren(this); } } @@ -1276,43 +1276,43 @@ class PlanAParser extends Parser { public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode ASSIGN() { return getToken(PlanAParser.ASSIGN, 0); } - public TerminalNode AADD() { return getToken(PlanAParser.AADD, 0); } - public TerminalNode ASUB() { return getToken(PlanAParser.ASUB, 0); } - public TerminalNode AMUL() { return getToken(PlanAParser.AMUL, 0); } - public TerminalNode ADIV() { return getToken(PlanAParser.ADIV, 0); } - public TerminalNode AREM() { return getToken(PlanAParser.AREM, 0); } - public TerminalNode AAND() { return getToken(PlanAParser.AAND, 0); } - public TerminalNode AXOR() { return getToken(PlanAParser.AXOR, 0); } - public TerminalNode AOR() { return getToken(PlanAParser.AOR, 0); } - public TerminalNode ALSH() { return getToken(PlanAParser.ALSH, 0); } - public TerminalNode ARSH() { return getToken(PlanAParser.ARSH, 0); } - public TerminalNode AUSH() { return getToken(PlanAParser.AUSH, 0); } + public TerminalNode ASSIGN() { return getToken(PainlessParser.ASSIGN, 0); } + public TerminalNode AADD() { return getToken(PainlessParser.AADD, 0); } + public TerminalNode ASUB() { return getToken(PainlessParser.ASUB, 0); } + public TerminalNode AMUL() { return getToken(PainlessParser.AMUL, 0); } + public TerminalNode ADIV() { return getToken(PainlessParser.ADIV, 0); } + public TerminalNode AREM() { return getToken(PainlessParser.AREM, 0); } + public TerminalNode AAND() { return getToken(PainlessParser.AAND, 0); } + public TerminalNode AXOR() { return getToken(PainlessParser.AXOR, 0); } + public TerminalNode AOR() { return getToken(PainlessParser.AOR, 0); } + public TerminalNode ALSH() { return getToken(PainlessParser.ALSH, 0); } + public TerminalNode ARSH() { return getToken(PainlessParser.ARSH, 0); } + public TerminalNode AUSH() { return getToken(PainlessParser.AUSH, 0); } public AssignmentContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitAssignment(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitAssignment(this); else return 
visitor.visitChildren(this); } } public static class FalseContext extends ExpressionContext { - public TerminalNode FALSE() { return getToken(PlanAParser.FALSE, 0); } + public TerminalNode FALSE() { return getToken(PainlessParser.FALSE, 0); } public FalseContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitFalse(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFalse(this); else return visitor.visitChildren(this); } } public static class NumericContext extends ExpressionContext { - public TerminalNode OCTAL() { return getToken(PlanAParser.OCTAL, 0); } - public TerminalNode HEX() { return getToken(PlanAParser.HEX, 0); } - public TerminalNode INTEGER() { return getToken(PlanAParser.INTEGER, 0); } - public TerminalNode DECIMAL() { return getToken(PlanAParser.DECIMAL, 0); } + public TerminalNode OCTAL() { return getToken(PainlessParser.OCTAL, 0); } + public TerminalNode HEX() { return getToken(PainlessParser.HEX, 0); } + public TerminalNode INTEGER() { return getToken(PainlessParser.INTEGER, 0); } + public TerminalNode DECIMAL() { return getToken(PainlessParser.DECIMAL, 0); } public NumericContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitNumeric(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNumeric(this); else return visitor.visitChildren(this); } } @@ -1320,27 +1320,27 @@ class PlanAParser extends Parser { public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode BOOLNOT() { return getToken(PlanAParser.BOOLNOT, 0); } - public TerminalNode BWNOT() { return getToken(PlanAParser.BWNOT, 0); } - public TerminalNode ADD() { return getToken(PlanAParser.ADD, 0); } - public TerminalNode SUB() { return getToken(PlanAParser.SUB, 0); } + public TerminalNode BOOLNOT() { return getToken(PainlessParser.BOOLNOT, 0); } + public TerminalNode BWNOT() { return getToken(PainlessParser.BWNOT, 0); } + public TerminalNode ADD() { return getToken(PainlessParser.ADD, 0); } + public TerminalNode SUB() { return getToken(PainlessParser.SUB, 0); } public UnaryContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitUnary(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitUnary(this); else return visitor.visitChildren(this); } } public static class PrecedenceContext extends ExpressionContext { - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } public PrecedenceContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitPrecedence(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPrecedence(this); else return 
visitor.visitChildren(this); } } @@ -1354,7 +1354,7 @@ class PlanAParser extends Parser { public PreincContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitPreinc(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPreinc(this); else return visitor.visitChildren(this); } } @@ -1368,23 +1368,23 @@ class PlanAParser extends Parser { public PostincContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitPostinc(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPostinc(this); else return visitor.visitChildren(this); } } public static class CastContext extends ExpressionContext { - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } public DecltypeContext decltype() { return getRuleContext(DecltypeContext.class,0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } public CastContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitCast(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCast(this); else return visitor.visitChildren(this); } } @@ -1395,16 +1395,16 @@ class PlanAParser extends Parser { public ExternalContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExternal(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExternal(this); else return visitor.visitChildren(this); } } public static class NullContext extends ExpressionContext { - public TerminalNode NULL() { return getToken(PlanAParser.NULL, 0); } + public TerminalNode NULL() { return getToken(PainlessParser.NULL, 0); } public NullContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitNull(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNull(this); else return visitor.visitChildren(this); } } @@ -1415,39 +1415,39 @@ class PlanAParser extends Parser { public ExpressionContext expression(int i) { return getRuleContext(ExpressionContext.class,i); } - public TerminalNode MUL() { return getToken(PlanAParser.MUL, 0); } - public TerminalNode DIV() { return getToken(PlanAParser.DIV, 0); } - public TerminalNode REM() { return getToken(PlanAParser.REM, 0); } - public TerminalNode ADD() { return getToken(PlanAParser.ADD, 0); } - public TerminalNode SUB() { return getToken(PlanAParser.SUB, 0); } - public TerminalNode LSH() { return getToken(PlanAParser.LSH, 0); } - public TerminalNode RSH() { return getToken(PlanAParser.RSH, 0); } - public TerminalNode USH() { return getToken(PlanAParser.USH, 0); } - public TerminalNode BWAND() { return 
getToken(PlanAParser.BWAND, 0); } - public TerminalNode BWXOR() { return getToken(PlanAParser.BWXOR, 0); } - public TerminalNode BWOR() { return getToken(PlanAParser.BWOR, 0); } + public TerminalNode MUL() { return getToken(PainlessParser.MUL, 0); } + public TerminalNode DIV() { return getToken(PainlessParser.DIV, 0); } + public TerminalNode REM() { return getToken(PainlessParser.REM, 0); } + public TerminalNode ADD() { return getToken(PainlessParser.ADD, 0); } + public TerminalNode SUB() { return getToken(PainlessParser.SUB, 0); } + public TerminalNode LSH() { return getToken(PainlessParser.LSH, 0); } + public TerminalNode RSH() { return getToken(PainlessParser.RSH, 0); } + public TerminalNode USH() { return getToken(PainlessParser.USH, 0); } + public TerminalNode BWAND() { return getToken(PainlessParser.BWAND, 0); } + public TerminalNode BWXOR() { return getToken(PainlessParser.BWXOR, 0); } + public TerminalNode BWOR() { return getToken(PainlessParser.BWOR, 0); } public BinaryContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitBinary(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBinary(this); else return visitor.visitChildren(this); } } public static class CharContext extends ExpressionContext { - public TerminalNode CHAR() { return getToken(PlanAParser.CHAR, 0); } + public TerminalNode CHAR() { return getToken(PainlessParser.CHAR, 0); } public CharContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitChar(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitChar(this); else return visitor.visitChildren(this); } } public static class TrueContext extends ExpressionContext { - public TerminalNode TRUE() { return getToken(PlanAParser.TRUE, 0); } + public TerminalNode TRUE() { return getToken(PainlessParser.TRUE, 0); } public TrueContext(ExpressionContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitTrue(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrue(this); else return visitor.visitChildren(this); } } @@ -1832,7 +1832,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extstart; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtstart(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtstart(this); else return visitor.visitChildren(this); } } @@ -1899,8 +1899,8 @@ class PlanAParser extends Parser { } public static class ExtprecContext extends ParserRuleContext { - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } public ExtprecContext extprec() { return getRuleContext(ExtprecContext.class,0); } @@ -1931,7 +1931,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extprec; } @Override public T 
accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtprec(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtprec(this); else return visitor.visitChildren(this); } } @@ -2014,11 +2014,11 @@ class PlanAParser extends Parser { } public static class ExtcastContext extends ParserRuleContext { - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } public DecltypeContext decltype() { return getRuleContext(DecltypeContext.class,0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } public ExtprecContext extprec() { return getRuleContext(ExtprecContext.class,0); } @@ -2043,7 +2043,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extcast; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtcast(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtcast(this); else return visitor.visitChildren(this); } } @@ -2113,11 +2113,11 @@ class PlanAParser extends Parser { } public static class ExtbraceContext extends ParserRuleContext { - public TerminalNode LBRACE() { return getToken(PlanAParser.LBRACE, 0); } + public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode RBRACE() { return getToken(PlanAParser.RBRACE, 0); } + public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } public ExtdotContext extdot() { return getRuleContext(ExtdotContext.class,0); } @@ -2130,7 +2130,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extbrace; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtbrace(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtbrace(this); else return visitor.visitChildren(this); } } @@ -2176,7 +2176,7 @@ class PlanAParser extends Parser { } public static class ExtdotContext extends ParserRuleContext { - public TerminalNode DOT() { return getToken(PlanAParser.DOT, 0); } + public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } public ExtcallContext extcall() { return getRuleContext(ExtcallContext.class,0); } @@ -2189,7 +2189,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extdot; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtdot(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtdot(this); else return visitor.visitChildren(this); } } @@ -2231,7 +2231,7 @@ class PlanAParser extends Parser { } public static class ExttypeContext extends ParserRuleContext { - public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } public ExtdotContext extdot() { return getRuleContext(ExtdotContext.class,0); } @@ -2241,7 +2241,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return 
RULE_exttype; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExttype(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExttype(this); else return visitor.visitChildren(this); } } @@ -2270,7 +2270,7 @@ class PlanAParser extends Parser { } public static class ExtcallContext extends ParserRuleContext { - public TerminalNode EXTID() { return getToken(PlanAParser.EXTID, 0); } + public TerminalNode EXTID() { return getToken(PainlessParser.EXTID, 0); } public ArgumentsContext arguments() { return getRuleContext(ArgumentsContext.class,0); } @@ -2286,7 +2286,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extcall; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtcall(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtcall(this); else return visitor.visitChildren(this); } } @@ -2330,7 +2330,7 @@ class PlanAParser extends Parser { } public static class ExtvarContext extends ParserRuleContext { - public TerminalNode ID() { return getToken(PlanAParser.ID, 0); } + public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } public ExtdotContext extdot() { return getRuleContext(ExtdotContext.class,0); } @@ -2343,7 +2343,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extvar; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtvar(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtvar(this); else return visitor.visitChildren(this); } } @@ -2385,8 +2385,8 @@ class PlanAParser extends Parser { } public static class ExtfieldContext extends ParserRuleContext { - public TerminalNode EXTID() { return getToken(PlanAParser.EXTID, 0); } - public TerminalNode EXTINTEGER() { return getToken(PlanAParser.EXTINTEGER, 0); } + public TerminalNode EXTID() { return getToken(PainlessParser.EXTID, 0); } + public TerminalNode EXTINTEGER() { return getToken(PainlessParser.EXTINTEGER, 0); } public ExtdotContext extdot() { return getRuleContext(ExtdotContext.class,0); } @@ -2399,7 +2399,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extfield; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtfield(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtfield(this); else return visitor.visitChildren(this); } } @@ -2447,8 +2447,8 @@ class PlanAParser extends Parser { } public static class ExtnewContext extends ParserRuleContext { - public TerminalNode NEW() { return getToken(PlanAParser.NEW, 0); } - public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } + public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } public ArgumentsContext arguments() { return getRuleContext(ArgumentsContext.class,0); } @@ -2458,9 +2458,9 @@ class PlanAParser extends Parser { public ExtbraceContext extbrace() { return getRuleContext(ExtbraceContext.class,0); } - public List LBRACE() { return getTokens(PlanAParser.LBRACE); } + public 
List LBRACE() { return getTokens(PainlessParser.LBRACE); } public TerminalNode LBRACE(int i) { - return getToken(PlanAParser.LBRACE, i); + return getToken(PainlessParser.LBRACE, i); } public List expression() { return getRuleContexts(ExpressionContext.class); @@ -2468,9 +2468,9 @@ class PlanAParser extends Parser { public ExpressionContext expression(int i) { return getRuleContext(ExpressionContext.class,i); } - public List RBRACE() { return getTokens(PlanAParser.RBRACE); } + public List RBRACE() { return getTokens(PainlessParser.RBRACE); } public TerminalNode RBRACE(int i) { - return getToken(PlanAParser.RBRACE, i); + return getToken(PainlessParser.RBRACE, i); } public ExtnewContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2478,7 +2478,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extnew; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtnew(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtnew(this); else return visitor.visitChildren(this); } } @@ -2575,7 +2575,7 @@ class PlanAParser extends Parser { } public static class ExtstringContext extends ParserRuleContext { - public TerminalNode STRING() { return getToken(PlanAParser.STRING, 0); } + public TerminalNode STRING() { return getToken(PainlessParser.STRING, 0); } public ExtdotContext extdot() { return getRuleContext(ExtdotContext.class,0); } @@ -2588,7 +2588,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_extstring; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtstring(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExtstring(this); else return visitor.visitChildren(this); } } @@ -2630,17 +2630,17 @@ class PlanAParser extends Parser { } public static class ArgumentsContext extends ParserRuleContext { - public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } - public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } public List expression() { return getRuleContexts(ExpressionContext.class); } public ExpressionContext expression(int i) { return getRuleContext(ExpressionContext.class,i); } - public List COMMA() { return getTokens(PlanAParser.COMMA); } + public List COMMA() { return getTokens(PainlessParser.COMMA); } public TerminalNode COMMA(int i) { - return getToken(PlanAParser.COMMA, i); + return getToken(PainlessParser.COMMA, i); } public ArgumentsContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2648,7 +2648,7 @@ class PlanAParser extends Parser { @Override public int getRuleIndex() { return RULE_arguments; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitArguments(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitArguments(this); else return visitor.visitChildren(this); } } @@ -2705,15 +2705,15 @@ class PlanAParser extends Parser { } public static class IncrementContext extends ParserRuleContext { - public TerminalNode INCR() { return getToken(PlanAParser.INCR, 0); } - 
public TerminalNode DECR() { return getToken(PlanAParser.DECR, 0); } + public TerminalNode INCR() { return getToken(PainlessParser.INCR, 0); } + public TerminalNode DECR() { return getToken(PainlessParser.DECR, 0); } public IncrementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_increment; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitIncrement(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitIncrement(this); else return visitor.visitChildren(this); } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java similarity index 60% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java index 7997b57ae6b..ee231b1fce8 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserBaseVisitor.java @@ -1,371 +1,371 @@ // ANTLR GENERATED CODE: DO NOT EDIT -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; /** - * This class provides an empty implementation of {@link PlanAParserVisitor}, + * This class provides an empty implementation of {@link PainlessParserVisitor}, * which can be extended to create a visitor which only needs to handle a subset * of the available methods. * * @param <T> The return type of the visit operation. Use {@link Void} for * operations with no return type. */ -class PlanAParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements PlanAParserVisitor<T> { +class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements PainlessParserVisitor<T> { /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitSource(PlanAParser.SourceContext ctx) { return visitChildren(ctx); } + @Override public T visitSource(PainlessParser.SourceContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitIf(PlanAParser.IfContext ctx) { return visitChildren(ctx); } + @Override public T visitIf(PainlessParser.IfContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitWhile(PlanAParser.WhileContext ctx) { return visitChildren(ctx); } + @Override public T visitWhile(PainlessParser.WhileContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitDo(PlanAParser.DoContext ctx) { return visitChildren(ctx); } + @Override public T visitDo(PainlessParser.DoContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitFor(PlanAParser.ForContext ctx) { return visitChildren(ctx); } + @Override public T visitFor(PainlessParser.ForContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitDecl(PlanAParser.DeclContext ctx) { return visitChildren(ctx); } + @Override public T visitDecl(PainlessParser.DeclContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitContinue(PlanAParser.ContinueContext ctx) { return visitChildren(ctx); } + @Override public T visitContinue(PainlessParser.ContinueContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitBreak(PlanAParser.BreakContext ctx) { return visitChildren(ctx); } + @Override public T visitBreak(PainlessParser.BreakContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitReturn(PlanAParser.ReturnContext ctx) { return visitChildren(ctx); } + @Override public T visitReturn(PainlessParser.ReturnContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitTry(PlanAParser.TryContext ctx) { return visitChildren(ctx); } + @Override public T visitTry(PainlessParser.TryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitThrow(PlanAParser.ThrowContext ctx) { return visitChildren(ctx); } + @Override public T visitThrow(PainlessParser.ThrowContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExpr(PlanAParser.ExprContext ctx) { return visitChildren(ctx); } + @Override public T visitExpr(PainlessParser.ExprContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitMultiple(PlanAParser.MultipleContext ctx) { return visitChildren(ctx); } + @Override public T visitMultiple(PainlessParser.MultipleContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitSingle(PlanAParser.SingleContext ctx) { return visitChildren(ctx); } + @Override public T visitSingle(PainlessParser.SingleContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitEmpty(PlanAParser.EmptyContext ctx) { return visitChildren(ctx); } + @Override public T visitEmpty(PainlessParser.EmptyContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitEmptyscope(PlanAParser.EmptyscopeContext ctx) { return visitChildren(ctx); } + @Override public T visitEmptyscope(PainlessParser.EmptyscopeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitInitializer(PlanAParser.InitializerContext ctx) { return visitChildren(ctx); } + @Override public T visitInitializer(PainlessParser.InitializerContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitAfterthought(PlanAParser.AfterthoughtContext ctx) { return visitChildren(ctx); } + @Override public T visitAfterthought(PainlessParser.AfterthoughtContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitDeclaration(PlanAParser.DeclarationContext ctx) { return visitChildren(ctx); } + @Override public T visitDeclaration(PainlessParser.DeclarationContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitDecltype(PlanAParser.DecltypeContext ctx) { return visitChildren(ctx); } + @Override public T visitDecltype(PainlessParser.DecltypeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitDeclvar(PlanAParser.DeclvarContext ctx) { return visitChildren(ctx); } + @Override public T visitDeclvar(PainlessParser.DeclvarContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitTrap(PlanAParser.TrapContext ctx) { return visitChildren(ctx); } + @Override public T visitTrap(PainlessParser.TrapContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitComp(PlanAParser.CompContext ctx) { return visitChildren(ctx); } + @Override public T visitComp(PainlessParser.CompContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitBool(PlanAParser.BoolContext ctx) { return visitChildren(ctx); } + @Override public T visitBool(PainlessParser.BoolContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitConditional(PlanAParser.ConditionalContext ctx) { return visitChildren(ctx); } + @Override public T visitConditional(PainlessParser.ConditionalContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitAssignment(PlanAParser.AssignmentContext ctx) { return visitChildren(ctx); } + @Override public T visitAssignment(PainlessParser.AssignmentContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitFalse(PlanAParser.FalseContext ctx) { return visitChildren(ctx); } + @Override public T visitFalse(PainlessParser.FalseContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitNumeric(PlanAParser.NumericContext ctx) { return visitChildren(ctx); } + @Override public T visitNumeric(PainlessParser.NumericContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitUnary(PlanAParser.UnaryContext ctx) { return visitChildren(ctx); } + @Override public T visitUnary(PainlessParser.UnaryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitPrecedence(PlanAParser.PrecedenceContext ctx) { return visitChildren(ctx); } + @Override public T visitPrecedence(PainlessParser.PrecedenceContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitPreinc(PlanAParser.PreincContext ctx) { return visitChildren(ctx); } + @Override public T visitPreinc(PainlessParser.PreincContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitPostinc(PlanAParser.PostincContext ctx) { return visitChildren(ctx); } + @Override public T visitPostinc(PainlessParser.PostincContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitCast(PlanAParser.CastContext ctx) { return visitChildren(ctx); } + @Override public T visitCast(PainlessParser.CastContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExternal(PlanAParser.ExternalContext ctx) { return visitChildren(ctx); } + @Override public T visitExternal(PainlessParser.ExternalContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitNull(PlanAParser.NullContext ctx) { return visitChildren(ctx); } + @Override public T visitNull(PainlessParser.NullContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitBinary(PlanAParser.BinaryContext ctx) { return visitChildren(ctx); } + @Override public T visitBinary(PainlessParser.BinaryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitChar(PlanAParser.CharContext ctx) { return visitChildren(ctx); } + @Override public T visitChar(PainlessParser.CharContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitTrue(PlanAParser.TrueContext ctx) { return visitChildren(ctx); } + @Override public T visitTrue(PainlessParser.TrueContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtstart(PlanAParser.ExtstartContext ctx) { return visitChildren(ctx); } + @Override public T visitExtstart(PainlessParser.ExtstartContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtprec(PlanAParser.ExtprecContext ctx) { return visitChildren(ctx); } + @Override public T visitExtprec(PainlessParser.ExtprecContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtcast(PlanAParser.ExtcastContext ctx) { return visitChildren(ctx); } + @Override public T visitExtcast(PainlessParser.ExtcastContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtbrace(PlanAParser.ExtbraceContext ctx) { return visitChildren(ctx); } + @Override public T visitExtbrace(PainlessParser.ExtbraceContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtdot(PlanAParser.ExtdotContext ctx) { return visitChildren(ctx); } + @Override public T visitExtdot(PainlessParser.ExtdotContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExttype(PlanAParser.ExttypeContext ctx) { return visitChildren(ctx); } + @Override public T visitExttype(PainlessParser.ExttypeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtcall(PlanAParser.ExtcallContext ctx) { return visitChildren(ctx); } + @Override public T visitExtcall(PainlessParser.ExtcallContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtvar(PlanAParser.ExtvarContext ctx) { return visitChildren(ctx); } + @Override public T visitExtvar(PainlessParser.ExtvarContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtfield(PlanAParser.ExtfieldContext ctx) { return visitChildren(ctx); } + @Override public T visitExtfield(PainlessParser.ExtfieldContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtnew(PlanAParser.ExtnewContext ctx) { return visitChildren(ctx); } + @Override public T visitExtnew(PainlessParser.ExtnewContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitExtstring(PlanAParser.ExtstringContext ctx) { return visitChildren(ctx); } + @Override public T visitExtstring(PainlessParser.ExtstringContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitArguments(PlanAParser.ArgumentsContext ctx) { return visitChildren(ctx); } + @Override public T visitArguments(PainlessParser.ArgumentsContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.
*/ - @Override public T visitIncrement(PlanAParser.IncrementContext ctx) { return visitChildren(ctx); } + @Override public T visitIncrement(PainlessParser.IncrementContext ctx) { return visitChildren(ctx); } } diff --git a/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java new file mode 100644 index 00000000000..e1a002a02a0 --- /dev/null +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessParserVisitor.java @@ -0,0 +1,348 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.painless; +import org.antlr.v4.runtime.tree.ParseTreeVisitor; + +/** + * This interface defines a complete generic visitor for a parse tree produced + * by {@link PainlessParser}. + * + * @param The return type of the visit operation. Use {@link Void} for + * operations with no return type. + */ +interface PainlessParserVisitor extends ParseTreeVisitor { + /** + * Visit a parse tree produced by {@link PainlessParser#source}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSource(PainlessParser.SourceContext ctx); + /** + * Visit a parse tree produced by the {@code if} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitIf(PainlessParser.IfContext ctx); + /** + * Visit a parse tree produced by the {@code while} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitWhile(PainlessParser.WhileContext ctx); + /** + * Visit a parse tree produced by the {@code do} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDo(PainlessParser.DoContext ctx); + /** + * Visit a parse tree produced by the {@code for} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFor(PainlessParser.ForContext ctx); + /** + * Visit a parse tree produced by the {@code decl} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDecl(PainlessParser.DeclContext ctx); + /** + * Visit a parse tree produced by the {@code continue} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitContinue(PainlessParser.ContinueContext ctx); + /** + * Visit a parse tree produced by the {@code break} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBreak(PainlessParser.BreakContext ctx); + /** + * Visit a parse tree produced by the {@code return} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitReturn(PainlessParser.ReturnContext ctx); + /** + * Visit a parse tree produced by the {@code try} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTry(PainlessParser.TryContext ctx); + /** + * Visit a parse tree produced by the {@code throw} + * labeled alternative in {@link PainlessParser#statement}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitThrow(PainlessParser.ThrowContext ctx); + /** + * Visit a parse tree produced by the {@code expr} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExpr(PainlessParser.ExprContext ctx); + /** + * Visit a parse tree produced by the {@code multiple} + * labeled alternative in {@link PainlessParser#block}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMultiple(PainlessParser.MultipleContext ctx); + /** + * Visit a parse tree produced by the {@code single} + * labeled alternative in {@link PainlessParser#block}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSingle(PainlessParser.SingleContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#empty}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEmpty(PainlessParser.EmptyContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#emptyscope}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEmptyscope(PainlessParser.EmptyscopeContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#initializer}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitInitializer(PainlessParser.InitializerContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#afterthought}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAfterthought(PainlessParser.AfterthoughtContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#declaration}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDeclaration(PainlessParser.DeclarationContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#decltype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDecltype(PainlessParser.DecltypeContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#declvar}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDeclvar(PainlessParser.DeclvarContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#trap}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrap(PainlessParser.TrapContext ctx); + /** + * Visit a parse tree produced by the {@code comp} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitComp(PainlessParser.CompContext ctx); + /** + * Visit a parse tree produced by the {@code bool} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBool(PainlessParser.BoolContext ctx); + /** + * Visit a parse tree produced by the {@code conditional} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitConditional(PainlessParser.ConditionalContext ctx); + /** + * Visit a parse tree produced by the {@code assignment} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAssignment(PainlessParser.AssignmentContext ctx); + /** + * Visit a parse tree produced by the {@code false} + * labeled alternative in {@link PainlessParser#expression}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitFalse(PainlessParser.FalseContext ctx); + /** + * Visit a parse tree produced by the {@code numeric} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNumeric(PainlessParser.NumericContext ctx); + /** + * Visit a parse tree produced by the {@code unary} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitUnary(PainlessParser.UnaryContext ctx); + /** + * Visit a parse tree produced by the {@code precedence} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPrecedence(PainlessParser.PrecedenceContext ctx); + /** + * Visit a parse tree produced by the {@code preinc} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPreinc(PainlessParser.PreincContext ctx); + /** + * Visit a parse tree produced by the {@code postinc} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPostinc(PainlessParser.PostincContext ctx); + /** + * Visit a parse tree produced by the {@code cast} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCast(PainlessParser.CastContext ctx); + /** + * Visit a parse tree produced by the {@code external} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExternal(PainlessParser.ExternalContext ctx); + /** + * Visit a parse tree produced by the {@code null} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNull(PainlessParser.NullContext ctx); + /** + * Visit a parse tree produced by the {@code binary} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBinary(PainlessParser.BinaryContext ctx); + /** + * Visit a parse tree produced by the {@code char} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitChar(PainlessParser.CharContext ctx); + /** + * Visit a parse tree produced by the {@code true} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrue(PainlessParser.TrueContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extstart}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtstart(PainlessParser.ExtstartContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extprec}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtprec(PainlessParser.ExtprecContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extcast}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtcast(PainlessParser.ExtcastContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extbrace}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitExtbrace(PainlessParser.ExtbraceContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extdot}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtdot(PainlessParser.ExtdotContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#exttype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExttype(PainlessParser.ExttypeContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extcall}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtcall(PainlessParser.ExtcallContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extvar}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtvar(PainlessParser.ExtvarContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extfield}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtfield(PainlessParser.ExtfieldContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extnew}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtnew(PainlessParser.ExtnewContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#extstring}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtstring(PainlessParser.ExtstringContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#arguments}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitArguments(PainlessParser.ArgumentsContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#increment}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitIncrement(PainlessParser.IncrementContext ctx); +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java similarity index 81% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index dbfda5ef57f..b64ed71f8de 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -17,25 +17,25 @@ * under the License. 
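The PainlessParserVisitor interface introduced in the hunk above is the standard ANTLR visitor hook: callers either implement it directly or extend the generated PainlessParserBaseVisitor (renamed earlier in this diff) and override only the rules they care about. A minimal sketch, assuming only the generated types shown in this diff and living in the same org.elasticsearch.painless package; the statement-counting logic is illustrative and not part of this change:

class StatementCountingVisitor extends PainlessParserBaseVisitor<Integer> {
    @Override
    public Integer visitSource(PainlessParser.SourceContext ctx) {
        // Count the top-level statements of a script; every other rule falls
        // through to the generated default behaviour of the base visitor.
        int count = 0;
        for (PainlessParser.StatementContext statement : ctx.statement()) {
            count++;
            visit(statement); // still walk the subtree
        }
        return count;
    }
}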
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptModule; -public final class PlanAPlugin extends Plugin { +public final class PainlessPlugin extends Plugin { @Override public String name() { - return "lang-plan-a"; + return "lang-painless"; } @Override public String description() { - return "Plan A scripting language for Elasticsearch"; + return "Painless scripting language for Elasticsearch"; } public void onModule(final ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PlanAScriptEngineService.class, PlanAScriptEngineService.TYPES)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PainlessScriptEngineService.class, PainlessScriptEngineService.TYPES)); } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java similarity index 90% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java index 34524d16734..34ebddd9fb0 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.SpecialPermission; @@ -43,17 +43,30 @@ import java.util.List; import java.util.Map; /** - * Implementation of a ScriptEngine for the Plan A language. + * Implementation of a ScriptEngine for the Painless language. */ -public class PlanAScriptEngineService extends AbstractComponent implements ScriptEngineService { +public class PainlessScriptEngineService extends AbstractComponent implements ScriptEngineService { /** - * Standard name of the Plan A language. + * Standard name of the Painless language. */ - public static final String NAME = "plan-a"; + public static final String NAME = "painless"; + /** + * Standard list of names for the Painless language. (There is only one.) + */ public static final List TYPES = Collections.singletonList(NAME); + /** + * Standard extension of the Painless language. + */ + public static final String EXTENSION = "pain"; + + /** + * Standard list of extensions for the Painless language. (There is only one.) + */ + public static final List EXTENSIONS = Collections.singletonList(EXTENSION); + /** * Default compiler settings to be used. */ @@ -92,7 +105,7 @@ public class PlanAScriptEngineService extends AbstractComponent implements Scrip * @param settings The settings to initialize the engine with. */ @Inject - public PlanAScriptEngineService(final Settings settings) { + public PainlessScriptEngineService(final Settings settings) { super(settings); } @@ -111,7 +124,7 @@ public class PlanAScriptEngineService extends AbstractComponent implements Scrip */ @Override public List getExtensions() { - return TYPES; + return EXTENSIONS; } /** @@ -124,7 +137,7 @@ public class PlanAScriptEngineService extends AbstractComponent implements Scrip } /** - * Compiles a Plan A script with the specified parameters. 
+ * Compiles a Painless script with the specified parameters. * @param script The code to be compiled. * @param params The params used to modify the compiler settings on a per script basis. * @return Compiled script object represented by an {@link Executable}. diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ParserErrorStrategy.java similarity index 97% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/ParserErrorStrategy.java index 5032ae3222a..0bffa9535f6 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ParserErrorStrategy.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plan.a; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -19,6 +17,8 @@ package org.elasticsearch.plan.a; * under the License. */ +package org.elasticsearch.painless; + import org.antlr.v4.runtime.DefaultErrorStrategy; import org.antlr.v4.runtime.InputMismatchException; import org.antlr.v4.runtime.NoViableAltException; @@ -41,7 +41,7 @@ class ParserErrorStrategy extends DefaultErrorStrategy { " unexpected token [" + getTokenErrorDisplay(token) + "]" + " was expecting one of [" + re.getExpectedTokens().toString(recognizer.getVocabulary()) + "]."; } else if (re instanceof NoViableAltException) { - if (token.getType() == PlanAParser.EOF) { + if (token.getType() == PainlessParser.EOF) { message = "Error: unexpected end of script."; } else { message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" + diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java similarity index 93% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java index 9ce00118c58..34fc2cdd90a 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java @@ -17,10 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; - -import java.util.HashMap; -import java.util.Map; +package org.elasticsearch.painless; import org.apache.lucene.search.Scorer; import org.elasticsearch.script.ExecutableScript; @@ -28,13 +25,16 @@ import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.search.lookup.LeafSearchLookup; +import java.util.HashMap; +import java.util.Map; + /** * ScriptImpl can be used as either an {@link ExecutableScript} or a {@link LeafSearchScript} - * to run a previously compiled Plan A script. + * to run a previously compiled Painless script. */ final class ScriptImpl implements ExecutableScript, LeafSearchScript { /** - * The Plan A Executable script that can be run. + * The Painless Executable script that can be run. */ private final Executable executable; @@ -49,8 +49,8 @@ final class ScriptImpl implements ExecutableScript, LeafSearchScript { private final LeafSearchLookup lookup; /** - * Creates a ScriptImpl for the a previously compiled Plan A script. 
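The javadoc carried over above documents the compile path: compile(script, params) turns Painless source into an Executable, which ScriptImpl (also renamed in this diff) then adapts to ExecutableScript or LeafSearchScript. A minimal sketch of driving the engine directly, assuming only members visible in this diff plus Settings.EMPTY and Collections.emptyMap() on the calling side; inside Elasticsearch the engine is registered through PainlessPlugin.onModule rather than constructed by hand:

// Hypothetical helper, not part of this change.
static Object compileSnippet(final String source) {
    PainlessScriptEngineService engine = new PainlessScriptEngineService(Settings.EMPTY);
    // The second argument carries per-script compiler settings per the javadoc above.
    return engine.compile(source, Collections.emptyMap());
}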
- * @param executable The previously compiled Plan A script. + * Creates a ScriptImpl for the a previously compiled Painless script. + * @param executable The previously compiled Painless script. * @param vars The initial variables to run the script with. * @param lookup The lookup to allow search fields to be available if this is run as a search script. */ diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java similarity index 99% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java index f132d1edf2f..6eed17ff983 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; public class Utility { public static boolean NumberToboolean(final Number value) { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java similarity index 85% rename from plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java rename to plugins/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java index 34b5b535afa..1abc2894a13 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java +++ b/plugins/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java @@ -17,10 +17,17 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.Definition.Constructor; +import org.elasticsearch.painless.Definition.Field; +import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Transform; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.script.ScoreAccessor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Label; @@ -35,85 +42,73 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static org.elasticsearch.plan.a.Definition.Cast; -import static org.elasticsearch.plan.a.Definition.Constructor; -import static org.elasticsearch.plan.a.Definition.Field; -import static org.elasticsearch.plan.a.Definition.Method; -import static org.elasticsearch.plan.a.Definition.Sort; -import static org.elasticsearch.plan.a.Definition.Transform; -import static org.elasticsearch.plan.a.Definition.Type; -import static org.elasticsearch.plan.a.Metadata.ExpressionMetadata; -import static org.elasticsearch.plan.a.Metadata.ExtNodeMetadata; -import static org.elasticsearch.plan.a.Metadata.ExternalMetadata; -import static org.elasticsearch.plan.a.Metadata.StatementMetadata; -import static org.elasticsearch.plan.a.Metadata.error; -import static org.elasticsearch.plan.a.PlanAParser.ADD; -import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; -import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; -import static org.elasticsearch.plan.a.PlanAParser.AssignmentContext; -import static org.elasticsearch.plan.a.PlanAParser.BWAND; -import static 
org.elasticsearch.plan.a.PlanAParser.BWOR; -import static org.elasticsearch.plan.a.PlanAParser.BWXOR; -import static org.elasticsearch.plan.a.PlanAParser.BinaryContext; -import static org.elasticsearch.plan.a.PlanAParser.BlockContext; -import static org.elasticsearch.plan.a.PlanAParser.BoolContext; -import static org.elasticsearch.plan.a.PlanAParser.BreakContext; -import static org.elasticsearch.plan.a.PlanAParser.CastContext; -import static org.elasticsearch.plan.a.PlanAParser.CharContext; -import static org.elasticsearch.plan.a.PlanAParser.CompContext; -import static org.elasticsearch.plan.a.PlanAParser.ConditionalContext; -import static org.elasticsearch.plan.a.PlanAParser.ContinueContext; -import static org.elasticsearch.plan.a.PlanAParser.DIV; -import static org.elasticsearch.plan.a.PlanAParser.DeclContext; -import static org.elasticsearch.plan.a.PlanAParser.DeclarationContext; -import static org.elasticsearch.plan.a.PlanAParser.DecltypeContext; -import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; -import static org.elasticsearch.plan.a.PlanAParser.DoContext; -import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; -import static org.elasticsearch.plan.a.PlanAParser.EmptyscopeContext; -import static org.elasticsearch.plan.a.PlanAParser.ExprContext; -import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtcallContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtcastContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtdotContext; -import static org.elasticsearch.plan.a.PlanAParser.ExternalContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtnewContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtprecContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtstartContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtstringContext; -import static org.elasticsearch.plan.a.PlanAParser.ExttypeContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtvarContext; -import static org.elasticsearch.plan.a.PlanAParser.FalseContext; -import static org.elasticsearch.plan.a.PlanAParser.ForContext; -import static org.elasticsearch.plan.a.PlanAParser.IfContext; -import static org.elasticsearch.plan.a.PlanAParser.IncrementContext; -import static org.elasticsearch.plan.a.PlanAParser.InitializerContext; -import static org.elasticsearch.plan.a.PlanAParser.LSH; -import static org.elasticsearch.plan.a.PlanAParser.MUL; -import static org.elasticsearch.plan.a.PlanAParser.MultipleContext; -import static org.elasticsearch.plan.a.PlanAParser.NullContext; -import static org.elasticsearch.plan.a.PlanAParser.NumericContext; -import static org.elasticsearch.plan.a.PlanAParser.PostincContext; -import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; -import static org.elasticsearch.plan.a.PlanAParser.PreincContext; -import static org.elasticsearch.plan.a.PlanAParser.REM; -import static org.elasticsearch.plan.a.PlanAParser.RSH; -import static org.elasticsearch.plan.a.PlanAParser.ReturnContext; -import static org.elasticsearch.plan.a.PlanAParser.SUB; -import static org.elasticsearch.plan.a.PlanAParser.SingleContext; -import static org.elasticsearch.plan.a.PlanAParser.SourceContext; -import static org.elasticsearch.plan.a.PlanAParser.StatementContext; -import static org.elasticsearch.plan.a.PlanAParser.ThrowContext; 
-import static org.elasticsearch.plan.a.PlanAParser.TrapContext; -import static org.elasticsearch.plan.a.PlanAParser.TrueContext; -import static org.elasticsearch.plan.a.PlanAParser.TryContext; -import static org.elasticsearch.plan.a.PlanAParser.USH; -import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; -import static org.elasticsearch.plan.a.PlanAParser.WhileContext; +import static org.elasticsearch.painless.PainlessParser.ADD; +import static org.elasticsearch.painless.PainlessParser.AfterthoughtContext; +import static org.elasticsearch.painless.PainlessParser.ArgumentsContext; +import static org.elasticsearch.painless.PainlessParser.AssignmentContext; +import static org.elasticsearch.painless.PainlessParser.BWAND; +import static org.elasticsearch.painless.PainlessParser.BWOR; +import static org.elasticsearch.painless.PainlessParser.BWXOR; +import static org.elasticsearch.painless.PainlessParser.BinaryContext; +import static org.elasticsearch.painless.PainlessParser.BlockContext; +import static org.elasticsearch.painless.PainlessParser.BoolContext; +import static org.elasticsearch.painless.PainlessParser.BreakContext; +import static org.elasticsearch.painless.PainlessParser.CastContext; +import static org.elasticsearch.painless.PainlessParser.CharContext; +import static org.elasticsearch.painless.PainlessParser.CompContext; +import static org.elasticsearch.painless.PainlessParser.ConditionalContext; +import static org.elasticsearch.painless.PainlessParser.ContinueContext; +import static org.elasticsearch.painless.PainlessParser.DIV; +import static org.elasticsearch.painless.PainlessParser.DeclContext; +import static org.elasticsearch.painless.PainlessParser.DeclarationContext; +import static org.elasticsearch.painless.PainlessParser.DecltypeContext; +import static org.elasticsearch.painless.PainlessParser.DeclvarContext; +import static org.elasticsearch.painless.PainlessParser.DoContext; +import static org.elasticsearch.painless.PainlessParser.EmptyContext; +import static org.elasticsearch.painless.PainlessParser.EmptyscopeContext; +import static org.elasticsearch.painless.PainlessParser.ExprContext; +import static org.elasticsearch.painless.PainlessParser.ExpressionContext; +import static org.elasticsearch.painless.PainlessParser.ExtbraceContext; +import static org.elasticsearch.painless.PainlessParser.ExtcallContext; +import static org.elasticsearch.painless.PainlessParser.ExtcastContext; +import static org.elasticsearch.painless.PainlessParser.ExtdotContext; +import static org.elasticsearch.painless.PainlessParser.ExternalContext; +import static org.elasticsearch.painless.PainlessParser.ExtfieldContext; +import static org.elasticsearch.painless.PainlessParser.ExtnewContext; +import static org.elasticsearch.painless.PainlessParser.ExtprecContext; +import static org.elasticsearch.painless.PainlessParser.ExtstartContext; +import static org.elasticsearch.painless.PainlessParser.ExtstringContext; +import static org.elasticsearch.painless.PainlessParser.ExttypeContext; +import static org.elasticsearch.painless.PainlessParser.ExtvarContext; +import static org.elasticsearch.painless.PainlessParser.FalseContext; +import static org.elasticsearch.painless.PainlessParser.ForContext; +import static org.elasticsearch.painless.PainlessParser.IfContext; +import static org.elasticsearch.painless.PainlessParser.IncrementContext; +import static org.elasticsearch.painless.PainlessParser.InitializerContext; +import static org.elasticsearch.painless.PainlessParser.LSH; +import static 
org.elasticsearch.painless.PainlessParser.MUL; +import static org.elasticsearch.painless.PainlessParser.MultipleContext; +import static org.elasticsearch.painless.PainlessParser.NullContext; +import static org.elasticsearch.painless.PainlessParser.NumericContext; +import static org.elasticsearch.painless.PainlessParser.PostincContext; +import static org.elasticsearch.painless.PainlessParser.PrecedenceContext; +import static org.elasticsearch.painless.PainlessParser.PreincContext; +import static org.elasticsearch.painless.PainlessParser.REM; +import static org.elasticsearch.painless.PainlessParser.RSH; +import static org.elasticsearch.painless.PainlessParser.ReturnContext; +import static org.elasticsearch.painless.PainlessParser.SUB; +import static org.elasticsearch.painless.PainlessParser.SingleContext; +import static org.elasticsearch.painless.PainlessParser.SourceContext; +import static org.elasticsearch.painless.PainlessParser.StatementContext; +import static org.elasticsearch.painless.PainlessParser.ThrowContext; +import static org.elasticsearch.painless.PainlessParser.TrapContext; +import static org.elasticsearch.painless.PainlessParser.TrueContext; +import static org.elasticsearch.painless.PainlessParser.TryContext; +import static org.elasticsearch.painless.PainlessParser.USH; +import static org.elasticsearch.painless.PainlessParser.UnaryContext; +import static org.elasticsearch.painless.PainlessParser.WhileContext; -class Writer extends PlanAParserBaseVisitor { +class Writer extends PainlessParserBaseVisitor { private static class Branch { final ParserRuleContext source; @@ -128,15 +123,15 @@ class Writer extends PlanAParserBaseVisitor { } final static String BASE_CLASS_NAME = Executable.class.getName(); - final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPlanAExecutable"; + final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPainlessExecutable"; private final static org.objectweb.asm.Type BASE_CLASS_TYPE = org.objectweb.asm.Type.getType(Executable.class); private final static org.objectweb.asm.Type CLASS_TYPE = org.objectweb.asm.Type.getType("L" + CLASS_NAME.replace(".", "/") + ";"); - private final static org.objectweb.asm.Type PLAN_A_ERROR_TYPE = org.objectweb.asm.Type.getType(PlanAError.class); + private final static org.objectweb.asm.Type PAINLESS_ERROR_TYPE = org.objectweb.asm.Type.getType(PainlessError.class); private final static org.objectweb.asm.commons.Method CONSTRUCTOR = org.objectweb.asm.commons.Method.getMethod( - "void (org.elasticsearch.plan.a.Definition, java.lang.String, java.lang.String)"); + "void (org.elasticsearch.painless.Definition, java.lang.String, java.lang.String)"); private final static org.objectweb.asm.commons.Method EXECUTE = org.objectweb.asm.commons.Method.getMethod( "java.lang.Object execute(java.util.Map)"); private final static String SIGNATURE = "(Ljava/util/Map;)Ljava/lang/Object;"; @@ -153,18 +148,18 @@ class Writer extends PlanAParserBaseVisitor { private final static org.objectweb.asm.commons.Method DEF_METHOD_CALL = org.objectweb.asm.commons.Method.getMethod( "java.lang.Object methodCall(java.lang.Object, java.lang.String, " + - "org.elasticsearch.plan.a.Definition, java.lang.Object[], boolean[])"); + "org.elasticsearch.painless.Definition, java.lang.Object[], boolean[])"); private final static org.objectweb.asm.commons.Method DEF_ARRAY_STORE = org.objectweb.asm.commons.Method.getMethod( "void arrayStore(java.lang.Object, java.lang.Object, java.lang.Object, " + - "org.elasticsearch.plan.a.Definition, boolean, 
boolean)"); + "org.elasticsearch.painless.Definition, boolean, boolean)"); private final static org.objectweb.asm.commons.Method DEF_ARRAY_LOAD = org.objectweb.asm.commons.Method.getMethod( "java.lang.Object arrayLoad(java.lang.Object, java.lang.Object, " + - "org.elasticsearch.plan.a.Definition, boolean)"); + "org.elasticsearch.painless.Definition, boolean)"); private final static org.objectweb.asm.commons.Method DEF_FIELD_STORE = org.objectweb.asm.commons.Method.getMethod( "void fieldStore(java.lang.Object, java.lang.Object, java.lang.String, " + - "org.elasticsearch.plan.a.Definition, boolean)"); + "org.elasticsearch.painless.Definition, boolean)"); private final static org.objectweb.asm.commons.Method DEF_FIELD_LOAD = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object fieldLoad(java.lang.Object, java.lang.String, org.elasticsearch.plan.a.Definition)"); + "java.lang.Object fieldLoad(java.lang.Object, java.lang.String, org.elasticsearch.painless.Definition)"); private final static org.objectweb.asm.commons.Method DEF_NOT_CALL = org.objectweb.asm.commons.Method.getMethod( "java.lang.Object not(java.lang.Object)"); @@ -409,7 +404,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitSource(final SourceContext ctx) { - final StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); for (final StatementContext sctx : ctx.statement()) { visit(sctx); @@ -434,7 +429,7 @@ class Writer extends PlanAParserBaseVisitor { visit(exprctx); final BlockContext blockctx0 = ctx.block(0); - final StatementMetadata blockmd0 = metadata.getStatementMetadata(blockctx0); + final Metadata.StatementMetadata blockmd0 = metadata.getStatementMetadata(blockctx0); visit(blockctx0); if (els) { @@ -467,14 +462,14 @@ class Writer extends PlanAParserBaseVisitor { boolean allLast = false; if (blockctx != null) { - final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + final Metadata.StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); allLast = blocksmd.allLast; writeLoopCounter(blocksmd.count > 0 ? blocksmd.count : 1); visit(blockctx); } else if (ctx.empty() != null) { writeLoopCounter(1); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } if (!allLast) { @@ -497,7 +492,7 @@ class Writer extends PlanAParserBaseVisitor { branch.fals = branch.end; final BlockContext blockctx = ctx.block(); - final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + final Metadata.StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); jumps.push(branch); execute.mark(start); @@ -538,7 +533,7 @@ class Writer extends PlanAParserBaseVisitor { boolean allLast = false; if (blockctx != null) { - StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + Metadata.StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); allLast = blocksmd.allLast; int count = blocksmd.count > 0 ? 
blocksmd.count : 1; @@ -552,7 +547,7 @@ class Writer extends PlanAParserBaseVisitor { } else if (ctx.empty() != null) { writeLoopCounter(1); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } if (atctx != null) { @@ -615,7 +610,7 @@ class Writer extends PlanAParserBaseVisitor { execute.mark(branch.begin); final BlockContext blockctx = ctx.block(); - final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + final Metadata.StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); visit(blockctx); if (!blocksmd.allLast) { @@ -645,9 +640,9 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExpr(final ExprContext ctx) { - final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); final ExpressionContext exprctx = ctx.expression(); - final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); visit(exprctx); if (exprsmd.methodEscape) { @@ -677,7 +672,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitEmpty(final EmptyContext ctx) { - throw new UnsupportedOperationException(error(ctx) + "Unexpected writer state."); + throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected writer state."); } @Override @@ -688,11 +683,11 @@ class Writer extends PlanAParserBaseVisitor { if (declctx != null) { visit(declctx); } else if (exprctx != null) { - final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); visit(exprctx); writePop(expremd.to.type.getSize()); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } return null; @@ -701,7 +696,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitAfterthought(AfterthoughtContext ctx) { final ExpressionContext exprctx = ctx.expression(); - final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); visit(ctx.expression()); writePop(expremd.to.type.getSize()); @@ -719,12 +714,12 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitDecltype(final DecltypeContext ctx) { - throw new UnsupportedOperationException(error(ctx) + "Unexpected writer state."); + throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected writer state."); } @Override public Void visitDeclvar(final DeclvarContext ctx) { - final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); final org.objectweb.asm.Type type = declvaremd.to.type; final Sort sort = declvaremd.to.sort; final int slot = (int)declvaremd.postConst; @@ -737,7 +732,7 @@ class Writer extends PlanAParserBaseVisitor { } switch (sort) { - case VOID: throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + case VOID: throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); case BOOL: case BYTE: case SHORT: @@ -756,7 +751,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void 
visitTrap(final TrapContext ctx) { - final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); + final Metadata.StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); final Branch branch = getBranch(ctx); final Label jump = new Label(); @@ -770,7 +765,7 @@ class Writer extends PlanAParserBaseVisitor { if (blockctx != null) { visit(ctx.block()); } else if (emptyctx == null) { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } execute.visitTryCatchBlock(branch.begin, branch.end, jump, trapsmd.exception.type.getInternalName()); @@ -784,12 +779,12 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitPrecedence(final PrecedenceContext ctx) { - throw new UnsupportedOperationException(error(ctx) + "Unexpected writer state."); + throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected writer state."); } @Override public Void visitNumeric(final NumericContext ctx) { - final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); final Object postConst = numericemd.postConst; if (postConst == null) { @@ -806,7 +801,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitChar(final CharContext ctx) { - final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); final Object postConst = charemd.postConst; if (postConst == null) { @@ -823,7 +818,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitTrue(final TrueContext ctx) { - final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); final Object postConst = trueemd.postConst; final Branch branch = getBranch(ctx); @@ -843,7 +838,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitFalse(final FalseContext ctx) { - final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); final Object postConst = falseemd.postConst; final Branch branch = getBranch(ctx); @@ -863,7 +858,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitNull(final NullContext ctx) { - final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); execute.visitInsn(Opcodes.ACONST_NULL); checkWriteCast(nullemd); @@ -874,7 +869,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExternal(final ExternalContext ctx) { - final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); visit(ctx.extstart()); checkWriteCast(expremd); checkWriteBranch(ctx); @@ -885,7 +880,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitPostinc(final PostincContext ctx) { - final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); visit(ctx.extstart()); checkWriteCast(expremd); checkWriteBranch(ctx); @@ -895,7 +890,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitPreinc(final PreincContext ctx) { - final 
ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); visit(ctx.extstart()); checkWriteCast(expremd); checkWriteBranch(ctx); @@ -905,7 +900,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitUnary(final UnaryContext ctx) { - final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); final Object postConst = unaryemd.postConst; final Object preConst = unaryemd.preConst; final Branch branch = getBranch(ctx); @@ -930,7 +925,7 @@ class Writer extends PlanAParserBaseVisitor { writeConstant(ctx, preConst); checkWriteCast(unaryemd); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } } else { final ExpressionContext exprctx = ctx.expression(); @@ -972,7 +967,7 @@ class Writer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { writeConstant(ctx, -1L); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } execute.math(GeneratorAdapter.XOR, type); @@ -989,12 +984,12 @@ class Writer extends PlanAParserBaseVisitor { } else if (sort == Sort.LONG) { execute.invokeStatic(definition.mathType.type, NEGATEEXACT_LONG); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } } } } else if (ctx.ADD() == null) { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } checkWriteCast(unaryemd); @@ -1007,7 +1002,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitCast(final CastContext ctx) { - final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); final Object postConst = castemd.postConst; if (postConst == null) { @@ -1024,7 +1019,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitBinary(final BinaryContext ctx) { - final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); final Object postConst = binaryemd.postConst; final Object preConst = binaryemd.preConst; final Branch branch = getBranch(ctx); @@ -1036,7 +1031,7 @@ class Writer extends PlanAParserBaseVisitor { writeConstant(ctx, preConst); checkWriteCast(binaryemd); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } } else if (binaryemd.from.sort == Sort.STRING) { final boolean marked = strings.contains(ctx); @@ -1046,7 +1041,7 @@ class Writer extends PlanAParserBaseVisitor { } final ExpressionContext exprctx0 = ctx.expression(0); - final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); + final Metadata.ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); strings.add(exprctx0); visit(exprctx0); @@ -1056,7 +1051,7 @@ class Writer extends PlanAParserBaseVisitor { } final ExpressionContext exprctx1 = ctx.expression(1); - final ExpressionMetadata expremd1 = 
metadata.getExpressionMetadata(exprctx1); + final Metadata.ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); strings.add(exprctx1); visit(exprctx1); @@ -1093,7 +1088,7 @@ class Writer extends PlanAParserBaseVisitor { else if (ctx.BWXOR() != null) writeBinaryInstruction(ctx, type, BWXOR); else if (ctx.BWOR() != null) writeBinaryInstruction(ctx, type, BWOR); else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } checkWriteCast(binaryemd); @@ -1106,7 +1101,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitComp(final CompContext ctx) { - final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); final Object postConst = compemd.postConst; final Object preConst = compemd.preConst; final Branch branch = getBranch(ctx); @@ -1126,14 +1121,14 @@ class Writer extends PlanAParserBaseVisitor { writeConstant(ctx, preConst); checkWriteCast(compemd); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } } else { final ExpressionContext exprctx0 = ctx.expression(0); - final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); + final Metadata.ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); final ExpressionContext exprctx1 = ctx.expression(1); - final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); + final Metadata.ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); final org.objectweb.asm.Type type = expremd1.to.type; final Sort sort1 = expremd1.to.sort; @@ -1164,12 +1159,12 @@ class Writer extends PlanAParserBaseVisitor { case BYTE: case SHORT: case CHAR: - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); case BOOL: if (eq) execute.ifZCmp(GeneratorAdapter.EQ, jump); else if (ne) execute.ifZCmp(GeneratorAdapter.NE, jump); else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } break; @@ -1184,7 +1179,7 @@ class Writer extends PlanAParserBaseVisitor { else if (gt) execute.ifCmp(type, GeneratorAdapter.GT, jump); else if (gte) execute.ifCmp(type, GeneratorAdapter.GE, jump); else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } break; @@ -1215,7 +1210,7 @@ class Writer extends PlanAParserBaseVisitor { } else if (gte) { execute.invokeStatic(definition.defobjType.type, DEF_GTE_CALL); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } writejump = expremd1.isNull || ne || ctx.EQR() != null; @@ -1250,7 +1245,7 @@ class Writer extends PlanAParserBaseVisitor { execute.ifCmp(type, GeneratorAdapter.NE, jump); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } } @@ -1272,7 +1267,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitBool(final BoolContext ctx) { - 
final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); final Object postConst = boolemd.postConst; final Object preConst = boolemd.preConst; final Branch branch = getBranch(ctx); @@ -1292,7 +1287,7 @@ class Writer extends PlanAParserBaseVisitor { writeConstant(ctx, preConst); checkWriteCast(boolemd); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } } else { final ExpressionContext exprctx0 = ctx.expression(0); @@ -1329,7 +1324,7 @@ class Writer extends PlanAParserBaseVisitor { execute.push(false); execute.mark(aend); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } checkWriteCast(boolemd); @@ -1361,7 +1356,7 @@ class Writer extends PlanAParserBaseVisitor { execute.mark(branch0.tru); } } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } } } @@ -1371,7 +1366,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitConditional(final ConditionalContext ctx) { - final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); final Branch branch = getBranch(ctx); final ExpressionContext expr0 = ctx.expression(0); @@ -1402,7 +1397,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitAssignment(final AssignmentContext ctx) { - final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); visit(ctx.extstart()); checkWriteCast(expremd); checkWriteBranch(ctx); @@ -1412,10 +1407,10 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExtstart(ExtstartContext ctx) { - final ExternalMetadata startemd = metadata.getExternalMetadata(ctx); + final Metadata.ExternalMetadata startemd = metadata.getExternalMetadata(ctx); if (startemd.token == ADD) { - final ExpressionMetadata storeemd = metadata.getExpressionMetadata(startemd.storeExpr); + final Metadata.ExpressionMetadata storeemd = metadata.getExpressionMetadata(startemd.storeExpr); if (startemd.current.sort == Sort.STRING || storeemd.from.sort == Sort.STRING) { writeNewStrings(); @@ -1471,7 +1466,7 @@ class Writer extends PlanAParserBaseVisitor { } else if (stringctx != null) { visit(stringctx); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } final ExtdotContext dotctx = ctx.extdot(); @@ -1488,7 +1483,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExtcast(final ExtcastContext ctx) { - ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); + Metadata.ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); final ExtprecContext precctx = ctx.extprec(); final ExtcastContext castctx = ctx.extcast(); @@ -1510,7 +1505,7 @@ class Writer extends PlanAParserBaseVisitor { } else if (stringctx != null) { visit(stringctx); } else { - throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(ctx) + "Unexpected writer state."); } 
checkWriteCast(ctx, castenmd.castTo); @@ -1624,7 +1619,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExtstring(ExtstringContext ctx) { - final ExtNodeMetadata stringenmd = metadata.getExtNodeMetadata(ctx); + final Metadata.ExtNodeMetadata stringenmd = metadata.getExtNodeMetadata(ctx); writeConstant(ctx, stringenmd.target); @@ -1642,12 +1637,12 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitArguments(final ArgumentsContext ctx) { - throw new UnsupportedOperationException(error(ctx) + "Unexpected writer state."); + throw new UnsupportedOperationException(Metadata.error(ctx) + "Unexpected writer state."); } @Override public Void visitIncrement(IncrementContext ctx) { - final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); + final Metadata.ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); final Object postConst = incremd.postConst; if (postConst == null) { @@ -1669,7 +1664,7 @@ class Writer extends PlanAParserBaseVisitor { execute.visitVarInsn(Opcodes.ILOAD, metadata.loopCounterSlot); execute.push(0); execute.ifICmp(GeneratorAdapter.GT, end); - execute.throwException(PLAN_A_ERROR_TYPE, + execute.throwException(PAINLESS_ERROR_TYPE, "The maximum number of statements that can be executed in a loop has been reached."); execute.mark(end); } @@ -1684,7 +1679,7 @@ class Writer extends PlanAParserBaseVisitor { } else if (constant instanceof Boolean) { writeBoolean(source, constant); } else if (constant != null) { - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } } @@ -1698,7 +1693,7 @@ class Writer extends PlanAParserBaseVisitor { } else if (numeric instanceof Number) { execute.push(((Number)numeric).intValue()); } else { - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } } @@ -1706,7 +1701,7 @@ class Writer extends PlanAParserBaseVisitor { if (string instanceof String) { execute.push((String)string); } else { - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } } @@ -1714,7 +1709,7 @@ class Writer extends PlanAParserBaseVisitor { if (bool instanceof Boolean) { execute.push((boolean)bool); } else { - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } } @@ -1767,7 +1762,7 @@ class Writer extends PlanAParserBaseVisitor { case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_INT); break; case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_INT); break; default: - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } break; @@ -1778,7 +1773,7 @@ class Writer extends PlanAParserBaseVisitor { case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_LONG); break; case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_LONG); break; default: - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } break; @@ -1790,7 +1785,7 @@ class Writer extends PlanAParserBaseVisitor { case ADD: 
execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_FLOAT); break; case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_FLOAT); break; default: - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } break; @@ -1802,17 +1797,17 @@ class Writer extends PlanAParserBaseVisitor { case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_DOUBLE); break; case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_DOUBLE); break; default: - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } break; default: - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } } else { if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && (token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) { - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } if (sort == Sort.DEF) { @@ -1829,7 +1824,7 @@ class Writer extends PlanAParserBaseVisitor { case BWXOR: execute.invokeStatic(definition.defobjType.type, DEF_XOR_CALL); break; case BWOR: execute.invokeStatic(definition.defobjType.type, DEF_OR_CALL); break; default: - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } } else { switch (token) { @@ -1845,7 +1840,7 @@ class Writer extends PlanAParserBaseVisitor { case BWXOR: execute.math(GeneratorAdapter.XOR, type.type); break; case BWOR: execute.math(GeneratorAdapter.OR, type.type); break; default: - throw new IllegalStateException(error(source) + "Unexpected writer state."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected writer state."); } } } @@ -1973,8 +1968,8 @@ class Writer extends PlanAParserBaseVisitor { } private void writeLoadStoreExternal(final ParserRuleContext source) { - final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); - final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); + final Metadata.ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); final boolean length = "#length".equals(sourceenmd.target); final boolean array = "#brace".equals(sourceenmd.target); @@ -1984,7 +1979,7 @@ class Writer extends PlanAParserBaseVisitor { final boolean shortcut = sourceenmd.target instanceof Object[]; if (!length && !variable && !field && !array && !name && !shortcut) { - throw new IllegalStateException(error(source) + "Target not found for load/store."); + throw new IllegalStateException(Metadata.error(source) + "Target not found for load/store."); } final boolean maplist = shortcut && (boolean)((Object[])sourceenmd.target)[2]; @@ -1996,7 +1991,7 @@ class Writer extends PlanAParserBaseVisitor { if (length) { execute.arrayLength(); } else if (sourceenmd.last && parentemd.storeExpr != null) { - final ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); 
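The arithmetic cases rewritten above show the overflow handling: '+' and '-' on int and long are routed to the exact-math helpers (ADDEXACT_INT, SUBEXACT_LONG, and friends), while float and double go through the ADDWOOVERLOW_*/SUBWOOVERLOW_* utility calls. A rough sketch of the runtime effect; mapping ADDEXACT_INT to Math.addExact is an assumption based on the constant name, and checkedAddFloat below is a hypothetical stand-in for the utility call, not the project's actual API:

// Illustrative only.
static int checkedAddInt(final int x, final int y) {
    return Math.addExact(x, y); // throws ArithmeticException on overflow
}

static float checkedAddFloat(final float x, final float y) {
    final float r = x + y;
    if (Float.isInfinite(r) && !Float.isInfinite(x) && !Float.isInfinite(y)) {
        throw new ArithmeticException("float addition overflow");
    }
    return r;
}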
final boolean cat = strings.contains(parentemd.storeExpr); if (cat) { @@ -2101,7 +2096,7 @@ class Writer extends PlanAParserBaseVisitor { final boolean store, final boolean variable, final boolean field, final boolean name, final boolean array, final boolean shortcut) { - final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); + final Metadata.ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); if (variable) { writeLoadStoreVariable(source, store, sourceemd.type, (int)sourceemd.target); @@ -2115,14 +2110,14 @@ class Writer extends PlanAParserBaseVisitor { Object[] targets = (Object[])sourceemd.target; writeLoadStoreShortcut(store, (Method)targets[0], (Method)targets[1]); } else { - throw new IllegalStateException(error(source) + "Load/Store requires a variable, field, or array."); + throw new IllegalStateException(Metadata.error(source) + "Load/Store requires a variable, field, or array."); } } private void writeLoadStoreVariable(final ParserRuleContext source, final boolean store, final Type type, final int slot) { if (type.sort == Sort.VOID) { - throw new IllegalStateException(error(source) + "Cannot load/store void type."); + throw new IllegalStateException(Metadata.error(source) + "Cannot load/store void type."); } if (store) { @@ -2158,9 +2153,9 @@ class Writer extends PlanAParserBaseVisitor { private void writeLoadStoreField(final ParserRuleContext source, final boolean store, final String name) { if (store) { - final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); - final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceemd.parent); - final ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); + final Metadata.ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(sourceemd.parent); + final Metadata.ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); execute.push(name); execute.loadThis(); @@ -2177,17 +2172,17 @@ class Writer extends PlanAParserBaseVisitor { private void writeLoadStoreArray(final ParserRuleContext source, final boolean store, final Type type) { if (type.sort == Sort.VOID) { - throw new IllegalStateException(error(source) + "Cannot load/store void type."); + throw new IllegalStateException(Metadata.error(source) + "Cannot load/store void type."); } if (type.sort == Sort.DEF) { final ExtbraceContext bracectx = (ExtbraceContext)source; - final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(bracectx.expression()); + final Metadata.ExpressionMetadata expremd0 = metadata.getExpressionMetadata(bracectx.expression()); if (store) { - final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(bracectx); - final ExternalMetadata parentemd = metadata.getExternalMetadata(braceenmd.parent); - final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(parentemd.storeExpr); + final Metadata.ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(bracectx); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(braceenmd.parent); + final Metadata.ExpressionMetadata expremd1 = metadata.getExpressionMetadata(parentemd.storeExpr); execute.loadThis(); execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); @@ -2246,14 +2241,14 @@ class Writer extends PlanAParserBaseVisitor { } private void writeNewExternal(final ExtnewContext source) { - final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); - final ExternalMetadata 
parentemd = metadata.getExternalMetadata(sourceenmd.parent); + final Metadata.ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); + final Metadata.ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); final boolean makearray = "#makearray".equals(sourceenmd.target); final boolean constructor = sourceenmd.target instanceof Constructor; if (!makearray && !constructor) { - throw new IllegalStateException(error(source) + "Target not found for new call."); + throw new IllegalStateException(Metadata.error(source) + "Target not found for new call."); } if (makearray) { @@ -2283,13 +2278,13 @@ class Writer extends PlanAParserBaseVisitor { } private void writeCallExternal(final ExtcallContext source) { - final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); + final Metadata.ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); final boolean method = sourceenmd.target instanceof Method; final boolean def = sourceenmd.target instanceof String; if (!method && !def) { - throw new IllegalStateException(error(source) + "Target not found for call."); + throw new IllegalStateException(Metadata.error(source) + "Target not found for call."); } final List arguments = source.arguments().expression(); @@ -2349,7 +2344,7 @@ class Writer extends PlanAParserBaseVisitor { } } - private void checkWriteCast(final ExpressionMetadata sort) { + private void checkWriteCast(final Metadata.ExpressionMetadata sort) { checkWriteCast(sort.source, sort.cast); } @@ -2359,7 +2354,7 @@ class Writer extends PlanAParserBaseVisitor { } else if (cast != null) { writeCast(cast); } else { - throw new IllegalStateException(error(source) + "Unexpected cast object."); + throw new IllegalStateException(Metadata.error(source) + "Unexpected cast object."); } } diff --git a/plugins/lang-plan-a/src/main/plugin-metadata/plugin-security.policy b/plugins/lang-painless/src/main/plugin-metadata/plugin-security.policy similarity index 100% rename from plugins/lang-plan-a/src/main/plugin-metadata/plugin-security.policy rename to plugins/lang-painless/src/main/plugin-metadata/plugin-security.policy diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/AdditionTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/AdditionTests.java index d6e05f973a2..2a8195ca66d 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/AdditionTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for addition operator across all types */ //TODO: NaN/Inf/overflow/... diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AndTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/AndTests.java similarity index 97% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AndTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/AndTests.java index 6a4168415dc..2c86250da83 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AndTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/AndTests.java @@ -17,29 +17,29 @@ * under the License. 
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for and operator across all types */ public class AndTests extends ScriptTestCase { - + public void testInt() throws Exception { assertEquals(5 & 12, exec("int x = 5; int y = 12; return x & y;")); assertEquals(5 & -12, exec("int x = 5; int y = -12; return x & y;")); assertEquals(7 & 15 & 3, exec("int x = 7; int y = 15; int z = 3; return x & y & z;")); } - + public void testIntConst() throws Exception { assertEquals(5 & 12, exec("return 5 & 12;")); assertEquals(5 & -12, exec("return 5 & -12;")); assertEquals(7 & 15 & 3, exec("return 7 & 15 & 3;")); } - + public void testLong() throws Exception { assertEquals(5L & 12L, exec("long x = 5; long y = 12; return x & y;")); assertEquals(5L & -12L, exec("long x = 5; long y = -12; return x & y;")); assertEquals(7L & 15L & 3L, exec("long x = 7; long y = 15; long z = 3; return x & y & z;")); } - + public void testLongConst() throws Exception { assertEquals(5L & 12L, exec("return 5L & 12L;")); assertEquals(5L & -12L, exec("return 5L & -12L;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java similarity index 98% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java index d81c4029a79..bcfec2343d1 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; public class BasicAPITests extends ScriptTestCase { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicExpressionTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicExpressionTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java index 6af8adab564..ee3607242ca 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicExpressionTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java @@ -1,4 +1,4 @@ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import java.util.Collections; @@ -94,8 +94,8 @@ public class BasicExpressionTests extends ScriptTestCase { assertEquals(true, exec("return 3 != 4;")); assertEquals(false, exec("double x = 3; float y = 3; return x != y;")); } - - /** + + /** * Test boxed objects in various places */ public void testBoxing() { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicStatementTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicStatementTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java index 07ad32d74af..ebdb9021b7b 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicStatementTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java @@ -1,4 +1,4 @@ -package org.elasticsearch.plan.a; +package 
org.elasticsearch.painless; /* * Licensed to Elasticsearch under one or more contributor diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BinaryOperatorTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BinaryOperatorTests.java similarity index 98% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BinaryOperatorTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/BinaryOperatorTests.java index 032cdcde5e0..5971cca0151 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BinaryOperatorTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/BinaryOperatorTests.java @@ -17,14 +17,14 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; -/** +/** * Tests binary operators across different types */ // TODO: NaN/Inf/overflow/... public class BinaryOperatorTests extends ScriptTestCase { - + // TODO: move to per-type tests and test for each type public void testBasics() { assertEquals(2.25F / 1.5F, exec("return 2.25F / 1.5F;")); @@ -43,7 +43,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(9L ^ 3, exec("return 9L ^ 3;")); assertEquals(9 ^ 3L, exec("return 9 ^ 3L;")); } - + public void testLongShifts() { // note: we always promote the results of shifts too (unlike java) assertEquals(1L << 2, exec("long x = 1L; int y = 2; return x << y;")); @@ -53,7 +53,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(-1L >>> 29, exec("long x = -1L; int y = 29; return x >>> y;")); assertEquals(-1L >>> 29L, exec("long x = -1L; long y = 29L; return x >>> y;")); } - + public void testLongShiftsConst() { // note: we always promote the results of shifts too (unlike java) assertEquals(1L << 2, exec("return 1L << 2;")); @@ -63,7 +63,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(-1L >>> 29, exec("return -1L >>> 29;")); assertEquals(-1L >>> 29L, exec("return -1 >>> 29L;")); } - + public void testMixedTypes() { assertEquals(8, exec("int x = 4; char y = 2; return x*y;")); assertEquals(0.5, exec("double x = 1; float y = 2; return x / y;")); @@ -77,7 +77,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(7, exec("short x = 5; byte y = 3; return x | y;")); assertEquals(10, exec("short x = 9; char y = 3; return x ^ y;")); } - + public void testBinaryPromotion() throws Exception { // byte/byte assertEquals((byte)1 + (byte)1, exec("byte x = 1; byte y = 1; return x+y;")); @@ -93,7 +93,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals((byte)1 + 1F, exec("byte x = 1; float y = 1; return x+y;")); // byte/double assertEquals((byte)1 + 1.0, exec("byte x = 1; double y = 1; return x+y;")); - + // char/byte assertEquals((char)1 + (byte)1, exec("char x = 1; byte y = 1; return x+y;")); // char/char @@ -108,7 +108,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals((char)1 + 1F, exec("char x = 1; float y = 1; return x+y;")); // char/double assertEquals((char)1 + 1.0, exec("char x = 1; double y = 1; return x+y;")); - + // short/byte assertEquals((short)1 + (byte)1, exec("short x = 1; byte y = 1; return x+y;")); // short/char @@ -123,7 +123,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals((short)1 + 1F, exec("short x = 1; float y = 1; return x+y;")); // short/double assertEquals((short)1 + 1.0, exec("short x = 1; double y = 1; return x+y;")); - + // int/byte assertEquals(1 + 
(byte)1, exec("int x = 1; byte y = 1; return x+y;")); // int/char @@ -138,7 +138,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(1 + 1F, exec("int x = 1; float y = 1; return x+y;")); // int/double assertEquals(1 + 1.0, exec("int x = 1; double y = 1; return x+y;")); - + // long/byte assertEquals(1L + (byte)1, exec("long x = 1; byte y = 1; return x+y;")); // long/char @@ -153,7 +153,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(1L + 1F, exec("long x = 1; float y = 1; return x+y;")); // long/double assertEquals(1L + 1.0, exec("long x = 1; double y = 1; return x+y;")); - + // float/byte assertEquals(1F + (byte)1, exec("float x = 1; byte y = 1; return x+y;")); // float/char @@ -168,7 +168,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(1F + 1F, exec("float x = 1; float y = 1; return x+y;")); // float/double assertEquals(1F + 1.0, exec("float x = 1; double y = 1; return x+y;")); - + // double/byte assertEquals(1.0 + (byte)1, exec("double x = 1; byte y = 1; return x+y;")); // double/char @@ -184,7 +184,7 @@ public class BinaryOperatorTests extends ScriptTestCase { // double/double assertEquals(1.0 + 1.0, exec("double x = 1; double y = 1; return x+y;")); } - + public void testBinaryPromotionConst() throws Exception { // byte/byte assertEquals((byte)1 + (byte)1, exec("return (byte)1 + (byte)1;")); @@ -200,7 +200,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals((byte)1 + 1F, exec("return (byte)1 + 1F;")); // byte/double assertEquals((byte)1 + 1.0, exec("return (byte)1 + 1.0;")); - + // char/byte assertEquals((char)1 + (byte)1, exec("return (char)1 + (byte)1;")); // char/char @@ -215,7 +215,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals((char)1 + 1F, exec("return (char)1 + 1F;")); // char/double assertEquals((char)1 + 1.0, exec("return (char)1 + 1.0;")); - + // short/byte assertEquals((short)1 + (byte)1, exec("return (short)1 + (byte)1;")); // short/char @@ -230,7 +230,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals((short)1 + 1F, exec("return (short)1 + 1F;")); // short/double assertEquals((short)1 + 1.0, exec("return (short)1 + 1.0;")); - + // int/byte assertEquals(1 + (byte)1, exec("return 1 + (byte)1;")); // int/char @@ -245,7 +245,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(1 + 1F, exec("return 1 + 1F;")); // int/double assertEquals(1 + 1.0, exec("return 1 + 1.0;")); - + // long/byte assertEquals(1L + (byte)1, exec("return 1L + (byte)1;")); // long/char @@ -260,7 +260,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(1L + 1F, exec("return 1L + 1F;")); // long/double assertEquals(1L + 1.0, exec("return 1L + 1.0;")); - + // float/byte assertEquals(1F + (byte)1, exec("return 1F + (byte)1;")); // float/char @@ -275,7 +275,7 @@ public class BinaryOperatorTests extends ScriptTestCase { assertEquals(1F + 1F, exec("return 1F + 1F;")); // float/double assertEquals(1F + 1.0, exec("return 1F + 1.0;")); - + // double/byte assertEquals(1.0 + (byte)1, exec("return 1.0 + (byte)1;")); // double/char diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/CompoundAssignmentTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/CompoundAssignmentTests.java rename to 
plugins/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java index 3af440ad02c..d54b976d65d 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/CompoundAssignmentTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** * Tests compound assignments (+=, etc) across all data types @@ -47,7 +47,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(15D, exec("double x = 5.0; x += 10; return x;")); assertEquals(-5D, exec("double x = 5.0; x += -10; return x;")); } - + public void testSubtraction() { // byte assertEquals((byte) 15, exec("byte x = 5; x -= -10; return x;")); @@ -71,7 +71,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(15D, exec("double x = 5.0; x -= -10; return x;")); assertEquals(-5D, exec("double x = 5.0; x -= 10; return x;")); } - + public void testMultiplication() { // byte assertEquals((byte) 15, exec("byte x = 5; x *= 3; return x;")); @@ -94,7 +94,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(15D, exec("double x = 5.0; x *= 3; return x;")); assertEquals(-5D, exec("double x = 5.0; x *= -1; return x;")); } - + public void testDivision() { // byte assertEquals((byte) 15, exec("byte x = 45; x /= 3; return x;")); @@ -117,7 +117,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(15D, exec("double x = 45.0; x /= 3; return x;")); assertEquals(-5D, exec("double x = 5.0; x /= -1; return x;")); } - + public void testDivisionByZero() { // byte try { @@ -130,26 +130,26 @@ public class CompoundAssignmentTests extends ScriptTestCase { exec("short x = 1; x /= 0; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + // char try { exec("char x = 1; x /= 0; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + // int try { exec("int x = 1; x /= 0; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 1; x /= 0; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testRemainder() { // byte assertEquals((byte) 3, exec("byte x = 15; x %= 4; return x;")); @@ -189,7 +189,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(60L, exec("long x = 15L; x <<= 2; return x;")); assertEquals(-60L, exec("long x = -15L; x <<= 2; return x;")); } - + public void testRightShift() { // byte assertEquals((byte) 15, exec("byte x = 60; x >>= 2; return x;")); @@ -206,7 +206,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(15L, exec("long x = 60L; x >>= 2; return x;")); assertEquals(-15L, exec("long x = -60L; x >>= 2; return x;")); } - + public void testUnsignedRightShift() { // byte assertEquals((byte) 15, exec("byte x = 60; x >>>= 2; return x;")); @@ -242,7 +242,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= false; return x[0];")); assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= true; return x[0];")); assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= false; return x[0];")); - + // byte assertEquals((byte) (13 & 14), exec("byte x = 13; x &= 14; return x;")); // short @@ 
-254,7 +254,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { // long assertEquals((long) (13 & 14), exec("long x = 13L; x &= 14; return x;")); } - + public void testOr() { // boolean assertEquals(true, exec("boolean x = true; x |= true; return x;")); @@ -273,7 +273,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= false; return x[0];")); assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= true; return x[0];")); assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= false; return x[0];")); - + // byte assertEquals((byte) (13 | 14), exec("byte x = 13; x |= 14; return x;")); // short @@ -285,7 +285,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { // long assertEquals((long) (13 | 14), exec("long x = 13L; x |= 14; return x;")); } - + public void testXor() { // boolean assertEquals(false, exec("boolean x = true; x ^= true; return x;")); @@ -304,7 +304,7 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= false; return x[0];")); assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= true; return x[0];")); assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= false; return x[0];")); - + // byte assertEquals((byte) (13 ^ 14), exec("byte x = 13; x ^= 14; return x;")); // short diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ConditionalTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ConditionalTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java index bc466427da7..a0eaf31c93b 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ConditionalTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import java.util.ArrayList; import java.util.HashMap; diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DefTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/DefTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DefTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/DefTests.java index 6ff51131fe5..5a3cea711a5 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DefTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/DefTests.java @@ -17,7 +17,7 @@ * under the License. 
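Aside on the CompoundAssignmentTests hunks above: as in Java, a compound assignment implies a cast back to the left-hand type, which is why the byte/short/char cases such as byte x = 5; x *= 3; are expected to compile while the expanded form would need an explicit cast. A plain-Java illustration of that rule (assuming Painless mirrors it, as the narrow-typed test cases suggest):

    byte b = 5;
    b *= 3;            // compiles: equivalent to b = (byte) (b * 3)
    // b = b * 3;      // would not compile: b * 3 promotes to int and needs a cast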
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; public class DefTests extends ScriptTestCase { public void testNot() { @@ -235,7 +235,7 @@ public class DefTests extends ScriptTestCase { assertEquals(0F, exec("def x = (Float)2 def y = (float)2 return x % y")); assertEquals(0D, exec("def x = (Double)2 def y = (double)2 return x % y")); } - + public void testAdd() { assertEquals(2, exec("def x = (byte)1 def y = (byte)1 return x + y")); assertEquals(2, exec("def x = (short)1 def y = (byte)1 return x + y")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DivisionTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/DivisionTests.java similarity index 98% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DivisionTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/DivisionTests.java index 24849fae72b..99a48d1ee9d 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DivisionTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/DivisionTests.java @@ -17,14 +17,14 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for division operator across all types */ //TODO: NaN/Inf/overflow/... public class DivisionTests extends ScriptTestCase { - + // TODO: byte,short,char - + public void testInt() throws Exception { assertEquals(1/1, exec("int x = 1; int y = 1; return x/y;")); assertEquals(2/3, exec("int x = 2; int y = 3; return x/y;")); @@ -35,7 +35,7 @@ public class DivisionTests extends ScriptTestCase { assertEquals(10/1, exec("int x = 10; int y = 1; return x/y;")); assertEquals(0/1, exec("int x = 0; int y = 1; return x/y;")); } - + public void testIntConst() throws Exception { assertEquals(1/1, exec("return 1/1;")); assertEquals(2/3, exec("return 2/3;")); @@ -46,7 +46,7 @@ public class DivisionTests extends ScriptTestCase { assertEquals(10/1, exec("return 10/1;")); assertEquals(0/1, exec("return 0/1;")); } - + public void testLong() throws Exception { assertEquals(1L/1L, exec("long x = 1; long y = 1; return x/y;")); assertEquals(2L/3L, exec("long x = 2; long y = 3; return x/y;")); @@ -57,7 +57,7 @@ public class DivisionTests extends ScriptTestCase { assertEquals(10L/1L, exec("long x = 10; long y = 1; return x/y;")); assertEquals(0L/1L, exec("long x = 0; long y = 1; return x/y;")); } - + public void testLongConst() throws Exception { assertEquals(1L/1L, exec("return 1L/1L;")); assertEquals(2L/3L, exec("return 2L/3L;")); @@ -68,7 +68,7 @@ public class DivisionTests extends ScriptTestCase { assertEquals(10L/1L, exec("return 10L/1L;")); assertEquals(0L/1L, exec("return 0L/1L;")); } - + public void testFloat() throws Exception { assertEquals(1F/1F, exec("float x = 1; float y = 1; return x/y;")); assertEquals(2F/3F, exec("float x = 2; float y = 3; return x/y;")); @@ -79,7 +79,7 @@ public class DivisionTests extends ScriptTestCase { assertEquals(10F/1F, exec("float x = 10; float y = 1; return x/y;")); assertEquals(0F/1F, exec("float x = 0; float y = 1; return x/y;")); } - + public void testFloatConst() throws Exception { assertEquals(1F/1F, exec("return 1F/1F;")); assertEquals(2F/3F, exec("return 2F/3F;")); @@ -90,7 +90,7 @@ public class DivisionTests extends ScriptTestCase { assertEquals(10F/1F, exec("return 10F/1F;")); assertEquals(0F/1F, exec("return 0F/1F;")); } - + public void testDouble() throws Exception { assertEquals(1.0/1.0, exec("double x = 1; double y = 1; return x/y;")); 
assertEquals(2.0/3.0, exec("double x = 2; double y = 3; return x/y;")); @@ -101,7 +101,7 @@ public class DivisionTests extends ScriptTestCase { assertEquals(10.0/1.0, exec("double x = 10; double y = 1; return x/y;")); assertEquals(0.0/1.0, exec("double x = 0; double y = 1; return x/y;")); } - + public void testDoubleConst() throws Exception { assertEquals(1.0/1.0, exec("return 1.0/1.0;")); assertEquals(2.0/3.0, exec("return 2.0/3.0;")); @@ -120,7 +120,7 @@ public class DivisionTests extends ScriptTestCase { } catch (ArithmeticException expected) { // divide by zero } - + try { exec("long x = 1L; long y = 0L; return x / y;"); fail("should have hit exception"); @@ -128,7 +128,7 @@ public class DivisionTests extends ScriptTestCase { // divide by zero } } - + public void testDivideByZeroConst() throws Exception { try { exec("return 1/0;"); @@ -136,7 +136,7 @@ public class DivisionTests extends ScriptTestCase { } catch (ArithmeticException expected) { // divide by zero } - + try { exec("return 1L/0L;"); fail("should have hit exception"); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/EqualsTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/EqualsTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java index db83755aeff..607da9f0f32 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/EqualsTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java @@ -1,4 +1,4 @@ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /* * Licensed to Elasticsearch under one or more contributor diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FieldTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FieldTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java index 7504ed9d4bc..86324ae6aa3 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FieldTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FieldTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.junit.Before; diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java index 2f8966d581f..7bec0b110df 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import java.util.Collections; import java.util.Map; diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java index c858e211faa..91a595680c5 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import java.util.Collections; import java.util.Map; diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IncrementTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IncrementTests.java similarity index 98% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IncrementTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/IncrementTests.java index ec4ffd0ec1d..13d94b9e9b3 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IncrementTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IncrementTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for increment/decrement operators across all data types */ public class IncrementTests extends ScriptTestCase { @@ -29,14 +29,14 @@ public class IncrementTests extends ScriptTestCase { assertEquals((byte)1, exec("byte x = (byte)0; return ++x;")); assertEquals((byte)-1, exec("byte x = (byte)0; return --x;")); } - + /** incrementing char values */ public void testIncrementChar() { assertEquals((char)0, exec("char x = (char)0; return x++;")); assertEquals((char)1, exec("char x = (char)1; return x--;")); assertEquals((char)1, exec("char x = (char)0; return ++x;")); } - + /** incrementing short values */ public void testIncrementShort() { assertEquals((short)0, exec("short x = (short)0; return x++;")); @@ -52,7 +52,7 @@ public class IncrementTests extends ScriptTestCase { assertEquals(1, exec("int x = 0; return ++x;")); assertEquals(-1, exec("int x = 0; return --x;")); } - + /** incrementing long values */ public void testIncrementLong() { assertEquals(0L, exec("long x = 0; return x++;")); @@ -60,7 +60,7 @@ public class IncrementTests extends ScriptTestCase { assertEquals(1L, exec("long x = 0; return ++x;")); assertEquals(-1L, exec("long x = 0; return --x;")); } - + /** incrementing float values */ public void testIncrementFloat() { assertEquals(0F, exec("float x = 0F; return x++;")); @@ -68,7 +68,7 @@ public class IncrementTests extends ScriptTestCase { assertEquals(1F, exec("float x = 0F; return ++x;")); assertEquals(-1F, exec("float x = 0F; return --x;")); } - + /** incrementing double values */ public void testIncrementDouble() { assertEquals(0D, exec("double x = 0.0; return x++;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java similarity index 99% rename from 
plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java index 86b4f46a765..f4adcfce878 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import java.util.Collections; import java.util.Map; diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java index ab5f82fd6d2..f74e193c3fb 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import java.util.Collections; import java.util.Map; diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/MultiplicationTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/MultiplicationTests.java similarity index 98% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/MultiplicationTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/MultiplicationTests.java index c5fde3b6ff1..9c10d90bec2 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/MultiplicationTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/MultiplicationTests.java @@ -17,14 +17,14 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for multiplication operator across all types */ //TODO: NaN/Inf/overflow/... 
public class MultiplicationTests extends ScriptTestCase { - + // TODO: short,byte,char - + public void testInt() throws Exception { assertEquals(1*1, exec("int x = 1; int y = 1; return x*y;")); assertEquals(2*3, exec("int x = 2; int y = 3; return x*y;")); @@ -35,7 +35,7 @@ public class MultiplicationTests extends ScriptTestCase { assertEquals(10*0, exec("int x = 10; int y = 0; return x*y;")); assertEquals(0*0, exec("int x = 0; int y = 0; return x*x;")); } - + public void testIntConst() throws Exception { assertEquals(1*1, exec("return 1*1;")); assertEquals(2*3, exec("return 2*3;")); @@ -46,7 +46,7 @@ public class MultiplicationTests extends ScriptTestCase { assertEquals(10*0, exec("return 10*0;")); assertEquals(0*0, exec("return 0*0;")); } - + public void testByte() throws Exception { assertEquals((byte)1*(byte)1, exec("byte x = 1; byte y = 1; return x*y;")); assertEquals((byte)2*(byte)3, exec("byte x = 2; byte y = 3; return x*y;")); @@ -57,7 +57,7 @@ public class MultiplicationTests extends ScriptTestCase { assertEquals((byte)10*(byte)0, exec("byte x = 10; byte y = 0; return x*y;")); assertEquals((byte)0*(byte)0, exec("byte x = 0; byte y = 0; return x*x;")); } - + public void testLong() throws Exception { assertEquals(1L*1L, exec("long x = 1; long y = 1; return x*y;")); assertEquals(2L*3L, exec("long x = 2; long y = 3; return x*y;")); @@ -68,7 +68,7 @@ public class MultiplicationTests extends ScriptTestCase { assertEquals(10L*0L, exec("long x = 10; long y = 0; return x*y;")); assertEquals(0L*0L, exec("long x = 0; long y = 0; return x*x;")); } - + public void testLongConst() throws Exception { assertEquals(1L*1L, exec("return 1L*1L;")); assertEquals(2L*3L, exec("return 2L*3L;")); @@ -79,7 +79,7 @@ public class MultiplicationTests extends ScriptTestCase { assertEquals(10L*0L, exec("return 10L*0L;")); assertEquals(0L*0L, exec("return 0L*0L;")); } - + public void testFloat() throws Exception { assertEquals(1F*1F, exec("float x = 1; float y = 1; return x*y;")); assertEquals(2F*3F, exec("float x = 2; float y = 3; return x*y;")); @@ -90,7 +90,7 @@ public class MultiplicationTests extends ScriptTestCase { assertEquals(10F*0F, exec("float x = 10; float y = 0; return x*y;")); assertEquals(0F*0F, exec("float x = 0; float y = 0; return x*x;")); } - + public void testFloatConst() throws Exception { assertEquals(1F*1F, exec("return 1F*1F;")); assertEquals(2F*3F, exec("return 2F*3F;")); @@ -101,7 +101,7 @@ public class MultiplicationTests extends ScriptTestCase { assertEquals(10F*0F, exec("return 10F*0F;")); assertEquals(0F*0F, exec("return 0F*0F;")); } - + public void testDouble() throws Exception { assertEquals(1D*1D, exec("double x = 1; double y = 1; return x*y;")); assertEquals(2D*3D, exec("double x = 2; double y = 3; return x*y;")); @@ -112,7 +112,7 @@ public class MultiplicationTests extends ScriptTestCase { assertEquals(10D*0D, exec("double x = 10; float y = 0; return x*y;")); assertEquals(0D*0D, exec("double x = 0; float y = 0; return x*x;")); } - + public void testDoubleConst() throws Exception { assertEquals(1.0*1.0, exec("return 1.0*1.0;")); assertEquals(2.0*3.0, exec("return 2.0*3.0;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/NoSemiColonTest.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTest.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/NoSemiColonTest.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTest.java index 
ff56ee3f07e..b4807bb5b4c 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/NoSemiColonTest.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTest.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import java.util.HashMap; import java.util.Map; diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/OrTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/OrTests.java similarity index 97% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/OrTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/OrTests.java index f3ba0c88fc1..f287b1e4cf4 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/OrTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/OrTests.java @@ -17,29 +17,29 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for or operator across all types */ public class OrTests extends ScriptTestCase { - + public void testInt() throws Exception { assertEquals(5 | 12, exec("int x = 5; int y = 12; return x | y;")); assertEquals(5 | -12, exec("int x = 5; int y = -12; return x | y;")); assertEquals(7 | 15 | 3, exec("int x = 7; int y = 15; int z = 3; return x | y | z;")); } - + public void testIntConst() throws Exception { assertEquals(5 | 12, exec("return 5 | 12;")); assertEquals(5 | -12, exec("return 5 | -12;")); assertEquals(7 | 15 | 3, exec("return 7 | 15 | 3;")); } - + public void testLong() throws Exception { assertEquals(5L | 12L, exec("long x = 5; long y = 12; return x | y;")); assertEquals(5L | -12L, exec("long x = 5; long y = -12; return x | y;")); assertEquals(7L | 15L | 3L, exec("long x = 7; long y = 15; long z = 3; return x | y | z;")); } - + public void testLongConst() throws Exception { assertEquals(5L | 12L, exec("return 5L | 12L;")); assertEquals(5L | -12L, exec("return 5L | -12L;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java similarity index 89% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java index 5e0b0035ceb..dbb596c275c 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/PainlessRestIT.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; @@ -28,9 +28,9 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; /** Runs yaml rest tests */ -public class PlanARestIT extends ESRestTestCase { +public class PainlessRestIT extends ESRestTestCase { - public PlanARestIT(@Name("yaml") RestTestCandidate testCandidate) { + public PainlessRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/RemainderTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/RemainderTests.java similarity index 98% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/RemainderTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/RemainderTests.java index c7b6f7b1e3f..836405a9829 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/RemainderTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/RemainderTests.java @@ -17,14 +17,14 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for division operator across all types */ //TODO: NaN/Inf/overflow/... public class RemainderTests extends ScriptTestCase { - + // TODO: byte,short,char - + public void testInt() throws Exception { assertEquals(1%1, exec("int x = 1; int y = 1; return x%y;")); assertEquals(2%3, exec("int x = 2; int y = 3; return x%y;")); @@ -35,7 +35,7 @@ public class RemainderTests extends ScriptTestCase { assertEquals(10%1, exec("int x = 10; int y = 1; return x%y;")); assertEquals(0%1, exec("int x = 0; int y = 1; return x%y;")); } - + public void testIntConst() throws Exception { assertEquals(1%1, exec("return 1%1;")); assertEquals(2%3, exec("return 2%3;")); @@ -46,7 +46,7 @@ public class RemainderTests extends ScriptTestCase { assertEquals(10%1, exec("return 10%1;")); assertEquals(0%1, exec("return 0%1;")); } - + public void testLong() throws Exception { assertEquals(1L%1L, exec("long x = 1; long y = 1; return x%y;")); assertEquals(2L%3L, exec("long x = 2; long y = 3; return x%y;")); @@ -57,7 +57,7 @@ public class RemainderTests extends ScriptTestCase { assertEquals(10L%1L, exec("long x = 10; long y = 1; return x%y;")); assertEquals(0L%1L, exec("long x = 0; long y = 1; return x%y;")); } - + public void testLongConst() throws Exception { assertEquals(1L%1L, exec("return 1L%1L;")); assertEquals(2L%3L, exec("return 2L%3L;")); @@ -68,7 +68,7 @@ public class RemainderTests extends ScriptTestCase { assertEquals(10L%1L, exec("return 10L%1L;")); assertEquals(0L%1L, exec("return 0L%1L;")); } - + public void testFloat() throws Exception { assertEquals(1F%1F, exec("float x = 1; float y = 1; return x%y;")); assertEquals(2F%3F, exec("float x = 2; float y = 3; return x%y;")); @@ -79,7 +79,7 @@ public class RemainderTests extends ScriptTestCase { assertEquals(10F%1F, exec("float x = 10; float y = 1; return x%y;")); assertEquals(0F%1F, exec("float x = 0; float y = 1; return x%y;")); } - + public void testFloatConst() throws Exception { assertEquals(1F%1F, exec("return 1F%1F;")); assertEquals(2F%3F, exec("return 2F%3F;")); @@ -90,7 +90,7 @@ public class RemainderTests extends ScriptTestCase { assertEquals(10F%1F, exec("return 10F%1F;")); assertEquals(0F%1F, exec("return 0F%1F;")); } - + public void 
testDouble() throws Exception { assertEquals(1.0%1.0, exec("double x = 1; double y = 1; return x%y;")); assertEquals(2.0%3.0, exec("double x = 2; double y = 3; return x%y;")); @@ -101,7 +101,7 @@ public class RemainderTests extends ScriptTestCase { assertEquals(10.0%1.0, exec("double x = 10; double y = 1; return x%y;")); assertEquals(0.0%1.0, exec("double x = 0; double y = 1; return x%y;")); } - + public void testDoubleConst() throws Exception { assertEquals(1.0%1.0, exec("return 1.0%1.0;")); assertEquals(2.0%3.0, exec("return 2.0%3.0;")); @@ -112,7 +112,7 @@ public class RemainderTests extends ScriptTestCase { assertEquals(10.0%1.0, exec("return 10.0%1.0;")); assertEquals(0.0%1.0, exec("return 0.0%1.0;")); } - + public void testDivideByZero() throws Exception { try { exec("int x = 1; int y = 0; return x % y;"); @@ -120,7 +120,7 @@ public class RemainderTests extends ScriptTestCase { } catch (ArithmeticException expected) { // divide by zero } - + try { exec("long x = 1L; long y = 0L; return x % y;"); fail("should have hit exception"); @@ -128,7 +128,7 @@ public class RemainderTests extends ScriptTestCase { // divide by zero } } - + public void testDivideByZeroConst() throws Exception { try { exec("return 1%0;"); @@ -136,7 +136,7 @@ public class RemainderTests extends ScriptTestCase { } catch (ArithmeticException expected) { // divide by zero } - + try { exec("return 1L%0L;"); fail("should have hit exception"); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java similarity index 95% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java index 8f2991c3d0c..b5c3f8cc245 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptEngineTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; @@ -83,7 +83,7 @@ public class ScriptEngineTests extends ScriptTestCase { Object compiledScript = scriptEngine.compile("return ((Map)input.get(\"ctx\")).get(\"value\");", Collections.emptyMap()); ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE, - "testChangingVarsCrossExecution1", "plan-a", compiledScript), vars); + "testChangingVarsCrossExecution1", "painless", compiledScript), vars); ctx.put("value", 1); Object o = script.run(); @@ -99,7 +99,7 @@ public class ScriptEngineTests extends ScriptTestCase { Object compiledScript = scriptEngine.compile("return input.get(\"value\");", Collections.emptyMap()); ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE, - "testChangingVarsCrossExecution2", "plan-a", compiledScript), vars); + "testChangingVarsCrossExecution2", "painless", compiledScript), vars); script.setNextVar("value", 1); Object value = script.run(); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java similarity index 90% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 8ff87bd2a0d..462c25c6634 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.CompiledScript; @@ -34,11 +34,11 @@ import java.util.Map; * Typically just asserts the output of {@code exec()} */ public abstract class ScriptTestCase extends ESTestCase { - protected PlanAScriptEngineService scriptEngine; + protected PainlessScriptEngineService scriptEngine; @Before public void setup() { - scriptEngine = new PlanAScriptEngineService(Settings.EMPTY); + scriptEngine = new PainlessScriptEngineService(Settings.EMPTY); } /** Compiles and returns the result of {@code script} */ @@ -54,7 +54,7 @@ public abstract class ScriptTestCase extends ESTestCase { /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ public Object exec(String script, Map vars, Map compileParams) { Object object = scriptEngine.compile(script, compileParams); - CompiledScript compiled = new CompiledScript(ScriptService.ScriptType.INLINE, getTestName(), "plan-a", object); + CompiledScript compiled = new CompiledScript(ScriptService.ScriptType.INLINE, getTestName(), "painless", object); return scriptEngine.executable(compiled, vars).run(); } } diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/StringTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/StringTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java index 0fbcaa1e6d3..7d489332f92 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/StringTests.java +++ 
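Aside on the ScriptEngineTests and ScriptTestCase hunks above: beyond the package rename, the registered language name passed to CompiledScript changes from "plan-a" to "painless". A sketch of the compile-and-run flow those tests use, assembled from the lines above (a fragment as it would sit inside a test method; it needs java.util.HashMap plus the org.elasticsearch.script imports already shown in the hunks):

    PainlessScriptEngineService engine = new PainlessScriptEngineService(Settings.EMPTY);
    // Compile the script source, then wrap it under the new language name "painless".
    Object compiled = engine.compile("return input.get(\"value\");", Collections.emptyMap());
    CompiledScript wrapped =
            new CompiledScript(ScriptService.ScriptType.INLINE, "example", "painless", compiled);
    ExecutableScript script = engine.executable(wrapped, new HashMap<>());
    script.setNextVar("value", 1);
    Object result = script.run();   // per testChangingVarsCrossExecution2 above, this should come back as 1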
b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java @@ -17,10 +17,10 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; public class StringTests extends ScriptTestCase { - + public void testAppend() { // boolean assertEquals("cat" + true, exec("String s = \"cat\"; return s + true;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/SubtractionTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/SubtractionTests.java similarity index 99% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/SubtractionTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/SubtractionTests.java index 1acd0458b52..f6a14b175e6 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/SubtractionTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/SubtractionTests.java @@ -17,12 +17,12 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for subtraction operator across all types */ //TODO: NaN/Inf/overflow/... public class SubtractionTests extends ScriptTestCase { - + public void testInt() throws Exception { assertEquals(1-1, exec("int x = 1; int y = 1; return x-y;")); assertEquals(2-3, exec("int x = 2; int y = 3; return x-y;")); @@ -33,7 +33,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals(10-0, exec("int x = 10; int y = 0; return x-y;")); assertEquals(0-0, exec("int x = 0; int y = 0; return x-x;")); } - + public void testIntConst() throws Exception { assertEquals(1-1, exec("return 1-1;")); assertEquals(2-3, exec("return 2-3;")); @@ -44,7 +44,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals(10-0, exec("return 10-0;")); assertEquals(0-0, exec("return 0-0;")); } - + public void testByte() throws Exception { assertEquals((byte)1-(byte)1, exec("byte x = 1; byte y = 1; return x-y;")); assertEquals((byte)2-(byte)3, exec("byte x = 2; byte y = 3; return x-y;")); @@ -55,7 +55,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals((byte)10-(byte)1, exec("byte x = 10; byte y = 1; return x-y;")); assertEquals((byte)0-(byte)0, exec("byte x = 0; byte y = 0; return x-y;")); } - + public void testByteConst() throws Exception { assertEquals((byte)1-(byte)1, exec("return (byte)1-(byte)1;")); assertEquals((byte)2-(byte)3, exec("return (byte)2-(byte)3;")); @@ -66,7 +66,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals((byte)10-(byte)1, exec("return (byte)10-(byte)1;")); assertEquals((byte)0-(byte)0, exec("return (byte)0-(byte)0;")); } - + public void testChar() throws Exception { assertEquals((char)1-(char)1, exec("char x = 1; char y = 1; return x-y;")); assertEquals((char)2-(char)3, exec("char x = 2; char y = 3; return x-y;")); @@ -77,7 +77,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals((char)10-(char)1, exec("char x = 10; char y = 1; return x-y;")); assertEquals((char)0-(char)0, exec("char x = 0; char y = 0; return x-y;")); } - + public void testCharConst() throws Exception { assertEquals((char)1-(char)1, exec("return (char)1-(char)1;")); assertEquals((char)2-(char)3, exec("return (char)2-(char)3;")); @@ -88,7 +88,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals((char)10-(char)1, exec("return (char)10-(char)1;")); assertEquals((char)0-(char)0, exec("return (char)0-(char)0;")); } - + public void testShort() throws Exception { 
assertEquals((short)1-(short)1, exec("short x = 1; short y = 1; return x-y;")); assertEquals((short)2-(short)3, exec("short x = 2; short y = 3; return x-y;")); @@ -99,7 +99,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals((short)10-(short)1, exec("short x = 10; short y = 1; return x-y;")); assertEquals((short)0-(short)0, exec("short x = 0; short y = 0; return x-y;")); } - + public void testShortConst() throws Exception { assertEquals((short)1-(short)1, exec("return (short)1-(short)1;")); assertEquals((short)2-(short)3, exec("return (short)2-(short)3;")); @@ -110,7 +110,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals((short)10-(short)1, exec("return (short)10-(short)1;")); assertEquals((short)0-(short)0, exec("return (short)0-(short)0;")); } - + public void testLong() throws Exception { assertEquals(1L-1L, exec("long x = 1; long y = 1; return x-y;")); assertEquals(2L-3L, exec("long x = 2; long y = 3; return x-y;")); @@ -121,7 +121,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals(10L-0L, exec("long x = 10; long y = 0; return x-y;")); assertEquals(0L-0L, exec("long x = 0; long y = 0; return x-x;")); } - + public void testLongConst() throws Exception { assertEquals(1L-1L, exec("return 1L-1L;")); assertEquals(2L-3L, exec("return 2L-3L;")); @@ -132,7 +132,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals(10L-0L, exec("return 10L-0L;")); assertEquals(0L-0L, exec("return 0L-0L;")); } - + public void testFloat() throws Exception { assertEquals(1F-1F, exec("float x = 1; float y = 1; return x-y;")); assertEquals(2F-3F, exec("float x = 2; float y = 3; return x-y;")); @@ -143,7 +143,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals(10F-0F, exec("float x = 10; float y = 0; return x-y;")); assertEquals(0F-0F, exec("float x = 0; float y = 0; return x-x;")); } - + public void testFloatConst() throws Exception { assertEquals(1F-1F, exec("return 1F-1F;")); assertEquals(2F-3F, exec("return 2F-3F;")); @@ -154,7 +154,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals(10F-0F, exec("return 10F-0F;")); assertEquals(0F-0F, exec("return 0F-0F;")); } - + public void testDouble() throws Exception { assertEquals(1D-1D, exec("double x = 1; double y = 1; return x-y;")); assertEquals(2D-3D, exec("double x = 2; double y = 3; return x-y;")); @@ -165,7 +165,7 @@ public class SubtractionTests extends ScriptTestCase { assertEquals(10D-0D, exec("double x = 10; float y = 0; return x-y;")); assertEquals(0D-0D, exec("double x = 0; float y = 0; return x-x;")); } - + public void testyDoubleConst() throws Exception { assertEquals(1.0-1.0, exec("return 1.0-1.0;")); assertEquals(2.0-3.0, exec("return 2.0-3.0;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UnaryTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/UnaryTests.java similarity index 97% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UnaryTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/UnaryTests.java index c0199ffadd5..e670e23925b 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UnaryTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/UnaryTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for unary operators across different types */ public class UnaryTests extends ScriptTestCase { diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UtilityTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java similarity index 96% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UtilityTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java index 5c9fe20d1a7..ba476fac7f2 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UtilityTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; import org.elasticsearch.test.ESTestCase; @@ -25,7 +25,7 @@ import org.elasticsearch.test.ESTestCase; * Tests utility methods (typically built-ins) */ public class UtilityTests extends ESTestCase { - + public void testDivideWithoutOverflowInt() { assertEquals(5 / 2, Utility.divideWithoutOverflow(5, 2)); @@ -33,75 +33,75 @@ public class UtilityTests extends ESTestCase { Utility.divideWithoutOverflow(Integer.MIN_VALUE, -1); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.divideWithoutOverflow(5, 0); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testDivideWithoutOverflowLong() { assertEquals(5L / 2L, Utility.divideWithoutOverflow(5L, 2L)); - + try { Utility.divideWithoutOverflow(Long.MIN_VALUE, -1L); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.divideWithoutOverflow(5L, 0L); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testToByteExact() { for (int b = Byte.MIN_VALUE; b < Byte.MAX_VALUE; b++) { assertEquals((byte)b, Utility.toByteExact(b)); } - + try { Utility.toByteExact(Byte.MIN_VALUE - 1); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.toByteExact(Byte.MAX_VALUE + 1); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testToShortExact() { for (int s = Short.MIN_VALUE; s < Short.MAX_VALUE; s++) { assertEquals((short)s, Utility.toShortExact(s)); } - + try { Utility.toShortExact(Short.MIN_VALUE - 1); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.toShortExact(Short.MAX_VALUE + 1); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testToCharExact() { for (int c = Character.MIN_VALUE; c < Character.MAX_VALUE; c++) { assertEquals((char)c, Utility.toCharExact(c)); } - + try { Utility.toCharExact(Character.MIN_VALUE - 1); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.toCharExact(Character.MAX_VALUE + 1); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAddWithoutOverflowFloat() { assertEquals(10F, Utility.addWithoutOverflow(5F, 5F), 0F); assertTrue(Float.isNaN(Utility.addWithoutOverflow(5F, Float.NaN))); @@ -111,29 +111,29 @@ public class UtilityTests extends ESTestCase { Utility.addWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + 
try { Utility.addWithoutOverflow(-Float.MAX_VALUE, -Float.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAddWithoutOverflowDouble() { assertEquals(10D, Utility.addWithoutOverflow(5D, 5D), 0D); assertTrue(Double.isNaN(Utility.addWithoutOverflow(5D, Double.NaN))); assertTrue(Double.isNaN(Utility.addWithoutOverflow(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY))); - + try { Utility.addWithoutOverflow(Double.MAX_VALUE, Double.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.addWithoutOverflow(-Double.MAX_VALUE, -Double.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testSubtractWithoutOverflowFloat() { assertEquals(5F, Utility.subtractWithoutOverflow(10F, 5F), 0F); assertTrue(Float.isNaN(Utility.subtractWithoutOverflow(5F, Float.NaN))); @@ -143,29 +143,29 @@ public class UtilityTests extends ESTestCase { Utility.subtractWithoutOverflow(Float.MAX_VALUE, -Float.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.subtractWithoutOverflow(-Float.MAX_VALUE, Float.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testSubtractWithoutOverflowDouble() { assertEquals(5D, Utility.subtractWithoutOverflow(10D, 5D), 0D); assertTrue(Double.isNaN(Utility.subtractWithoutOverflow(5D, Double.NaN))); assertTrue(Double.isNaN(Utility.subtractWithoutOverflow(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY))); - + try { Utility.subtractWithoutOverflow(Double.MAX_VALUE, -Double.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.subtractWithoutOverflow(-Double.MAX_VALUE, Double.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testMultiplyWithoutOverflowFloat() { assertEquals(25F, Utility.multiplyWithoutOverflow(5F, 5F), 0F); assertTrue(Float.isNaN(Utility.multiplyWithoutOverflow(5F, Float.NaN))); @@ -176,18 +176,18 @@ public class UtilityTests extends ESTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testMultiplyWithoutOverflowDouble() { assertEquals(25D, Utility.multiplyWithoutOverflow(5D, 5D), 0D); assertTrue(Double.isNaN(Utility.multiplyWithoutOverflow(5D, Double.NaN))); assertEquals(Double.POSITIVE_INFINITY, Utility.multiplyWithoutOverflow(5D, Double.POSITIVE_INFINITY), 0D); - + try { Utility.multiplyWithoutOverflow(Double.MAX_VALUE, Double.MAX_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testDivideWithoutOverflowFloat() { assertEquals(5F, Utility.divideWithoutOverflow(25F, 5F), 0F); assertTrue(Float.isNaN(Utility.divideWithoutOverflow(5F, Float.NaN))); @@ -197,51 +197,51 @@ public class UtilityTests extends ESTestCase { Utility.divideWithoutOverflow(Float.MAX_VALUE, Float.MIN_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.divideWithoutOverflow(0F, 0F); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.divideWithoutOverflow(5F, 0F); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testDivideWithoutOverflowDouble() { assertEquals(5D, Utility.divideWithoutOverflow(25D, 5D), 0D); 
assertTrue(Double.isNaN(Utility.divideWithoutOverflow(5D, Double.NaN))); assertEquals(Double.POSITIVE_INFINITY, Utility.divideWithoutOverflow(Double.POSITIVE_INFINITY, 5D), 0D); - + try { Utility.divideWithoutOverflow(Double.MAX_VALUE, Double.MIN_VALUE); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.divideWithoutOverflow(0D, 0D); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { Utility.divideWithoutOverflow(5D, 0D); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testRemainderWithoutOverflowFloat() { assertEquals(1F, Utility.remainderWithoutOverflow(25F, 4F), 0F); - + try { Utility.remainderWithoutOverflow(5F, 0F); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testRemainderWithoutOverflowDouble() { assertEquals(1D, Utility.remainderWithoutOverflow(25D, 4D), 0D); - + try { Utility.remainderWithoutOverflow(5D, 0D); fail("did not get expected exception"); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java similarity index 81% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index e4dbce83122..8d4fa9a066f 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java @@ -17,9 +17,8 @@ * under the License. */ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; -import java.text.ParseException; import java.util.Collections; public class WhenThingsGoWrongTests extends ScriptTestCase { @@ -54,66 +53,66 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { public void testInfiniteLoops() { try { exec("boolean x = true; while (x) {}"); - fail("should have hit PlanAError"); - } catch (PlanAError expected) { + fail("should have hit PainlessError"); + } catch (PainlessError expected) { assertTrue(expected.getMessage().contains( "The maximum number of statements that can be executed in a loop has been reached.")); } try { exec("while (true) {int y = 5}"); - fail("should have hit PlanAError"); - } catch (PlanAError expected) { + fail("should have hit PainlessError"); + } catch (PainlessError expected) { assertTrue(expected.getMessage().contains( "The maximum number of statements that can be executed in a loop has been reached.")); } try { exec("while (true) { boolean x = true; while (x) {} }"); - fail("should have hit PlanAError"); - } catch (PlanAError expected) { + fail("should have hit PainlessError"); + } catch (PainlessError expected) { assertTrue(expected.getMessage().contains( "The maximum number of statements that can be executed in a loop has been reached.")); } try { exec("while (true) { boolean x = false; while (x) {} }"); - fail("should have hit PlanAError"); - } catch (PlanAError expected) { + fail("should have hit PainlessError"); + } catch (PainlessError expected) { assertTrue(expected.getMessage().contains( "The maximum number of statements that can be executed in a loop has been reached.")); } try { exec("boolean x = true; for (;x;) {}"); - fail("should have hit PlanAError"); - } catch (PlanAError expected) { + fail("should have hit 
PainlessError"); + } catch (PainlessError expected) { assertTrue(expected.getMessage().contains( "The maximum number of statements that can be executed in a loop has been reached.")); } try { exec("for (;;) {int x = 5}"); - fail("should have hit PlanAError"); - } catch (PlanAError expected) { + fail("should have hit PainlessError"); + } catch (PainlessError expected) { assertTrue(expected.getMessage().contains( "The maximum number of statements that can be executed in a loop has been reached.")); } try { exec("def x = true; do {int y = 5;} while (x)"); - fail("should have hit PlanAError"); - } catch (PlanAError expected) { + fail("should have hit PainlessError"); + } catch (PainlessError expected) { assertTrue(expected.getMessage().contains( "The maximum number of statements that can be executed in a loop has been reached.")); } try { - exec("try { int x } catch (PlanAError error) {}"); + exec("try { int x } catch (PainlessError error) {}"); fail("should have hit ParseException"); } catch (RuntimeException expected) { assertTrue(expected.getMessage().contains( - "unexpected token ['PlanAError'] was expecting one of [TYPE].")); + "unexpected token ['PainlessError'] was expecting one of [TYPE].")); } } @@ -123,8 +122,8 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { try { exec("for (int x = 0; x < 10000; ++x) {}"); - fail("should have hit PlanAError"); - } catch (PlanAError expected) { + fail("should have hit PainlessError"); + } catch (PainlessError expected) { assertTrue(expected.getMessage().contains( "The maximum number of statements that can be executed in a loop has been reached.")); } diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/XorTests.java b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/XorTests.java similarity index 98% rename from plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/XorTests.java rename to plugins/lang-painless/src/test/java/org/elasticsearch/painless/XorTests.java index f10477dcd0a..f5dd0a92011 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/XorTests.java +++ b/plugins/lang-painless/src/test/java/org/elasticsearch/painless/XorTests.java @@ -17,29 +17,29 @@ * under the License. 
*/ -package org.elasticsearch.plan.a; +package org.elasticsearch.painless; /** Tests for xor operator across all types */ public class XorTests extends ScriptTestCase { - + public void testInt() throws Exception { assertEquals(5 ^ 12, exec("int x = 5; int y = 12; return x ^ y;")); assertEquals(5 ^ -12, exec("int x = 5; int y = -12; return x ^ y;")); assertEquals(7 ^ 15 ^ 3, exec("int x = 7; int y = 15; int z = 3; return x ^ y ^ z;")); } - + public void testIntConst() throws Exception { assertEquals(5 ^ 12, exec("return 5 ^ 12;")); assertEquals(5 ^ -12, exec("return 5 ^ -12;")); assertEquals(7 ^ 15 ^ 3, exec("return 7 ^ 15 ^ 3;")); } - + public void testLong() throws Exception { assertEquals(5L ^ 12L, exec("long x = 5; long y = 12; return x ^ y;")); assertEquals(5L ^ -12L, exec("long x = 5; long y = -12; return x ^ y;")); assertEquals(7L ^ 15L ^ 3L, exec("long x = 7; long y = 15; long z = 3; return x ^ y ^ z;")); } - + public void testLongConst() throws Exception { assertEquals(5L ^ 12L, exec("return 5L ^ 12L;")); assertEquals(5L ^ -12L, exec("return 5L ^ -12L;")); diff --git a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml b/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml similarity index 51% rename from plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml rename to plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml index 04a5a7a2051..7bdba7d0e95 100644 --- a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml +++ b/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml @@ -1,6 +1,6 @@ -# Integration tests for Plan A Plugin +# Integration tests for Painless Plugin # -"Plan A plugin loaded": +"Painless plugin loaded": - do: cluster.state: {} @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: lang-plan-a } + - match: { nodes.$master.plugins.0.name: lang-painless } diff --git a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml b/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml similarity index 92% rename from plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml rename to plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml index 0a5a3a4a8d4..df1ce6cfe74 100644 --- a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml +++ b/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml @@ -20,7 +20,7 @@ setup: bar: script: inline: "input.doc.foo.0 + input.x;" - lang: plan-a + lang: painless params: x: "bbb" diff --git a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml b/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml similarity index 88% rename from plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml rename to plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml index a8d96a0d6fa..375b8c4986c 100644 --- a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml +++ b/plugins/lang-painless/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml @@ -1,6 +1,6 @@ -# Integration tests for Plan-A search scripting +# Integration tests for Painless search scripting # -"Plan-A Query": +"Painless Query": - do: index: index: test @@ -30,12 +30,12 @@ script: script: 
inline: "input.doc.num1.0 > 1;" - lang: plan-a + lang: painless script_fields: sNum1: script: inline: "input.doc.num1.0;" - lang: plan-a + lang: painless sort: num1: order: asc @@ -52,7 +52,7 @@ script: script: inline: "input.doc.num1.0 > input.param1;" - lang: plan-a + lang: painless params: param1: 1 @@ -60,7 +60,7 @@ sNum1: script: inline: "return input.doc.num1.0;" - lang: plan-a + lang: painless sort: num1: order: asc @@ -77,7 +77,7 @@ script: script: inline: "input.doc.num1.0 > input.param1;" - lang: plan-a + lang: painless params: param1: -1 @@ -85,7 +85,7 @@ sNum1: script: inline: "input.doc.num1.0;" - lang: plan-a + lang: painless sort: num1: order: asc diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java deleted file mode 100644 index 326a62555e7..00000000000 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java +++ /dev/null @@ -1,348 +0,0 @@ -// ANTLR GENERATED CODE: DO NOT EDIT -package org.elasticsearch.plan.a; -import org.antlr.v4.runtime.tree.ParseTreeVisitor; - -/** - * This interface defines a complete generic visitor for a parse tree produced - * by {@link PlanAParser}. - * - * @param The return type of the visit operation. Use {@link Void} for - * operations with no return type. - */ -interface PlanAParserVisitor extends ParseTreeVisitor { - /** - * Visit a parse tree produced by {@link PlanAParser#source}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitSource(PlanAParser.SourceContext ctx); - /** - * Visit a parse tree produced by the {@code if} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitIf(PlanAParser.IfContext ctx); - /** - * Visit a parse tree produced by the {@code while} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitWhile(PlanAParser.WhileContext ctx); - /** - * Visit a parse tree produced by the {@code do} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDo(PlanAParser.DoContext ctx); - /** - * Visit a parse tree produced by the {@code for} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFor(PlanAParser.ForContext ctx); - /** - * Visit a parse tree produced by the {@code decl} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDecl(PlanAParser.DeclContext ctx); - /** - * Visit a parse tree produced by the {@code continue} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitContinue(PlanAParser.ContinueContext ctx); - /** - * Visit a parse tree produced by the {@code break} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitBreak(PlanAParser.BreakContext ctx); - /** - * Visit a parse tree produced by the {@code return} - * labeled alternative in {@link PlanAParser#statement}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitReturn(PlanAParser.ReturnContext ctx); - /** - * Visit a parse tree produced by the {@code try} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTry(PlanAParser.TryContext ctx); - /** - * Visit a parse tree produced by the {@code throw} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitThrow(PlanAParser.ThrowContext ctx); - /** - * Visit a parse tree produced by the {@code expr} - * labeled alternative in {@link PlanAParser#statement}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExpr(PlanAParser.ExprContext ctx); - /** - * Visit a parse tree produced by the {@code multiple} - * labeled alternative in {@link PlanAParser#block}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitMultiple(PlanAParser.MultipleContext ctx); - /** - * Visit a parse tree produced by the {@code single} - * labeled alternative in {@link PlanAParser#block}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitSingle(PlanAParser.SingleContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#empty}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitEmpty(PlanAParser.EmptyContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#emptyscope}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitEmptyscope(PlanAParser.EmptyscopeContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#initializer}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitInitializer(PlanAParser.InitializerContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#afterthought}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitAfterthought(PlanAParser.AfterthoughtContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#declaration}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDeclaration(PlanAParser.DeclarationContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#decltype}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDecltype(PlanAParser.DecltypeContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#declvar}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitDeclvar(PlanAParser.DeclvarContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#trap}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTrap(PlanAParser.TrapContext ctx); - /** - * Visit a parse tree produced by the {@code comp} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitComp(PlanAParser.CompContext ctx); - /** - * Visit a parse tree produced by the {@code bool} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitBool(PlanAParser.BoolContext ctx); - /** - * Visit a parse tree produced by the {@code conditional} - * labeled alternative in {@link PlanAParser#expression}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitConditional(PlanAParser.ConditionalContext ctx); - /** - * Visit a parse tree produced by the {@code assignment} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitAssignment(PlanAParser.AssignmentContext ctx); - /** - * Visit a parse tree produced by the {@code false} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFalse(PlanAParser.FalseContext ctx); - /** - * Visit a parse tree produced by the {@code numeric} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNumeric(PlanAParser.NumericContext ctx); - /** - * Visit a parse tree produced by the {@code unary} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitUnary(PlanAParser.UnaryContext ctx); - /** - * Visit a parse tree produced by the {@code precedence} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPrecedence(PlanAParser.PrecedenceContext ctx); - /** - * Visit a parse tree produced by the {@code preinc} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPreinc(PlanAParser.PreincContext ctx); - /** - * Visit a parse tree produced by the {@code postinc} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPostinc(PlanAParser.PostincContext ctx); - /** - * Visit a parse tree produced by the {@code cast} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitCast(PlanAParser.CastContext ctx); - /** - * Visit a parse tree produced by the {@code external} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExternal(PlanAParser.ExternalContext ctx); - /** - * Visit a parse tree produced by the {@code null} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNull(PlanAParser.NullContext ctx); - /** - * Visit a parse tree produced by the {@code binary} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitBinary(PlanAParser.BinaryContext ctx); - /** - * Visit a parse tree produced by the {@code char} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitChar(PlanAParser.CharContext ctx); - /** - * Visit a parse tree produced by the {@code true} - * labeled alternative in {@link PlanAParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitTrue(PlanAParser.TrueContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extstart}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtstart(PlanAParser.ExtstartContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extprec}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitExtprec(PlanAParser.ExtprecContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extcast}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtcast(PlanAParser.ExtcastContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extbrace}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtbrace(PlanAParser.ExtbraceContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extdot}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtdot(PlanAParser.ExtdotContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#exttype}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExttype(PlanAParser.ExttypeContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extcall}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtcall(PlanAParser.ExtcallContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extvar}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtvar(PlanAParser.ExtvarContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extfield}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtfield(PlanAParser.ExtfieldContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extnew}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtnew(PlanAParser.ExtnewContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#extstring}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitExtstring(PlanAParser.ExtstringContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#arguments}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitArguments(PlanAParser.ArgumentsContext ctx); - /** - * Visit a parse tree produced by {@link PlanAParser#increment}. 
- * @param ctx the parse tree - * @return the visitor result - */ - T visitIncrement(PlanAParser.IncrementContext ctx); -} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java index d2c8ccfd3c9..61410c612ab 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java @@ -93,7 +93,7 @@ public class EvilInternalSettingsPreparerTests extends ESTestCase { assertThat(env.settings().get("node.zone"), equalTo("foo")); settings = settingsBuilder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put("node.zone", "bar") .put(baseEnvSettings) .build(); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java index 24055d9f6dc..d997a167541 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java @@ -593,7 +593,7 @@ public class PluginManagerTests extends ESIntegTestCase { PluginManager.checkForOfficialPlugins("analysis-stempel"); PluginManager.checkForOfficialPlugins("delete-by-query"); PluginManager.checkForOfficialPlugins("lang-javascript"); - PluginManager.checkForOfficialPlugins("lang-plan-a"); + PluginManager.checkForOfficialPlugins("lang-painless"); PluginManager.checkForOfficialPlugins("lang-python"); PluginManager.checkForOfficialPlugins("mapper-attachments"); PluginManager.checkForOfficialPlugins("mapper-murmur3"); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index f18a2874220..1756e6b7dad 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -105,7 +105,7 @@ public class TribeUnitTests extends ESTestCase { Path pathConf = getDataPath("elasticsearch.yml").getParent(); Settings settings = Settings .builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(Environment.PATH_CONF_SETTING.getKey(), pathConf) .build(); assertTribeNodeSuccessfullyCreated(settings); diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index bd9f42490b2..97e328fb1aa 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -78,7 +78,7 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { private static Client startClient(Path tempDir, TransportAddress... transportAddresses) { Settings clientSettings = Settings.settingsBuilder() .put("name", "qa_smoke_client_" + counter.getAndIncrement()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // prevents any settings to be replaced by system properties. 
+ .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash index da5790d69c8..9889048e973 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash @@ -231,8 +231,8 @@ fi install_and_check_plugin lang groovy } -@test "[$GROUP] install lang-plan-a plugin" { - install_and_check_plugin lang plan-a +@test "[$GROUP] install lang-painless plugin" { + install_and_check_plugin lang painless } @test "[$GROUP] install javascript plugin" { @@ -337,8 +337,8 @@ fi remove_plugin lang-groovy } -@test "[$GROUP] remove lang-plan-a plugin" { - remove_plugin lang-plan-a +@test "[$GROUP] remove lang-painless plugin" { + remove_plugin lang-painless } @test "[$GROUP] remove javascript plugin" { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index d1c19f3ef21..d2b9b8cf9b4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -154,10 +154,6 @@ "request_cache": { "type" : "boolean", "description" : "Specify if request cache should be used for this request or not, defaults to index level setting" - }, - "search_after": { - "type" : "list", - "description" : "An array of sort values that indicates where the sort of the top hits should start" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/10_basic.yaml index 9955f4519d6..41552f217be 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/10_basic.yaml @@ -1,10 +1,5 @@ --- -setup: - - skip: - version: "all" - reason: leaves transient metadata behind, need to fix it ---- -"Test put settings": +"Test put and reset transient settings": - do: cluster.put_settings: body: @@ -19,3 +14,50 @@ setup: flat_settings: true - match: {transient: {discovery.zen.minimum_master_nodes: "1"}} + + - do: + cluster.put_settings: + body: + transient: + discovery.zen.minimum_master_nodes: null + flat_settings: true + + - match: {transient: {}} + + - do: + cluster.get_settings: + flat_settings: true + + - match: {transient: {}} +--- +"Test put and reset persistent settings": + - do: + cluster.put_settings: + body: + persistent: + cluster.routing.allocation.disk.threshold_enabled: false + flat_settings: true + + - match: {persistent: {cluster.routing.allocation.disk.threshold_enabled: "false"}} + + - do: + cluster.get_settings: + flat_settings: true + + - match: {persistent: {cluster.routing.allocation.disk.threshold_enabled: "false"}} + + - do: + cluster.put_settings: + body: + persistent: + cluster.routing.allocation.disk.threshold_enabled: null + flat_settings: true + + - match: {persistent: {}} + + - do: + cluster.get_settings: + flat_settings: true + + - match: {persistent: {}} + diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml deleted file mode 100644 index 4162296532d..00000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/11_reset.yaml +++ /dev/null @@ -1,31 +0,0 @@ ---- -"Test reset cluster settings": - - do: - cluster.put_settings: - body: - persistent: - cluster.routing.allocation.disk.threshold_enabled: false - flat_settings: true - - - match: {persistent: {cluster.routing.allocation.disk.threshold_enabled: "false"}} - - - do: - cluster.get_settings: - flat_settings: true - - - match: {persistent: {cluster.routing.allocation.disk.threshold_enabled: "false"}} - - - do: - cluster.put_settings: - body: - persistent: - cluster.routing.allocation.disk.threshold_enabled: null - flat_settings: true - - - match: {persistent: {}} - - - do: - cluster.get_settings: - flat_settings: true - - - match: {persistent: {}} diff --git a/settings.gradle b/settings.gradle index f6dab0f85b8..c8616789569 100644 --- a/settings.gradle +++ b/settings.gradle @@ -27,7 +27,7 @@ List projects = [ 'plugins:discovery-multicast', 'plugins:ingest-geoip', 'plugins:lang-javascript', - 'plugins:lang-plan-a', + 'plugins:lang-painless', 'plugins:lang-python', 'plugins:mapper-attachments', 'plugins:mapper-murmur3', diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java index df9e1f8af24..b5064476ee5 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java @@ -19,6 +19,8 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; /** @@ -28,24 +30,46 @@ import org.elasticsearch.test.ESTestCase; public class TestShardRouting { public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state, long version) { + return newShardRouting(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE), shardId, currentNodeId,primary, state, version); + } + + public static ShardRouting newShardRouting(Index index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state, long version) { return new ShardRouting(index, shardId, currentNodeId, null, null, primary, state, version, buildUnassignedInfo(state), buildAllocationId(state), true, -1); } public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, String relocatingNodeId, boolean primary, ShardRoutingState state, long version) { + return newShardRouting(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE), shardId, currentNodeId, relocatingNodeId, primary, state,version); + } + + public static ShardRouting newShardRouting(Index index, int shardId, String currentNodeId, String relocatingNodeId, boolean primary, ShardRoutingState state, long version) { return new ShardRouting(index, shardId, currentNodeId, relocatingNodeId, null, primary, state, version, buildUnassignedInfo(state), buildAllocationId(state), true, -1); } public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, String relocatingNodeId, boolean primary, ShardRoutingState state, AllocationId allocationId, long 
version) { + return newShardRouting(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE), shardId, currentNodeId, relocatingNodeId, primary, state, allocationId, version); + } + + public static ShardRouting newShardRouting(Index index, int shardId, String currentNodeId, String relocatingNodeId, boolean primary, ShardRoutingState state, AllocationId allocationId, long version) { return new ShardRouting(index, shardId, currentNodeId, relocatingNodeId, null, primary, state, version, buildUnassignedInfo(state), allocationId, true, -1); } public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version) { + return newShardRouting(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE), shardId, currentNodeId, relocatingNodeId, restoreSource, primary, state, version); + } + + public static ShardRouting newShardRouting(Index index, int shardId, String currentNodeId, String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version) { return new ShardRouting(index, shardId, currentNodeId, relocatingNodeId, restoreSource, primary, state, version, buildUnassignedInfo(state), buildAllocationId(state), true, -1); } public static ShardRouting newShardRouting(String index, int shardId, String currentNodeId, String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version, UnassignedInfo unassignedInfo) { + return newShardRouting(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE), shardId, currentNodeId, relocatingNodeId, restoreSource, primary, state,version, unassignedInfo); + } + + public static ShardRouting newShardRouting(Index index, int shardId, String currentNodeId, + String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version, + UnassignedInfo unassignedInfo) { return new ShardRouting(index, shardId, currentNodeId, relocatingNodeId, restoreSource, primary, state, version, unassignedInfo, buildAllocationId(state), true, -1); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index a9b45a5b336..b2b172e88ba 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -52,7 +52,7 @@ public class MapperTestUtils { } Settings finalSettings = settingsBuilder.build(); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); - IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("test"), finalSettings); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", finalSettings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(finalSettings)).build(indexSettings); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); return new MapperService(indexSettings, diff --git a/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java deleted file mode 100644 index a51c3f9eb40..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.test; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionModule; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilter; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugins.Plugin; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; - -import static java.util.Collections.unmodifiableList; - -/** - * Plugin that registers a filter that records actions. - */ -public class ActionRecordingPlugin extends Plugin { - /** - * Fetch all the requests recorded by the test plugin. The list is an - * immutable, moment in time snapshot. - */ - public static List> allRequests() { - List> requests = new ArrayList<>(); - for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { - requests.addAll(filter.requests); - } - return unmodifiableList(requests); - } - - /** - * Fetch all requests recorded by the test plugin of a certain type. The - * list is an immutable, moment in time snapshot. - */ - public static List requestsOfType(Class type) { - List requests = new ArrayList<>(); - for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { - for (ActionRequest request : filter.requests) { - if (type.isInstance(request)) { - requests.add(type.cast(request)); - } - } - } - return unmodifiableList(requests); - } - - /** - * Clear all the recorded requests. Use between test methods that shared a - * suite scoped cluster. 
- */ - public static void clear() { - for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { - filter.requests.clear(); - } - } - - @Override - public String name() { - return "test-action-logging"; - } - - @Override - public String description() { - return "Test action logging"; - } - - @Override - public Collection nodeModules() { - return Collections.singletonList(new ActionRecordingModule()); - } - - public void onModule(ActionModule module) { - module.registerFilter(RecordingFilter.class); - } - - public static class ActionRecordingModule extends AbstractModule { - @Override - protected void configure() { - bind(RecordingFilter.class).asEagerSingleton(); - } - - } - - public static class RecordingFilter extends ActionFilter.Simple { - private final List> requests = new CopyOnWriteArrayList<>(); - - @Inject - public RecordingFilter(Settings settings) { - super(settings); - } - - public List> getRequests() { - return new ArrayList<>(requests); - } - - @Override - public int order() { - return 999; - } - - @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { - requests.add(request); - return true; - } - - @Override - protected boolean apply(String action, ActionResponse response, ActionListener listener) { - return true; - } - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index 5dc824f687d..f653819c140 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -235,7 +235,7 @@ public abstract class ESAllocationTestCase extends ESTestCase { boolean changed = false; while (unassignedIterator.hasNext()) { ShardRouting shard = unassignedIterator.next(); - IndexMetaData indexMetaData = allocation.metaData().index(shard.getIndex()); + IndexMetaData indexMetaData = allocation.metaData().index(shard.getIndexName()); if (shard.primary() || shard.allocatedPostIndexCreate(indexMetaData) == false) { continue; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 71bcca341b8..447bc331d48 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -230,7 +230,7 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndex())) { + if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) { String name = clusterState.nodes().get(shardRouting.currentNodeId()).name(); assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index d673aa9cb6d..d2a6039d1a2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java 
@@ -110,7 +110,6 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.indices.store.IndicesStore; -import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; @@ -1942,7 +1941,7 @@ public abstract class ESIntegTestCase extends ESTestCase { for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { - if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndex())) { + if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) { String name = clusterState.nodes().get(shardRouting.currentNodeId()).name(); nodes.add(name); assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 8b495d35cc6..f2f1d19bc9f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -158,7 +158,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { private Node newNode() { Settings settings = Settings.builder() - .put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong())) + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // TODO: use a consistent data path for custom paths // This needs to tie into the ESIntegTestCase#indexSettings() method @@ -172,7 +172,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put("http.enabled", false) .put(Node.NODE_LOCAL_SETTING.getKey(), true) .put(Node.NODE_DATA_SETTING.getKey(), true) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) .build(); Node build = new MockNode(settings, getVersion(), getPlugins()); build.start(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 8c07111742a..1a7164ff5a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -53,7 +53,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we need network mode for this @@ -115,7 +115,7 @@ final class ExternalNode implements Closeable { case NetworkModule.TRANSPORT_TYPE_KEY: case "discovery.type": case NetworkModule.TRANSPORT_SERVICE_TYPE_KEY: - case 
InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING: + case "config.ignore_system_properties": continue; default: externaNodeSettingsBuilder.put(entry.getKey(), entry.getValue()); @@ -195,7 +195,7 @@ final class ExternalNode implements Closeable { Settings clientSettings = settingsBuilder().put(externalNodeSettings) .put("client.transport.nodes_sampler_interval", "1s") .put("name", "transport_client_" + nodeInfo.getNode().name()) - .put(ClusterName.SETTING, clusterName).put("client.transport.sniff", false).build(); + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), clusterName).put("client.transport.sniff", false).build(); TransportClient client = TransportClient.builder().settings(clientSettings).build(); client.addTransportAddress(addr); this.client = client; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 97e0d0f1642..e54b177fb30 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -74,7 +74,7 @@ public final class ExternalTestCluster extends TestCluster { Settings clientSettings = Settings.settingsBuilder() .put(additionalSettings) .put("name", InternalTestCluster.TRANSPORT_CLIENT_PREFIX + EXTERNAL_CLUSTER_PREFIX + counter.getAndIncrement()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // prevents any settings to be replaced by system properties. + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! diff --git a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java index f4e08979e7c..a7c0d7af01a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java +++ b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java @@ -49,7 +49,7 @@ public class IndexSettingsModule extends AbstractModule { } public static IndexSettings newIndexSettings(String index, Settings settings, Setting... setting) { - return newIndexSettings(new Index(index), settings, setting); + return newIndexSettings(new Index(index, settings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE)), settings, setting); } public static IndexSettings newIndexSettings(Index index, Settings settings, Setting... 
setting) {
@@ -63,6 +63,6 @@ public class IndexSettingsModule extends AbstractModule {
         if (setting.length > 0) {
             settingSet.addAll(Arrays.asList(setting));
         }
-        return new IndexSettings(metaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, metaData.getIndex()), new IndexScopedSettings(Settings.EMPTY, settingSet));
+        return new IndexSettings(metaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), new IndexScopedSettings(Settings.EMPTY, settingSet));
     }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
index ec6499156ae..f2e00b2d3c1 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
@@ -60,6 +60,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.discovery.DiscoveryService;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.NodeEnvironment;
@@ -289,7 +290,7 @@ public final class InternalTestCluster extends TestCluster {
         builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos"));
         builder.put("transport.tcp.port", TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER));
         builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER));
-        builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true);
+        builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true);
         builder.put(Node.NODE_MODE_SETTING.getKey(), nodeMode);
         builder.put("http.pipelining", enableHttpPipelining);
         if (Strings.hasLength(System.getProperty("es.logger.level"))) {
@@ -312,7 +313,7 @@ public final class InternalTestCluster extends TestCluster {
         // always reduce this - it can make tests really slow
         builder.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 20, 50)));
         defaultSettings = builder.build();
-        executor = EsExecutors.newCached("test runner", 0, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test_" + clusterName));
+        executor = EsExecutors.newCached("test runner", 0, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test_" + clusterName), new ThreadContext(Settings.EMPTY));
     }
 
     public static String configuredNodeMode() {
@@ -351,15 +352,15 @@ public final class InternalTestCluster extends TestCluster {
                 .put(getRandomNodeSettings(nodeSeed));
         Settings settings = nodeConfigurationSource.nodeSettings(nodeOrdinal);
         if (settings != null) {
-            if (settings.get(ClusterName.SETTING) != null) {
-                throw new IllegalStateException("Tests must not set a '" + ClusterName.SETTING + "' as a node setting set '" + ClusterName.SETTING + "': [" + settings.get(ClusterName.SETTING) + "]");
+            if (settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey()) != null) {
+                throw new IllegalStateException("Tests must not set a '" + ClusterName.CLUSTER_NAME_SETTING.getKey() + "' as a node setting set '" + ClusterName.CLUSTER_NAME_SETTING.getKey() + "': [" + settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey()) + "]");
             }
             builder.put(settings);
         }
         if (others != null) {
             builder.put(others);
         }
-        builder.put(ClusterName.SETTING, clusterName);
+        builder.put(ClusterName.CLUSTER_NAME_SETTING.getKey(), clusterName);
         return builder.build();
     }
@@ -882,11 +883,11 @@ public final class InternalTestCluster extends TestCluster {
                 .put("client.transport.nodes_sampler_interval", "1s")
                 .put(Environment.PATH_HOME_SETTING.getKey(), baseDir)
                 .put("name", TRANSPORT_CLIENT_PREFIX + node.settings().get("name"))
-                .put(ClusterName.SETTING, clusterName).put("client.transport.sniff", sniff)
+                .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), clusterName).put("client.transport.sniff", sniff)
                 .put(Node.NODE_MODE_SETTING.getKey(), Node.NODE_MODE_SETTING.exists(nodeSettings) ? Node.NODE_MODE_SETTING.get(nodeSettings) : nodeMode)
                 .put("logger.prefix", nodeSettings.get("logger.prefix", ""))
                 .put("logger.level", nodeSettings.get("logger.level", "INFO"))
-                .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
+                .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true)
                 .put(settings);
 
         if (Node.NODE_LOCAL_SETTING.exists(nodeSettings)) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java
index a05309a8a51..77f8de84390 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java
@@ -147,7 +147,7 @@ public abstract class TestCluster implements Iterable, Closeable {
         ClusterStateResponse clusterStateResponse = client().admin().cluster().prepareState().execute().actionGet();
         ObjectArrayList concreteIndices = new ObjectArrayList<>();
         for (IndexMetaData indexMetaData : clusterStateResponse.getState().metaData()) {
-            concreteIndices.add(indexMetaData.getIndex());
+            concreteIndices.add(indexMetaData.getIndex().getName());
         }
         if (!concreteIndices.isEmpty()) {
             assertAcked(client().admin().indices().prepareDelete(concreteIndices.toArray(String.class)));
diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
index e244c861ffa..35faf0d1a32 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
@@ -26,9 +26,6 @@ import org.apache.lucene.search.Sort;
 import org.apache.lucene.util.Counter;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.common.HasContext;
-import org.elasticsearch.common.HasContextAndHeaders;
-import org.elasticsearch.common.HasHeaders;
 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.util.BigArrays;
@@ -99,7 +96,7 @@ public class TestSearchContext extends SearchContext {
     private final Map subPhaseContexts = new HashMap<>();
 
     public TestSearchContext(ThreadPool threadPool,PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ScriptService scriptService, IndexService indexService) {
-        super(ParseFieldMatcher.STRICT, null);
+        super(ParseFieldMatcher.STRICT);
         this.pageCacheRecycler = pageCacheRecycler;
         this.bigArrays = bigArrays.withCircuitBreaking();
         this.indexService = indexService;
@@ -111,7 +108,7 @@ public class TestSearchContext extends SearchContext {
     }
 
     public TestSearchContext() {
-        super(ParseFieldMatcher.STRICT, null);
+        super(ParseFieldMatcher.STRICT);
         this.pageCacheRecycler = null;
         this.bigArrays = null;
         this.indexService = null;
@@ -595,73 +592,6 @@ public class TestSearchContext extends SearchContext {
         throw new UnsupportedOperationException();
     }
 
-    @Override
-    public <V> V putInContext(Object key, Object value) {
-        return null;
-    }
-
-    @Override
-    public void putAllInContext(ObjectObjectAssociativeContainer map) {
-    }
-
-    @Override
-    public <V> V getFromContext(Object key) {
-        return null;
-    }
-
-    @Override
-    public <V> V getFromContext(Object key, V defaultValue) {
-        return defaultValue;
-    }
-
-    @Override
-    public boolean hasInContext(Object key) {
-        return false;
-    }
-
-    @Override
-    public int contextSize() {
-        return 0;
-    }
-
-    @Override
-    public boolean isContextEmpty() {
-        return true;
-    }
-
-    @Override
-    public ImmutableOpenMap getContext() {
-        return ImmutableOpenMap.of();
-    }
-
-    @Override
-    public void copyContextFrom(HasContext other) {
-    }
-
-    @Override
-    public <V> void putHeader(String key, V value) {}
-
-    @Override
-    public <V> V getHeader(String key) {
-        return null;
-    }
-
-    @Override
-    public boolean hasHeader(String key) {
-        return false;
-    }
-
-    @Override
-    public Set getHeaders() {
-        return Collections.emptySet();
-    }
-
-    @Override
-    public void copyHeadersFrom(HasHeaders from) {}
-
-    @Override
-    public void copyContextAndHeadersFrom(HasContextAndHeaders other) {}
-
     @Override
     public Profilers getProfilers() {
         return null; // no profiling
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java
index a24869b40bd..9b1d55be081 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java
@@ -32,14 +32,11 @@ public class FakeRestRequest extends RestRequest {
     private final Map params;
 
     public FakeRestRequest() {
-        this(new HashMap(), new HashMap());
+        this(new HashMap<>());
     }
 
-    public FakeRestRequest(Map headers, Map context) {
+    public FakeRestRequest(Map headers) {
         this.headers = headers;
-        for (Map.Entry entry : context.entrySet()) {
-            putInContext(entry.getKey(), entry.getValue());
-        }
         this.params = new HashMap<>();
     }
@@ -101,4 +98,4 @@
     public Map params() {
         return params;
     }
-}
\ No newline at end of file
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java
index 2b6ded9a5f5..a29739c3982 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java
@@ -30,7 +30,6 @@ import org.apache.http.impl.client.HttpClients;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.Version;
-import org.elasticsearch.client.support.Headers;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.logging.ESLogger;
@@ -38,6 +37,7 @@ import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
 import org.elasticsearch.test.rest.client.http.HttpResponse;
@@ -81,16 +81,16 @@ public class RestClient implements Closeable {
     private final String protocol;
     private final RestSpec restSpec;
     private final CloseableHttpClient httpClient;
-    private final Headers headers;
     private final URL[] urls;
     private final Version esVersion;
+    private final ThreadContext threadContext;
 
     public RestClient(RestSpec restSpec, Settings settings, URL[] urls) throws IOException, RestException {
         assert urls.length > 0;
         this.restSpec = restSpec;
-        this.headers = new Headers(settings);
         this.protocol = settings.get(PROTOCOL, "http");
         this.httpClient = createHttpClient(settings);
+        this.threadContext = new ThreadContext(settings);
         this.urls = urls;
         this.esVersion = readAndCheckVersion();
         logger.info("REST client initialized {}, elasticsearch version: [{}]", urls, esVersion);
@@ -252,7 +252,7 @@ public class RestClient implements Closeable {
 
     protected HttpRequestBuilder httpRequestBuilder(URL url) {
         return new HttpRequestBuilder(httpClient)
-                .addHeaders(headers)
+                .addHeaders(threadContext.getHeaders())
                 .protocol(protocol)
                 .host(url.getHost())
                 .port(url.getPort());
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
index e4c8849a92f..6a484e9ae69 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/http/HttpRequestBuilder.java
@@ -27,7 +27,6 @@ import org.apache.http.client.methods.HttpPut;
 import org.apache.http.client.methods.HttpUriRequest;
 import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.CloseableHttpClient;
-import org.elasticsearch.client.support.Headers;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
@@ -135,10 +134,8 @@ public class HttpRequestBuilder {
         }
     }
 
-    public HttpRequestBuilder addHeaders(Headers headers) {
-        for (String header : headers.headers().names()) {
-            this.headers.put(header, headers.headers().get(header));
-        }
+    public HttpRequestBuilder addHeaders(Map headers) {
+        this.headers.putAll(headers);
         return this;
     }
diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
index 0764ad4ebea..03da80741f1 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
@@ -71,7 +71,7 @@ public class InternalTestClusterTests extends ESTestCase {
     final static Set clusterUniqueSettings = new HashSet<>();
 
     static {
-        clusterUniqueSettings.add(ClusterName.SETTING);
+        clusterUniqueSettings.add(ClusterName.CLUSTER_NAME_SETTING.getKey());
        clusterUniqueSettings.add("transport.tcp.port");
        clusterUniqueSettings.add("http.port");